15 #ifndef _ASM_TILE_DMA_MAPPING_H
16 #define _ASM_TILE_DMA_MAPPING_H
47 return paddr + get_dma_offset(dev);
52 return daddr - get_dma_offset(dev);
69 return addr + size - 1 <= *dev->
dma_mask;
75 return get_dma_ops(dev)->mapping_error(dev, dma_addr);
90 if ((dma_ops == gx_pci_dma_map_ops) && (mask <=
DMA_BIT_MASK(32))) {
91 set_dma_ops(dev, gx_legacy_pci_dma_map_ops);
92 set_dma_offset(dev, 0);
93 if (mask > dev->
archdata.max_direct_dma_addr)
94 mask = dev->
archdata.max_direct_dma_addr;
105 static inline void *dma_alloc_attrs(
struct device *dev,
size_t size,
112 cpu_addr = dma_ops->
alloc(dev, size, dma_handle, flag, attrs);
119 static inline void dma_free_attrs(
struct device *dev,
size_t size,
127 dma_ops->
free(dev, size, cpu_addr, dma_handle, attrs);
/*
 * Back-compat entry points: the plain coherent/non-coherent alloc and
 * free interfaces are thin wrappers that forward to the *_attrs
 * variants with a NULL attribute set.  Note that on this platform the
 * "noncoherent" macros expand to exactly the same calls as the
 * "coherent" ones, so both allocation flavors share one implementation.
 */
#define dma_alloc_coherent(d, s, h, f) dma_alloc_attrs(d, s, h, f, NULL)
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_attrs(d, s, h, f, NULL)
#define dma_free_coherent(d, s, v, h) dma_free_attrs(d, s, v, h, NULL)
#define dma_free_noncoherent(d, s, v, h) dma_free_attrs(d, s, v, h, NULL)