#ifndef _ASM_DMA_MAPPING_H
#define _ASM_DMA_MAPPING_H
#include <linux/types.h>
#include <linux/mm.h>		/* struct page, used by __dma_sync_page() */
#include <linux/dma-attrs.h>	/* struct dma_attrs */
#include <asm/swiotlb.h>
/* Returned by the mapping ops to flag a failed mapping. */
#define DMA_ERROR_CODE		(~(dma_addr_t)0x0)
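/*
 * Usage sketch (standard driver-side check; the buffer and length are
 * hypothetical): a streaming mapping should be tested with
 * dma_mapping_error() rather than compared against DMA_ERROR_CODE
 * directly, e.g.
 *
 *	dma_addr_t bus = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, bus))
 *		return -ENOMEM;
 */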
#ifdef CONFIG_NOT_COHERENT_CACHE
/*
 * DMA-consistent mapping helpers for PowerPCs that cannot snoop the
 * cache: allocate/free uncached mappings and flush/invalidate the
 * cache around device accesses by hand.
 */
extern void *__dma_alloc_coherent(struct device *dev, size_t size,
				  dma_addr_t *handle, gfp_t gfp);
extern void __dma_free_coherent(size_t size, void *vaddr);
extern void __dma_sync(void *vaddr, size_t size, int direction);
extern void __dma_sync_page(struct page *page, unsigned long offset,
			    size_t size, int direction);

#else /* ! CONFIG_NOT_COHERENT_CACHE */
/*
 * Cache-coherent cores: the same helpers compile away to no-ops.
 */
#define __dma_alloc_coherent(dev, gfp, size, handle)	NULL
#define __dma_free_coherent(size, addr)			((void)0)
#define __dma_sync(addr, size, rw)			((void)0)
#define __dma_sync_page(pg, off, sz, rw)		((void)0)

#endif /* ! CONFIG_NOT_COHERENT_CACHE */
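/*
 * Sketch of how the helpers above are meant to be used (illustrative
 * only; page, offset and size come from the caller): on a non-coherent
 * core the streaming-DMA implementation keeps the cache consistent by
 * hand around the device access, roughly
 *
 *	__dma_sync_page(page, offset, size, DMA_TO_DEVICE);
 *	... device reads the buffer ...
 *	__dma_sync_page(page, offset, size, DMA_FROM_DEVICE);
 *
 * whereas on cache-coherent cores the same calls compile away.
 */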
static inline unsigned long device_to_mask(struct device *dev)
{
	if (dev->dma_mask && *dev->dma_mask)
		return *dev->dma_mask;

	/* Assume devices without a mask can take 32 bit addresses */
	return 0xfffffffful;
}
static inline dma_addr_t get_dma_offset(struct device *dev)
{
	if (dev)
		return dev->archdata.dma_data.dma_offset;

	return PCI_DRAM_OFFSET;
}

static inline void set_dma_offset(struct device *dev, dma_addr_t off)
{
	if (dev)
		dev->archdata.dma_data.dma_offset = off;
}
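/*
 * Sketch (assumed bus-setup usage; the device pointer and offset value
 * are hypothetical): platform code records the bus window offset once
 * per device, e.g.
 *
 *	set_dma_offset(&pdev->dev, 0x80000000ull);
 *
 * and the direct DMA ops read it back through get_dma_offset() when
 * translating addresses.
 */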
/* Legacy hook; a no-op on powerpc. */
#define flush_write_buffers()
#define dma_alloc_coherent(d,s,h,f)	dma_alloc_attrs(d,s,h,f,NULL)
static inline void *dma_alloc_attrs(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t flag,
				    struct dma_attrs *attrs)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);
	void *cpu_addr;

	BUG_ON(!dma_ops);
	cpu_addr = dma_ops->alloc(dev, size, dma_handle, flag, attrs);
	return cpu_addr;
}
#define dma_free_coherent(d,s,c,h)	dma_free_attrs(d,s,c,h,NULL)
static inline void dma_free_attrs(struct device *dev, size_t size,
				  void *cpu_addr, dma_addr_t dma_handle,
				  struct dma_attrs *attrs)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	BUG_ON(!dma_ops);
	dma_ops->free(dev, size, cpu_addr, dma_handle, attrs);
}
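/*
 * Usage sketch for the wrappers above (typical driver code; the device
 * pointer and buffer size are hypothetical):
 *
 *	dma_addr_t ring_dma;
 *	void *ring = dma_alloc_coherent(&pdev->dev, PAGE_SIZE,
 *					&ring_dma, GFP_KERNEL);
 *	if (!ring)
 *		return -ENOMEM;
 *	... hand ring_dma to the device, use 'ring' from the CPU ...
 *	dma_free_coherent(&pdev->dev, PAGE_SIZE, ring, ring_dma);
 */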
static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
#ifdef CONFIG_SWIOTLB
	struct dev_archdata *sd = &dev->archdata;

	/* Addresses beyond the direct-DMA limit must bounce through swiotlb */
	if (sd->max_direct_dma_addr && addr + size > sd->max_direct_dma_addr)
		return false;
#endif

	if (!dev->dma_mask)
		return false;

	return addr + size - 1 <= *dev->dma_mask;
}
static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	return paddr + get_dma_offset(dev);
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t daddr)
{
	return daddr - get_dma_offset(dev);
}
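/*
 * Worked example (numbers are hypothetical): if get_dma_offset(dev)
 * returns 0x80000000, a buffer at CPU physical address 0x01000000 is
 * presented to the device as 0x81000000 by phys_to_dma(), and
 * dma_to_phys() maps 0x81000000 back to 0x01000000.
 */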
#define dma_alloc_noncoherent(d, s, h, f)	dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h)	dma_free_coherent(d, s, v, h)
#define ARCH_HAS_DMA_MMAP_COHERENT

#endif /* _ASM_DMA_MAPPING_H */