#include <linux/types.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>
#include <linux/gfp.h>
#include <linux/highmem.h>

#include <asm/cache.h>
#include <asm/io.h>

#include <dma-coherence.h>
static inline struct page *dma_addr_to_page(struct device *dev,
	dma_addr_t dma_addr)
{
	return pfn_to_page(
		plat_dma_addr_to_phys(dev, dma_addr) >> PAGE_SHIFT);
}
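
/*
 * The CPUs checked below can speculatively execute loads and stores,
 * so their caches may need to be invalidated again even after a buffer
 * has been handed to the device.
 */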
static inline int cpu_is_noncoherent_r10000(struct device *dev)
{
	return !plat_device_is_coherent(dev) &&
	       (current_cpu_type() == CPU_R10000 ||
		current_cpu_type() == CPU_R12000);
}
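
/*
 * Translate the device's coherent DMA mask into the matching GFP zone
 * modifier, depending on which DMA zones the kernel was built with.
 */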
static gfp_t massage_gfp_flags(const struct device *dev, gfp_t gfp)
{
	gfp_t dma_flag;

	/* Ignore region specifiers supplied by the caller. */
	gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM);

#if defined(CONFIG_ZONE_DMA32) && defined(CONFIG_ZONE_DMA)
	if (dev->coherent_dma_mask < DMA_BIT_MASK(32))
		dma_flag = __GFP_DMA;
	else if (dev->coherent_dma_mask < DMA_BIT_MASK(64))
		dma_flag = __GFP_DMA32;
	else
#endif
#if defined(CONFIG_ZONE_DMA32) && !defined(CONFIG_ZONE_DMA)
	if (dev->coherent_dma_mask < DMA_BIT_MASK(64))
		dma_flag = __GFP_DMA32;
	else
#endif
#if defined(CONFIG_ZONE_DMA) && !defined(CONFIG_ZONE_DMA32)
	if (dev->coherent_dma_mask < DMA_BIT_MASK(64))
		dma_flag = __GFP_DMA;
	else
#endif
		dma_flag = 0;

	return gfp | dma_flag;
}
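
/*
 * dma_alloc_noncoherent() returns normal cached memory; callers are
 * responsible for syncing it around device accesses (see dma_cache_sync()
 * below).
 */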
void *dma_alloc_noncoherent(struct device *dev, size_t size,
	dma_addr_t *dma_handle, gfp_t gfp)
{
	void *ret;

	gfp = massage_gfp_flags(dev, gfp);

	ret = (void *) __get_free_pages(gfp, get_order(size));
	if (ret != NULL) {
		memset(ret, 0, size);
		*dma_handle = plat_map_dma_mem(dev, ret, size);
	}

	return ret;
}
EXPORT_SYMBOL(dma_alloc_noncoherent);
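
/*
 * On non-coherent platforms the coherent allocator writes back and
 * invalidates the buffer, then returns an uncached alias of it.
 */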
static void *mips_dma_alloc_coherent(struct device *dev, size_t size,
	dma_addr_t *dma_handle, gfp_t gfp, struct dma_attrs *attrs)
{
	void *ret;

	gfp = massage_gfp_flags(dev, gfp);

	ret = (void *) __get_free_pages(gfp, get_order(size));
	if (ret) {
		memset(ret, 0, size);
		*dma_handle = plat_map_dma_mem(dev, ret, size);

		if (!plat_device_is_coherent(dev)) {
			dma_cache_wback_inv((unsigned long) ret, size);
			ret = UNCAC_ADDR(ret);
		}
	}

	return ret;
}
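
/*
 * Free a coherent allocation; an uncached address must be converted
 * back to its cached equivalent before free_pages().
 */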
static void mips_dma_free_coherent(struct device *dev, size_t size,
	void *vaddr, dma_addr_t dma_handle, struct dma_attrs *attrs)
{
	unsigned long addr = (unsigned long) vaddr;

	plat_unmap_dma_mem(dev, dma_handle, size, DMA_BIDIRECTIONAL);

	if (!plat_device_is_coherent(dev))
		addr = CAC_ADDR(addr);

	free_pages(addr, get_order(size));
}
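
/*
 * Write back and/or invalidate the caches covering a virtual address
 * range, depending on the DMA direction.
 */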
static inline void __dma_sync_virtual(void *addr, size_t size,
	enum dma_data_direction direction)
{
	switch (direction) {
	case DMA_TO_DEVICE:
		dma_cache_wback((unsigned long)addr, size);
		break;

	case DMA_FROM_DEVICE:
		dma_cache_inv((unsigned long)addr, size);
		break;

	case DMA_BIDIRECTIONAL:
		dma_cache_wback_inv((unsigned long)addr, size);
		break;

	default:
		BUG();
	}
}
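
/*
 * A single sg entry may refer to multiple physically contiguous pages,
 * but highmem pages must still be mapped and processed one at a time.
 */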
static inline void __dma_sync(struct page *page, unsigned long offset,
	size_t size, enum dma_data_direction direction)
{
	size_t left = size;

	do {
		size_t len = left;

		if (PageHighMem(page)) {
			void *addr;

			/* Clamp to the current page boundary. */
			if (offset + len > PAGE_SIZE) {
				if (offset >= PAGE_SIZE) {
					page += offset >> PAGE_SHIFT;
					offset &= ~PAGE_MASK;
				}
				len = PAGE_SIZE - offset;
			}

			addr = kmap_atomic(page);
			__dma_sync_virtual(addr + offset, len, direction);
			kunmap_atomic(addr);
		} else
			__dma_sync_virtual(page_address(page) + offset,
					   len, direction);
		offset = 0;
		page++;
		left -= len;
	} while (left);
}
static void mips_dma_unmap_page(struct device *dev, dma_addr_t dma_addr,
	size_t size, enum dma_data_direction direction, struct dma_attrs *attrs)
{
	if (cpu_is_noncoherent_r10000(dev))
		__dma_sync(dma_addr_to_page(dev, dma_addr),
			   dma_addr & ~PAGE_MASK, size, direction);

	plat_unmap_dma_mem(dev, dma_addr, size, direction);
}
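
/*
 * Map each scatterlist entry; on non-coherent platforms the CPU caches
 * are synced before the pages are handed to the device.
 */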
static int mips_dma_map_sg(struct device *dev, struct scatterlist *sg,
	int nents, enum dma_data_direction direction, struct dma_attrs *attrs)
{
	int i;

	for (i = 0; i < nents; i++, sg++) {
		if (!plat_device_is_coherent(dev))
			__dma_sync(sg_page(sg), sg->offset, sg->length,
				   direction);
		sg->dma_address = plat_map_dma_mem_page(dev, sg_page(sg)) +
				  sg->offset;
	}

	return nents;
}
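
/*
 * Map a single page for streaming DMA; the bus address is the page's
 * platform DMA address plus the offset into the page.
 */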
static dma_addr_t mips_dma_map_page(struct device *dev, struct page *page,
	unsigned long offset, size_t size, enum dma_data_direction direction,
	struct dma_attrs *attrs)
{
	if (!plat_device_is_coherent(dev))
		__dma_sync(page, offset, size, direction);

	return plat_map_dma_mem_page(dev, page) + offset;
}
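
/*
 * Unmap a scatterlist; buffers that only went to the device need no
 * cache invalidation on the way back.
 */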
static void mips_dma_unmap_sg(struct device *dev, struct scatterlist *sg,
	int nhwentries, enum dma_data_direction direction,
	struct dma_attrs *attrs)
{
	int i;

	for (i = 0; i < nhwentries; i++, sg++) {
		if (!plat_device_is_coherent(dev) &&
		    direction != DMA_TO_DEVICE)
			__dma_sync(sg_page(sg), sg->offset, sg->length,
				   direction);
		plat_unmap_dma_mem(dev, sg->dma_address, sg->length,
				   direction);
	}
}
static void mips_dma_sync_single_for_cpu(struct device *dev,
	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction)
{
	if (cpu_is_noncoherent_r10000(dev))
		__dma_sync(dma_addr_to_page(dev, dma_handle),
			   dma_handle & ~PAGE_MASK, size, direction);
}
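
/*
 * Before the device touches the buffer again, write back any cache
 * lines the CPU has dirtied.
 */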
static void mips_dma_sync_single_for_device(struct device *dev,
	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction)
{
	plat_extra_sync_for_device(dev);
	if (!plat_device_is_coherent(dev))
		__dma_sync(dma_addr_to_page(dev, dma_handle),
			   dma_handle & ~PAGE_MASK, size, direction);
}
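
/* Scatterlist variants of the single-buffer sync operations. */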
static void mips_dma_sync_sg_for_cpu(struct device *dev,
	struct scatterlist *sg, int nelems, enum dma_data_direction direction)
{
	int i;

	/* Make sure that gcc doesn't leave the empty loop body. */
	for (i = 0; i < nelems; i++, sg++) {
		if (cpu_is_noncoherent_r10000(dev))
			__dma_sync(sg_page(sg), sg->offset, sg->length,
				   direction);
	}
}
static void mips_dma_sync_sg_for_device(struct device *dev,
	struct scatterlist *sg, int nelems, enum dma_data_direction direction)
{
	int i;

	for (i = 0; i < nelems; i++, sg++) {
		if (!plat_device_is_coherent(dev))
			__dma_sync(sg_page(sg), sg->offset, sg->length,
				   direction);
	}
}
int mips_dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	return plat_dma_mapping_error(dev, dma_addr);
}

int mips_dma_supported(struct device *dev, u64 mask)
{
	return plat_dma_supported(dev, mask);
}
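
/*
 * Sync memory returned by dma_alloc_noncoherent() around device
 * accesses.
 */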
void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	enum dma_data_direction direction)
{
	BUG_ON(direction == DMA_NONE);

	plat_extra_sync_for_device(dev);
	if (!plat_device_is_coherent(dev))
		__dma_sync_virtual(vaddr, size, direction);
}
EXPORT_SYMBOL(dma_cache_sync);
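
/*
 * Default operations table; platform code may point mips_dma_map_ops
 * at its own implementation instead.
 */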
static struct dma_map_ops mips_default_dma_map_ops = {
	.alloc = mips_dma_alloc_coherent,
	.free = mips_dma_free_coherent,
	.map_page = mips_dma_map_page,
	.unmap_page = mips_dma_unmap_page,
	.map_sg = mips_dma_map_sg,
	.unmap_sg = mips_dma_unmap_sg,
	.sync_single_for_cpu = mips_dma_sync_single_for_cpu,
	.sync_single_for_device = mips_dma_sync_single_for_device,
	.sync_sg_for_cpu = mips_dma_sync_sg_for_cpu,
	.sync_sg_for_device = mips_dma_sync_sg_for_device,
	.mapping_error = mips_dma_mapping_error,
	.dma_supported = mips_dma_supported
};

struct dma_map_ops *mips_dma_map_ops = &mips_default_dma_map_ops;
EXPORT_SYMBOL(mips_dma_map_ops);
#define PREALLOC_DMA_DEBUG_ENTRIES (1 << 16)
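
/* Register the preallocated entries with the DMA debugging facility. */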
static int __init mips_dma_init(void)
{
	dma_debug_init(PREALLOC_DMA_DEBUG_ENTRIES);

	return 0;
}
fs_initcall(mips_dma_init);