#include <linux/dma-mapping.h>
#include <linux/dma-debug.h>
#include <linux/export.h>

#include <asm/cpuinfo.h>
#include <asm/spr_defs.h>
#include <asm/tlbflush.h>
/* Page-walk callback: mark one page of a coherent buffer as cache-inhibited. */
static int page_set_nocache(pte_t *pte, unsigned long addr,
                            unsigned long next, struct mm_walk *walk)
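{
        /*
         * The body is not part of this fragment. A minimal sketch of the
         * usual OpenRISC pattern, assuming the pre-SMP global "cpuinfo"
         * from <asm/cpuinfo.h> and the SPR_DCBFR dcache flush register:
         */
        unsigned long cl;

        pte_val(*pte) |= _PAGE_CI;      /* cache-inhibit this page */

        /* drop the stale TLB entry so the new flag is seen on the next access */
        flush_tlb_page(NULL, addr);

        /* write the page's dcache lines back before it is used uncached */
        for (cl = __pa(addr); cl < __pa(next); cl += cpuinfo.dcache_block_size)
                mtspr(SPR_DCBFR, cl);

        return 0;
}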
/* Page-walk callback: make the page cacheable again when the buffer is freed. */
static int page_clear_nocache(pte_t *pte, unsigned long addr,
                              unsigned long next, struct mm_walk *walk)
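{
        /*
         * Body not part of this fragment; a sketch of the inverse of
         * page_set_nocache(): clear the cache-inhibit bit and flush the
         * TLB entry so the next access picks up the new flags.
         */
        pte_val(*pte) &= ~_PAGE_CI;
        flush_tlb_page(NULL, addr);

        return 0;
}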
        struct mm_walk walk = {
                .pte_entry = page_set_nocache,
                .mm = &init_mm  /* coherent buffers live in the kernel mapping */
        };
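        /*
         * The allocation step itself is not in this fragment; the usual
         * pattern, with "gfp" taken from the standard .alloc prototype:
         */
        page = alloc_pages_exact(size, gfp);
        if (!page)
                return NULL;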
        /* the DMA handle is simply the physical address of the buffer */
        *dma_handle = __pa(page);

        va = (unsigned long)page;
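        /*
         * The rest of the allocator is not part of this fragment. A sketch
         * of the usual pattern: walk the kernel mapping of the new buffer so
         * page_set_nocache() runs on every PTE, then return the kernel
         * virtual address (walk_page_range() as in the pre-5.4 page-walk
         * API, which takes a caller-built struct mm_walk).
         */
        if (walk_page_range(va, va + size, &walk)) {
                free_pages_exact(page, size);
                return NULL;
        }

        return (void *)va;
}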
/* Free a coherent buffer: make its pages cacheable again, then release them. */
static void
or1k_dma_free(struct device *dev, size_t size, void *vaddr,
              dma_addr_t dma_handle, struct dma_attrs *attrs)
{
        unsigned long va = (unsigned long)vaddr;
        struct mm_walk walk = {
                .pte_entry = page_clear_nocache,
                .mm = &init_mm
        };
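        /*
         * Remainder of the free path is not part of this fragment; a sketch
         * of the usual pattern: undo the cache-inhibit marking over the
         * whole buffer, then hand the pages back.
         */
        WARN_ON(walk_page_range(va, va + size, &walk));

        free_pages_exact(vaddr, size);
}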
/*
 * Streaming mapping of a single page: no remapping is needed on OpenRISC,
 * only dcache maintenance appropriate to the transfer direction.
 */
static dma_addr_t
or1k_map_page(struct device *dev, struct page *page,
              unsigned long offset, size_t size,
              enum dma_data_direction dir,
              struct dma_attrs *attrs)
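{
        /*
         * The opening of the body is not in this fragment; a sketch of the
         * usual pattern: the bus address is just the physical address, and
         * the cache maintenance below depends on the direction.
         */
        unsigned long cl;
        dma_addr_t addr = page_to_phys(page) + offset;

        switch (dir) {
        case DMA_TO_DEVICE: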
                /* flush the dcache for the requested range */
                for (cl = addr; cl < addr + size; cl += cpuinfo.dcache_block_size)
                        mtspr(SPR_DCBFR, cl);
                break;
        case DMA_FROM_DEVICE:
                /* invalidate the dcache for the requested range */
                for (cl = addr; cl < addr + size; cl += cpuinfo.dcache_block_size)
                        mtspr(SPR_DCBIR, cl);
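                break;
        default:
                /*
                 * DMA_BIDIRECTIONAL buffers are synced explicitly via the
                 * sync_single hooks, so nothing is done here (sketch; the
                 * tail of the function is not in the fragment).
                 */
                break;
        }

        return addr;
}

/*
 * or1k_unmap_page(), or1k_map_sg() and or1k_unmap_sg() sit here in the full
 * file; they are referenced by the ops table below but are not part of this
 * fragment.
 */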
static void
or1k_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
                         size_t size, enum dma_data_direction dir)
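{
        /*
         * Body not in this fragment; a sketch of the usual pattern: the
         * device has just written the buffer, so invalidate its dcache
         * lines before the CPU reads them (SPR_DCBIR, stepping by the
         * dcache block size reported in cpuinfo).
         */
        unsigned long cl;

        for (cl = dma_handle; cl < dma_handle + size;
             cl += cpuinfo.dcache_block_size)
                mtspr(SPR_DCBIR, cl);
}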
static void
or1k_sync_single_for_device(struct device *dev, dma_addr_t dma_handle,
                            size_t size, enum dma_data_direction dir)
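{
        /*
         * Sketch mirroring the loop above: before the device reads the
         * buffer, flush CPU-written data out of the dcache (SPR_DCBFR).
         */
        unsigned long cl;

        for (cl = dma_handle; cl < dma_handle + size;
             cl += cpuinfo.dcache_block_size)
                mtspr(SPR_DCBFR, cl);
}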
struct dma_map_ops or1k_dma_map_ops = {
        .alloc = or1k_dma_alloc,
        .free = or1k_dma_free,
        .map_page = or1k_map_page,
        .unmap_page = or1k_unmap_page,
        .map_sg = or1k_map_sg,
        .unmap_sg = or1k_unmap_sg,
        .sync_single_for_cpu = or1k_sync_single_for_cpu,
        .sync_single_for_device = or1k_sync_single_for_device,
};
EXPORT_SYMBOL(or1k_dma_map_ops);
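/*
 * Nothing calls these hooks directly: generic helpers such as
 * dma_alloc_coherent() and dma_map_page() look this table up through the
 * architecture's get_dma_ops() and dispatch to the or1k_* routines above.
 */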
/* number of entries preallocated for the DMA-API debugging facility */
#define PREALLOC_DMA_DEBUG_ENTRIES (1 << 16)
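/*
 * The initcall that consumes the constant above is not part of this
 * fragment. A minimal sketch of the usual pattern (the function name is
 * assumed; dma_debug_init() still took an entry count in kernels of this
 * generation):
 */
static int __init dma_init(void)
{
        dma_debug_init(PREALLOC_DMA_DEBUG_ENTRIES);

        return 0;
}
fs_initcall(dma_init);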