#include <linux/export.h>
#include <asm/tlbflush.h>

/*
 * Home-cache policy for pages set up for DMA: uncached on chips without
 * hash-for-home caching, hash-for-home where it is available.
 */
#if !CHIP_HAS_CBOX_HOME_MAP()
#define PAGE_HOME_DMA PAGE_HOME_UNCACHED
#else
#define PAGE_HOME_DMA PAGE_HOME_HASH
#endif
static void *tile_dma_alloc_coherent(struct device *dev, size_t size,
				     dma_addr_t *dma_handle, gfp_t gfp,
				     /* ... */)
{
	/* Prefer memory on the NUMA node closest to the requesting device. */
	int node = dev_to_node(dev);
	/* ... */
	/* Fail the allocation if it lands above the device's DMA mask. */
	if (addr + size > dma_mask) {
		/* ... */
/* Free memory allocated with tile_dma_alloc_coherent(). */
static void tile_dma_free_coherent(struct device *dev, size_t size,
				   void *vaddr, dma_addr_t dma_handle,
				   /* ... */)
{
	/* ... */
}
/* Prepare a page so a device can safely DMA to/from it. */
static void __dma_prep_page(struct page *page, unsigned long offset,
			    size_t size, enum dma_data_direction direction)
{
	/* ... */
#ifdef DEBUG_ALIGNMENT
	/* Warn about unaligned DMA to memory that is not hash-for-home. */
	pr_warn("Unaligned DMA to non-hfh memory: PA %#llx/%#lx\n",
		/* ... */);
#endif
	/* ... */
/* Undo __dma_prep_page() once the DMA transfer has completed. */
static void __dma_complete_page(struct page *page, unsigned long offset,
				size_t size, enum dma_data_direction direction)
{
	/* ... */
}
/* Walk a physical address range page by page, preparing each page for DMA. */
static void __dma_prep_pa_range(dma_addr_t dma_addr, size_t size,
				enum dma_data_direction direction)
{
	unsigned long offset = dma_addr & (PAGE_SIZE - 1);
	/* ... */
	__dma_prep_page(page, offset, bytes, direction);
	/* ... */
/* Walk a physical address range page by page, completing each page after DMA. */
static void __dma_complete_pa_range(dma_addr_t dma_addr, size_t size,
				    enum dma_data_direction direction)
{
	unsigned long offset = dma_addr & (PAGE_SIZE - 1);
	/* ... */
	__dma_complete_page(page, offset, bytes, direction);
	/* ... */
static int tile_dma_map_sg(struct device *dev, struct scatterlist *sglist,
			   int nents, enum dma_data_direction direction,
			   /* ... */)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
#ifdef CONFIG_NEED_SG_DMA_LENGTH
		sg->dma_length = sg->length;
#endif
	/* ... */
/* tile_dma_unmap_sg(): complete each scatterlist entry after the DMA. */
	BUG_ON(!valid_dma_direction(direction));
static dma_addr_t tile_dma_map_page(struct device *dev, struct page *page,
				    unsigned long offset, size_t size,
				    enum dma_data_direction direction,
				    /* ... */)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
	__dma_prep_page(page, offset, size, direction);
	/* ... */
/* tile_dma_unmap_page(): complete the page once the DMA is done. */
	BUG_ON(!valid_dma_direction(direction));
static void tile_dma_sync_single_for_cpu(struct device *dev,
					 dma_addr_t dma_handle, size_t size,
					 enum dma_data_direction direction)
{
	BUG_ON(!valid_dma_direction(direction));

	__dma_complete_pa_range(dma_handle, size, direction);
}
static void tile_dma_sync_single_for_device(struct device *dev,
					    dma_addr_t dma_handle, size_t size,
					    enum dma_data_direction direction)
{
	__dma_prep_pa_range(dma_handle, size, direction);
}
static void tile_dma_sync_sg_for_cpu(struct device *dev,
				     struct scatterlist *sglist, int nelems,
				     enum dma_data_direction direction)
{
	/* ... */
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
static void tile_dma_sync_sg_for_device(struct device *dev,
					struct scatterlist *sglist, int nelems,
					enum dma_data_direction direction)
{
	/* ... */
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
static struct dma_map_ops tile_default_dma_map_ops = {
	.alloc = tile_dma_alloc_coherent,
	.free = tile_dma_free_coherent,
	.map_page = tile_dma_map_page,
	.unmap_page = tile_dma_unmap_page,
	.map_sg = tile_dma_map_sg,
	.unmap_sg = tile_dma_unmap_sg,
	.sync_single_for_cpu = tile_dma_sync_single_for_cpu,
	.sync_single_for_device = tile_dma_sync_single_for_device,
	.sync_sg_for_cpu = tile_dma_sync_sg_for_cpu,
	.sync_sg_for_device = tile_dma_sync_sg_for_device,
	.mapping_error = tile_dma_mapping_error,
	.dma_supported = tile_dma_supported
};
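/*
 * Example (illustrative sketch, not part of the original file): drivers do
 * not call the hooks above directly; they use the generic DMA API, which
 * dispatches through the dma_map_ops installed for the device.  "dev",
 * "buf" and "len" below are assumptions made for the sketch.
 *
 *	dma_addr_t handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, handle))
 *		return -ENOMEM;
 *	... let the device perform the transfer ...
 *	dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
 */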
static void *tile_pci_dma_alloc_coherent(struct device *dev, size_t size,
					 dma_addr_t *dma_handle, gfp_t gfp,
					 /* ... */)
{
	/* Prefer memory on the NUMA node closest to the requesting device. */
	int node = dev_to_node(dev);
	/* ... */
/* Free memory allocated with tile_pci_dma_alloc_coherent(). */
static void tile_pci_dma_free_coherent(struct device *dev, size_t size,
				       void *vaddr, dma_addr_t dma_handle,
				       /* ... */)
{
	/* ... */
}
static int tile_pci_dma_map_sg(struct device *dev, struct scatterlist *sglist,
			       int nents, enum dma_data_direction direction,
			       /* ... */)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
#ifdef CONFIG_NEED_SG_DMA_LENGTH
		sg->dma_length = sg->length;
#endif
	/* ... */
static void tile_pci_dma_unmap_sg(struct device *dev,
				  struct scatterlist *sglist, int nents,
				  enum dma_data_direction direction, /* ... */)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
static dma_addr_t tile_pci_dma_map_page(struct device *dev, struct page *page,
					unsigned long offset, size_t size,
					enum dma_data_direction direction,
					/* ... */)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
	__dma_prep_page(page, offset, size, direction);
	/* ... */
static void tile_pci_dma_unmap_page(struct device *dev, dma_addr_t dma_address,
				    size_t size,
				    enum dma_data_direction direction,
				    /* ... */)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
static void tile_pci_dma_sync_single_for_cpu(struct device *dev,
					     dma_addr_t dma_handle, size_t size,
					     enum dma_data_direction direction)
{
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
	__dma_complete_pa_range(dma_handle, size, direction);
}
static void tile_pci_dma_sync_single_for_device(struct device *dev,
						dma_addr_t dma_handle,
						size_t size,
						enum dma_data_direction direction)
{
	/* ... */
	__dma_prep_pa_range(dma_handle, size, direction);
}
static void tile_pci_dma_sync_sg_for_cpu(struct device *dev,
					 struct scatterlist *sglist, int nelems,
					 enum dma_data_direction direction)
{
	/* ... */
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
static void tile_pci_dma_sync_sg_for_device(struct device *dev,
					    struct scatterlist *sglist,
					    int nelems,
					    enum dma_data_direction direction)
{
	/* ... */
	BUG_ON(!valid_dma_direction(direction));
	/* ... */
static struct dma_map_ops tile_pci_default_dma_map_ops = {
	.alloc = tile_pci_dma_alloc_coherent,
	.free = tile_pci_dma_free_coherent,
	.map_page = tile_pci_dma_map_page,
	.unmap_page = tile_pci_dma_unmap_page,
	.map_sg = tile_pci_dma_map_sg,
	.unmap_sg = tile_pci_dma_unmap_sg,
	.sync_single_for_cpu = tile_pci_dma_sync_single_for_cpu,
	.sync_single_for_device = tile_pci_dma_sync_single_for_device,
	.sync_sg_for_cpu = tile_pci_dma_sync_sg_for_cpu,
	.sync_sg_for_device = tile_pci_dma_sync_sg_for_device,
	.mapping_error = tile_pci_dma_mapping_error,
	.dma_supported = tile_pci_dma_supported
};
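/*
 * Example (illustrative sketch, not part of the original file): a PCI
 * driver obtains coherent memory through the generic API, which lands in
 * the .alloc/.free hooks above.  "pdev", "ring" and "ring_dma" below are
 * assumptions made for the sketch.
 *
 *	void *ring = dma_alloc_coherent(&pdev->dev, PAGE_SIZE,
 *					&ring_dma, GFP_KERNEL);
 *	if (!ring)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(&pdev->dev, PAGE_SIZE, ring, ring_dma);
 */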
#ifdef CONFIG_SWIOTLB
/* Coherent allocations for devices using the swiotlb-backed DMA ops. */
static void *tile_swiotlb_alloc_coherent(struct device *dev, size_t size,
					 dma_addr_t *dma_handle, gfp_t gfp,
					 /* ... */)
{
	/* ... */
}

static void tile_swiotlb_free_coherent(struct device *dev, size_t size,
				       void *vaddr, dma_addr_t dma_handle,
				       /* ... */)
{
	/* ... */
}
/* ... */
	.alloc = tile_swiotlb_alloc_coherent,
	.free = tile_swiotlb_free_coherent,
	/* ... */
#ifdef CONFIG_ARCH_HAS_DMA_SET_COHERENT_MASK
int dma_set_coherent_mask(struct device *dev, u64 mask)
{
	struct dma_map_ops *dma_ops = get_dma_ops(dev);

	/* Clamp narrow masks to what the device can reach by direct DMA. */
	if (((dma_ops == gx_pci_dma_map_ops) ||
	     (dma_ops == gx_legacy_pci_dma_map_ops)) &&
	    (mask <= DMA_BIT_MASK(32))) {
		if (mask > dev->archdata.max_direct_dma_addr)
			mask = dev->archdata.max_direct_dma_addr;
	}
	/* ... */
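/*
 * Example (illustrative sketch, not part of the original file): a driver
 * negotiates its coherent mask through the generic call below, which is
 * routed to the override above.  "pdev" is an assumption made for the
 * sketch.
 *
 *	if (dma_set_coherent_mask(&pdev->dev, DMA_BIT_MASK(32)))
 *		dev_warn(&pdev->dev, "cannot set 32-bit coherent DMA mask\n");
 */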