#define __raw_ioswabb(a, x)	(x)
#define __raw_ioswabw(a, x)	(x)
#define __raw_ioswabl(a, x)	(x)
#define __raw_ioswabq(a, x)	(x)
#define ____raw_ioswabq(a, x)	(x)
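The __raw_ioswab* hooks are identity macros in this listing; a platform that needs byte swapping on raw MMIO would supply real swapping here. A minimal sketch of how such a hook is typically applied inside a raw accessor (the helper name is hypothetical, not part of this header):

#include <linux/types.h>

/* Hypothetical illustration: a raw 16-bit read routed through the swab
 * hook.  With the identity definition above this is a plain load. */
static inline u16 example_raw_readw(const volatile u16 *addr)
{
	u16 v = *addr;			/* raw bus access */

	return __raw_ioswabw(addr, v);	/* no-op with the macro above */
}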
#define IO_SPACE_LIMIT	0xffff

#define __SLOW_DOWN_IO
#define SLOW_DOWN_IO
#define isa_page_to_bus	page_to_phys
#define virt_to_bus	virt_to_phys
#define bus_to_virt	phys_to_virt

#define page_to_phys(page)	((dma_addr_t)page_to_pfn(page) << PAGE_SHIFT)
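These helpers convert kernel virtual addresses and struct page pointers to physical/bus addresses. A small usage sketch (the driver context, buf and page are assumptions for illustration; new code should normally use the DMA API rather than virt_to_bus()):

#include <linux/mm.h>
#include <linux/io.h>
#include <linux/printk.h>

/* Hypothetical example: derive bus/physical addresses for a buffer and
 * for a struct page. */
static void example_addresses(void *buf, struct page *page)
{
	unsigned long bus = virt_to_bus(buf);		/* virt -> bus */
	dma_addr_t paddr = page_to_phys(page);		/* page -> phys */

	pr_info("bus=%#lx phys=%pad\n", bus, &paddr);
}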
#define __IS_LOW512(addr)	(!((phys_t)(addr) & (phys_t) ~0x1fffffffULL))
#define ioremap(offset, size)			__ioremap_mode((offset), (size), _CACHE_UNCACHED)
#define ioremap_nocache(offset, size)		__ioremap_mode((offset), (size), _CACHE_UNCACHED)
#define ioremap_cachable(offset, size)		__ioremap_mode((offset), (size), _page_cachable_default)
#define ioremap_cacheable_cow(offset, size)	__ioremap_mode((offset), (size), _CACHE_CACHABLE_COW)
#define ioremap_uncached_accelerated(offset, size)	__ioremap_mode((offset), (size), _CACHE_UNCACHED_ACCELERATED)
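All ioremap variants funnel into __ioremap_mode() with a different cache attribute. A minimal usage sketch (the device base and register offset are assumptions, not a real device):

#include <linux/io.h>
#include <linux/errno.h>

#define EXAMPLE_DEV_BASE	0x1f000000	/* hypothetical device base */
#define EXAMPLE_DEV_SIZE	0x1000
#define EXAMPLE_REG_CTRL	0x04		/* hypothetical register */

static int example_probe(void)
{
	void __iomem *regs;

	regs = ioremap(EXAMPLE_DEV_BASE, EXAMPLE_DEV_SIZE);	/* uncached mapping */
	if (!regs)
		return -ENOMEM;

	writel(0x1, regs + EXAMPLE_REG_CTRL);	/* enable the device */
	(void)readl(regs + EXAMPLE_REG_CTRL);	/* read back to post the write */

	iounmap(regs);
	return 0;
}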
#define __IS_KSEG1(addr)	(((unsigned long)(addr) & ~0x1fffffffUL) == CKSEG1)

#define war_octeon_io_reorder_wmb()	do { } while (0)
#define __BUILD_MEMORY_SINGLE(pfx, bwlq, type, irq)
#define __BUILD_IOPORT_SINGLE(pfx, bwlq, type, p, slow)
#define __BUILD_MEMORY_PFX(bus, bwlq, type)
#define BUILDIO_MEM(bwlq, type)
#define __BUILD_IOPORT_PFX(bus, bwlq, type)
#define BUILDIO_IOPORT(bwlq, type)
#define __BUILDIO(bwlq, type)
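The builder macros above are listed by signature only; their multi-line bodies, which generate the readX/writeX and inX/outX accessors for each access width, are not reproduced in this listing. As a rough, simplified sketch of the pattern such a builder follows (the names and body are illustrative, not the header's actual expansion):

#include <linux/types.h>

/* Illustrative only: a cut-down builder that stamps out a read/write
 * pair for one access width.  The real builders also handle byte
 * swapping, 64-bit splitting and ordering barriers. */
#define EXAMPLE_BUILD_MEM(bwlq, type)					\
static inline void example_write##bwlq(type val, volatile void __iomem *mem) \
{									\
	*(volatile type *)mem = val;					\
}									\
									\
static inline type example_read##bwlq(volatile void __iomem *mem)	\
{									\
	return *(volatile type *)mem;					\
}

EXAMPLE_BUILD_MEM(b, u8)	/* example_readb() / example_writeb() */
EXAMPLE_BUILD_MEM(l, u32)	/* example_readl() / example_writel() */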
#define readb_relaxed	readb
#define readw_relaxed	readw
#define readl_relaxed	readl
#define readq_relaxed	readq
#define readb_be(addr)		__raw_readb((__force unsigned *)(addr))
#define readw_be(addr)		be16_to_cpu(__raw_readw((__force unsigned *)(addr)))
#define readl_be(addr)		be32_to_cpu(__raw_readl((__force unsigned *)(addr)))
#define readq_be(addr)		be64_to_cpu(__raw_readq((__force unsigned *)(addr)))

#define writeb_be(val, addr)	__raw_writeb((val), (__force unsigned *)(addr))
#define writew_be(val, addr)	__raw_writew(cpu_to_be16((val)), (__force unsigned *)(addr))
#define writel_be(val, addr)	__raw_writel(cpu_to_be32((val)), (__force unsigned *)(addr))
#define writeq_be(val, addr)	__raw_writeq(cpu_to_be64((val)), (__force unsigned *)(addr))
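The *_be accessors read and write registers that are big-endian on the bus, converting to and from CPU byte order around a raw access. A short usage sketch (the mapped base and register offset are assumptions):

#include <linux/io.h>
#include <linux/types.h>

#define EXAMPLE_BE_STATUS	0x10	/* hypothetical big-endian register */

/* Hypothetical example: read a big-endian status register and ack the
 * set bits (write-1-to-clear). */
static void example_handle_status(void __iomem *base)
{
	u32 status = readl_be(base + EXAMPLE_BE_STATUS);

	if (status)
		writel_be(status, base + EXAMPLE_BE_STATUS);
}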
#define readq	readq
#define writeq	writeq
#define __BUILD_MEMORY_STRING(bwlq, type)
#define __BUILD_IOPORT_STRING(bwlq, type)
#define BUILDSTRING(bwlq, type)
#define mmiowb()	asm volatile ("sync" ::: "memory")
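mmiowb() orders MMIO writes with respect to a subsequent spinlock release, so writes from different CPUs reach the device in lock order. A typical, illustrative pattern (the lock and doorbell register are hypothetical):

#include <linux/io.h>
#include <linux/spinlock.h>

#define EXAMPLE_REG_KICK	0x08	/* hypothetical doorbell register */

static DEFINE_SPINLOCK(example_lock);

/* Hypothetical example: ensure the doorbell write is ordered before the
 * lock is dropped and another CPU can write its own doorbell. */
static void example_kick(void __iomem *regs)
{
	spin_lock(&example_lock);
	writel(1, regs + EXAMPLE_REG_KICK);
	mmiowb();			/* order the MMIO write before unlock */
	spin_unlock(&example_lock);
}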
#define dma_cache_wback_inv(start, size)	do { (void) (start); (void) (size); } while (0)
#define dma_cache_wback(start, size)		do { (void) (start); (void) (size); } while (0)
#define dma_cache_inv(start, size)		do { (void) (start); (void) (size); } while (0)
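As listed here the dma_cache_* hooks discard their arguments and perform no cache maintenance; on a non-coherent MIPS configuration they would write back and/or invalidate the data cache over the given range. A sketch of where such a hook is called (the buffer and length are hypothetical):

#include <linux/types.h>

/* Hypothetical example: invalidate stale cache lines before the CPU
 * reads a buffer a device has just filled via DMA.  With the no-op
 * definitions above this compiles away entirely. */
static void example_post_dma_read(void *buf, size_t len)
{
	dma_cache_inv((unsigned long)buf, len);
}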
#define __CSR_32_ADJUST	0

#define csr_out32(v, a)	(*(volatile u32 *)((unsigned long)(a) + __CSR_32_ADJUST) = (v))
#define csr_in32(a)	(*(volatile u32 *)((unsigned long)(a) + __CSR_32_ADJUST))
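csr_in32()/csr_out32() perform 32-bit accesses to control/status registers at a fixed adjustment (zero in this listing). A brief usage sketch (the CSR address is a placeholder, not a real device):

#include <linux/types.h>

#define EXAMPLE_CSR_ADDR	0xb8000000UL	/* hypothetical KSEG1 CSR address */

/* Hypothetical example: set a bit in a CSR with a read-modify-write. */
static void example_csr_set_bit(u32 bit)
{
	u32 val = csr_in32(EXAMPLE_CSR_ADDR);

	csr_out32(val | bit, EXAMPLE_CSR_ADDR);
}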
#define xlate_dev_mem_ptr(p)	__va(p)
#define xlate_dev_kmem_ptr(p)	p