15 #ifndef _ASM_TILE_CACHEFLUSH_H
16 #define _ASM_TILE_CACHEFLUSH_H
/*
 * No-op cache-maintenance hooks.  The "do { } while (0)" bodies are
 * deliberate empty statements usable in any statement context;
 * presumably the data caches on this architecture are kept coherent
 * in hardware, so the generic VM's flush calls need no work —
 * TODO(review): confirm against the original file's header comment.
 */
#define flush_cache_all()			do { } while (0)
#define flush_cache_mm(mm)			do { } while (0)
#define flush_cache_dup_mm(mm)			do { } while (0)
#define flush_cache_range(vma, start, end)	do { } while (0)
#define flush_cache_page(vma, vmaddr, pfn)	do { } while (0)
/* 0: tells the generic VM that flush_dcache_page() is a no-op here. */
#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE	0
#define flush_dcache_page(page)			do { } while (0)
#define flush_dcache_mmap_lock(mapping)		do { } while (0)
#define flush_dcache_mmap_unlock(mapping)	do { } while (0)
#define flush_cache_vmap(start, end)		do { } while (0)
#define flush_cache_vunmap(start, end)		do { } while (0)
#define flush_icache_page(vma, pg)		do { } while (0)
#define flush_icache_user_range(vma, pg, adr, len)	do { } while (0)

/* Flush the entire L1 instruction cache by flushing its full size. */
#define __flush_icache() __flush_icache_range(0, CHIP_L1I_CACHE_SIZE())

/* Instruction-cache flushes are real work, unlike the dcache no-ops. */
#define flush_icache_range __flush_icache_range
/*
 * NOTE(review): fragment of copy_to_user_page() — the start of the
 * signature (function name and the vma/page/vaddr parameters) and
 * part of the body are missing from this extraction.  The visible
 * tail suggests it ends by flushing the instruction cache over
 * [dst, dst + len); restore the full definition from the original
 * source before building.
 */
66 void *
dst,
void *
src,
int len)
71 (
unsigned long) dst + len);
/*
 * copy_from_user_page() - copy data out of a user page into a kernel
 * buffer.  A plain memcpy suffices; the vma/page/vaddr arguments are
 * accepted for interface compatibility but unused, and no cache
 * maintenance is performed on this path.
 */
#define copy_from_user_page(vma, page, vaddr, dst, src, len) \
	memcpy((dst), (src), (len))
/*
 * NOTE(review): __inv_buffer() is truncated in this extraction — the
 * declarations of 'next' and 'finish' and the loop body are missing.
 * The visible "while (next < finish)" suggests a per-cache-line walk
 * over [buffer, buffer + size), presumably invalidating each line —
 * confirm against the original source before building.
 */
85 static inline void __inv_buffer(
void *
buffer,
size_t size)
89 while (next < finish) {
/*
 * NOTE(review): __flush_buffer() is truncated in this extraction —
 * only the signature and the loop header survive.  Presumably the
 * same per-cache-line walk as __inv_buffer(), flushing (writing
 * back) each line in [buffer, buffer + size); confirm against the
 * original source before building.
 */
96 static inline void __flush_buffer(
void *
buffer,
size_t size)
100 while (next < finish) {
/*
 * NOTE(review): __finv_buffer() is truncated in this extraction —
 * body missing after the loop header.  The "finv" name suggests a
 * combined flush-and-invalidate walk over the buffer's cache lines;
 * confirm against the original source before building.
 */
107 static inline void __finv_buffer(
void *buffer,
size_t size)
111 while (next < finish) {
/*
 * inv_buffer() - invalidate a VA range from the cache.
 * @buffer: start of the range
 * @size:   length of the range in bytes
 *
 * Thin public wrapper around __inv_buffer(); any alignment or padding
 * behavior is whatever __inv_buffer() provides (not visible here).
 */
static inline void inv_buffer(void *buffer, size_t size)
{
	__inv_buffer(buffer, size);
}
/*
 * flush_buffer_local() - flush (write back) a VA range from this
 * CPU's cache.
 * @buffer: start of the range
 * @size:   length of the range in bytes
 *
 * Thin public wrapper around __flush_buffer(); "local" in the name
 * suggests it affects only the calling CPU — TODO confirm.
 */
static inline void flush_buffer_local(void *buffer, size_t size)
{
	__flush_buffer(buffer, size);
}
/*
 * finv_buffer_local() - flush and invalidate a VA range from this
 * CPU's cache.
 * @buffer: start of the range
 * @size:   length of the range in bytes
 *
 * Thin public wrapper around __finv_buffer(); "local" in the name
 * suggests it affects only the calling CPU — TODO confirm.
 */
static inline void finv_buffer_local(void *buffer, size_t size)
{
	__finv_buffer(buffer, size);
}
/*
 * NOTE(review): sched_cacheflush() — only the signature is visible;
 * the body lies beyond this extraction.  Likely an empty stub given
 * the no-op flush macros above, but confirm against the original
 * source before relying on that.
 */
160 static inline void sched_cacheflush(
void)