#include <linux/types.h>
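/*
 * The *_MASK values isolate the address bits that are significant for each
 * mapping granule (4 KB small pages, 64 KB large pages, 1 MB sections,
 * 16 MB supersections), for the TTB and for L2 page-table pointers.
 * MMU_LOAD_TLB is the value written to the LD_TLB register to latch the
 * CAM/RAM entry registers into the TLB; MMU_GFLUSH is assumed here to be
 * the offset of the global TLB flush register.
 */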
#define MMU_BASE_VAL_MASK	0xFC00
#define MMU_PAGE_MAX		3
#define MMU_ELEMENTSIZE_MAX	3
#define MMU_ADDR_MASK		0xFFFFF000
#define MMU_TTB_MASK		0xFFFFC000
#define MMU_SECTION_ADDR_MASK	0xFFF00000
#define MMU_SSECTION_ADDR_MASK	0xFF000000
#define MMU_PAGE_TABLE_MASK	0xFFFFFC00
#define MMU_LARGE_PAGE_MASK	0xFFFF0000
#define MMU_SMALL_PAGE_MASK	0xFFFFF000

#define MMU_LOAD_TLB		0x00000001
#define MMU_GFLUSH		0x60
				   const u32 preserved_bit,
				   const u32 valid_bit,
				   const u32 virtual_addr_tag);
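/* The MMU is enabled and disabled through the MMU_ENABLE bit of MMU_CNTL. */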
	MMUMMU_CNTLMMU_ENABLE_WRITE32(base_address, HW_SET);

	MMUMMU_CNTLMMU_ENABLE_WRITE32(base_address, HW_CLEAR);
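/*
 * MMU_LOCK.BASE_VALUE sets how many TLB entries are locked down, i.e.
 * protected from being chosen as replacement victims.
 */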
				u32 num_locked_entries)
	MMUMMU_LOCK_BASE_VALUE_WRITE32(base_address, num_locked_entries);
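/*
 * MMU_LOCK.CURRENT_VICTIM selects the TLB entry that the next TLB load
 * will overwrite.
 */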
				u32 victim_entry_num)
	MMUMMU_LOCK_CURRENT_VICTIM_WRITE32(base_address, victim_entry_num);
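/*
 * MMU event (interrupt) handling: events are acknowledged by writing the
 * mask back to MMU_IRQSTATUS, and individual events are disabled or
 * enabled with a read-modify-write of MMU_IRQENABLE.
 */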
	MMUMMU_IRQSTATUS_WRITE_REGISTER32(base_address, irq_mask);

	irq_reg = MMUMMU_IRQENABLE_READ_REGISTER32(base_address);
	MMUMMU_IRQENABLE_WRITE_REGISTER32(base_address, irq_reg & ~irq_mask);

	irq_reg = MMUMMU_IRQENABLE_READ_REGISTER32(base_address);
	MMUMMU_IRQENABLE_WRITE_REGISTER32(base_address, irq_reg | irq_mask);

	*irq_mask = MMUMMU_IRQSTATUS_READ_REGISTER32(base_address);
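/* MMU_FAULT_AD latches the address that raised the most recent MMU fault. */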
	*addr = MMUMMU_FAULT_AD_READ_REGISTER32(base_address);
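/*
 * Program the translation table base.  Masking with ~0x7FUL assumes the
 * L1 translation table is at least 128-byte aligned.
 */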
	load_ttb = ttb_phys_addr & ~0x7FUL;
	MMUMMU_TTB_WRITE_REGISTER32(base_address, load_ttb);
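/* Hardware table-walking logic (TWL) is enabled and disabled via MMU_CNTL. */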
	MMUMMU_CNTLTWL_ENABLE_WRITE32(base_address, HW_SET);

	MMUMMU_CNTLTWL_ENABLE_WRITE32(base_address, HW_CLEAR);
	u32 virtual_addr_tag;
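	/*
	 * TLB load sequence: save MMU_LOCK, program the CAM and RAM entry
	 * registers, point CURRENT_VICTIM at the requested entry, trigger
	 * the load by writing MMU_LOAD_TLB to LD_TLB, then restore MMU_LOCK.
	 */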
	lock_reg = MMUMMU_LOCK_READ_REGISTER32(base_address);

	mmu_set_cam_entry(base_address, mmu_pg_size, preserved_bit, valid_bit,
			  virtual_addr_tag);

	mmu_set_ram_entry(base_address, physical_addr, map_attrs->endianism,
			  map_attrs->element_size, map_attrs->mixed_size);

	MMUMMU_LOCK_CURRENT_VICTIM_WRITE32(base_address, entry_num);

	MMUMMU_LD_TLB_WRITE_REGISTER32(base_address, MMU_LOAD_TLB);

	MMUMMU_LOCK_WRITE_REGISTER32(base_address, lock_reg);
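/*
 * PTE construction: L2 page descriptors carry the endianism attribute at
 * bit 9, while L1 section descriptors carry endianism at bit 15, element
 * size at bit 10 and mixed-size at bit 17 (TI-specific attribute fields).
 * The trailing | 0x2 marks an L1 section descriptor, and & ~0x40000 keeps
 * bit 18 clear so the entry is a plain section rather than a supersection.
 */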
	pte_addr = hw_mmu_pte_addr_l2(pg_tbl_va,
		     (map_attrs->endianism << 9) | (map_attrs->
	pte_addr = hw_mmu_pte_addr_l2(pg_tbl_va,
		     (map_attrs->endianism << 9) | (map_attrs->
	pte_addr = hw_mmu_pte_addr_l1(pg_tbl_va,
		       (map_attrs->endianism << 15) |
		       (map_attrs->element_size << 10) |
		       (map_attrs->mixed_size << 17)) & ~0x40000) | 0x2);
	pte_addr = hw_mmu_pte_addr_l1(pg_tbl_va,
		       (map_attrs->endianism << 15) |
		       (map_attrs->element_size << 10) |
	pte_addr = hw_mmu_pte_addr_l1(pg_tbl_va,
				      virtual_addr & MMU_SECTION_ADDR_MASK);
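	/*
	 * Store the descriptor value into each of the num_entries PTE slots;
	 * 64 KB large pages and 16 MB supersections are assumed to use 16
	 * replicated entries, as the ARM page-table format requires.
	 */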
	while (--num_entries >= 0)
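/*
 * Clearing a mapping repeats the same L1/L2 descriptor address computation
 * and then wipes the corresponding descriptor slot(s).
 */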
	pte_addr = hw_mmu_pte_addr_l2(pg_tbl_va,
	pte_addr = hw_mmu_pte_addr_l2(pg_tbl_va,
	pte_addr = hw_mmu_pte_addr_l1(pg_tbl_va,
	pte_addr = hw_mmu_pte_addr_l1(pg_tbl_va,

	while (--num_entries >= 0)
				   const u32 preserved_bit,
				   const u32 valid_bit,
				   const u32 virtual_addr_tag)
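	/*
	 * CAM entry layout: VA tag in bits [31:12], page size in bits [1:0],
	 * valid bit in bit 2, preserved bit in bit 3.
	 */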
	mmu_cam_reg = (virtual_addr_tag << 12);
	mmu_cam_reg = (mmu_cam_reg) | (page_sz) | (valid_bit << 2) |
	    (preserved_bit << 3);

	MMUMMU_CAM_WRITE_REGISTER32(base_address, mmu_cam_reg);
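	/*
	 * RAM entry layout: physical address in the upper bits, endianism
	 * at bit 9, element size at bits [8:7] and the mixed-page attribute
	 * at bit 6 (TI-specific attribute fields).
	 */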
	mmu_ram_reg = (mmu_ram_reg) | ((endianism << 9) | (element_size << 7) |
				       (mixed_size << 6));

	MMUMMU_RAM_WRITE_REGISTER32(base_address, mmu_ram_reg);