From: York Sun
Date: Mon, 23 Jun 2014 22:15:53 +0000 (-0700)
Subject: ARMv8: Adjust MMU setup
X-Git-Tag: v2014.07~71
X-Git-Url: https://git.sur5r.net/?a=commitdiff_plain;h=22932ffc03e521130cfd33cae1fc2531eb42604a;p=u-boot

ARMv8: Adjust MMU setup

Make the MMU functions reusable. Platform code can set up its own MMU tables.

Signed-off-by: York Sun
CC: David Feng
---

diff --git a/arch/arm/cpu/armv8/cache_v8.c b/arch/arm/cpu/armv8/cache_v8.c
index a96ecda7e3..af3c4945ec 100644
--- a/arch/arm/cpu/armv8/cache_v8.c
+++ b/arch/arm/cpu/armv8/cache_v8.c
@@ -12,15 +12,14 @@ DECLARE_GLOBAL_DATA_PTR;
 
 #ifndef CONFIG_SYS_DCACHE_OFF
-
-static void set_pgtable_section(u64 section, u64 memory_type)
+void set_pgtable_section(u64 *page_table, u64 index, u64 section,
+			 u64 memory_type)
 {
-	u64 *page_table = (u64 *)gd->arch.tlb_addr;
 	u64 value;
 
-	value = (section << SECTION_SHIFT) | PMD_TYPE_SECT | PMD_SECT_AF;
+	value = section | PMD_TYPE_SECT | PMD_SECT_AF;
 	value |= PMD_ATTRINDX(memory_type);
-	page_table[section] = value;
+	page_table[index] = value;
 }
 
 /* to activate the MMU we need to set up virtual memory */
@@ -28,10 +27,13 @@ static void mmu_setup(void)
 {
 	int i, j, el;
 	bd_t *bd = gd->bd;
+	u64 *page_table = (u64 *)gd->arch.tlb_addr;
 
 	/* Setup an identity-mapping for all spaces */
-	for (i = 0; i < (PGTABLE_SIZE >> 3); i++)
-		set_pgtable_section(i, MT_DEVICE_NGNRNE);
+	for (i = 0; i < (PGTABLE_SIZE >> 3); i++) {
+		set_pgtable_section(page_table, i, i << SECTION_SHIFT,
+				    MT_DEVICE_NGNRNE);
+	}
 
 	/* Setup an identity-mapping for all RAM space */
 	for (i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
@@ -39,38 +41,26 @@ static void mmu_setup(void)
 		ulong end = bd->bi_dram[i].start + bd->bi_dram[i].size;
 		for (j = start >> SECTION_SHIFT;
 		     j < end >> SECTION_SHIFT; j++) {
-			set_pgtable_section(j, MT_NORMAL);
+			set_pgtable_section(page_table, j, j << SECTION_SHIFT,
+					    MT_NORMAL);
 		}
 	}
 
 	/* load TTBR0 */
 	el = current_el();
 	if (el == 1) {
-		asm volatile("msr ttbr0_el1, %0"
-			     : : "r" (gd->arch.tlb_addr) : "memory");
-		asm volatile("msr tcr_el1, %0"
-			     : : "r" (TCR_FLAGS | TCR_EL1_IPS_BITS)
-			     : "memory");
-		asm volatile("msr mair_el1, %0"
-			     : : "r" (MEMORY_ATTRIBUTES) : "memory");
+		set_ttbr_tcr_mair(el, gd->arch.tlb_addr,
+				  TCR_FLAGS | TCR_EL1_IPS_BITS,
+				  MEMORY_ATTRIBUTES);
 	} else if (el == 2) {
-		asm volatile("msr ttbr0_el2, %0"
-			     : : "r" (gd->arch.tlb_addr) : "memory");
-		asm volatile("msr tcr_el2, %0"
-			     : : "r" (TCR_FLAGS | TCR_EL2_IPS_BITS)
-			     : "memory");
-		asm volatile("msr mair_el2, %0"
-			     : : "r" (MEMORY_ATTRIBUTES) : "memory");
+		set_ttbr_tcr_mair(el, gd->arch.tlb_addr,
+				  TCR_FLAGS | TCR_EL2_IPS_BITS,
+				  MEMORY_ATTRIBUTES);
 	} else {
-		asm volatile("msr ttbr0_el3, %0"
-			     : : "r" (gd->arch.tlb_addr) : "memory");
-		asm volatile("msr tcr_el3, %0"
-			     : : "r" (TCR_FLAGS | TCR_EL2_IPS_BITS)
-			     : "memory");
-		asm volatile("msr mair_el3, %0"
-			     : : "r" (MEMORY_ATTRIBUTES) : "memory");
+		set_ttbr_tcr_mair(el, gd->arch.tlb_addr,
+				  TCR_FLAGS | TCR_EL3_IPS_BITS,
+				  MEMORY_ATTRIBUTES);
 	}
-
 	/* enable the mmu */
 	set_sctlr(get_sctlr() | CR_M);
 }
diff --git a/arch/arm/include/asm/armv8/mmu.h b/arch/arm/include/asm/armv8/mmu.h
index 1193e76a82..4b7b67b643 100644
--- a/arch/arm/include/asm/armv8/mmu.h
+++ b/arch/arm/include/asm/armv8/mmu.h
@@ -108,4 +108,28 @@
 			TCR_IRGN_WBWA | \
 			TCR_T0SZ(VA_BITS))
 
+#ifndef __ASSEMBLY__
+void set_pgtable_section(u64 *page_table, u64 index,
+			 u64 section, u64 memory_type);
+static inline void set_ttbr_tcr_mair(int el, u64 table, u64 tcr, u64 attr)
+{
+	asm volatile("dsb sy");
+	if (el == 1) {
+		asm volatile("msr ttbr0_el1, %0" : : "r" (table) : "memory");
: : "r" (table) : "memory"); + asm volatile("msr tcr_el1, %0" : : "r" (tcr) : "memory"); + asm volatile("msr mair_el1, %0" : : "r" (attr) : "memory"); + } else if (el == 2) { + asm volatile("msr ttbr0_el2, %0" : : "r" (table) : "memory"); + asm volatile("msr tcr_el2, %0" : : "r" (tcr) : "memory"); + asm volatile("msr mair_el2, %0" : : "r" (attr) : "memory"); + } else if (el == 3) { + asm volatile("msr ttbr0_el3, %0" : : "r" (table) : "memory"); + asm volatile("msr tcr_el3, %0" : : "r" (tcr) : "memory"); + asm volatile("msr mair_el3, %0" : : "r" (attr) : "memory"); + } else { + hang(); + } + asm volatile("isb"); +} +#endif #endif /* _ASM_ARMV8_MMU_H_ */