#include <asm-offsets.h>
#include <config.h>
-#include <version.h>
#include <asm/macro.h>
+#include <asm/system.h>
#include <linux/linkage.h>
/*
- * void __asm_flush_dcache_level(level)
+ * void __asm_dcache_level(level)
*
- * clean and invalidate one level cache.
+ * flush or invalidate one level of data cache.
*
* x0: cache level
- * x1: 0 flush & invalidate, 1 invalidate only
+ * x1: 0 clean & invalidate, 1 invalidate only
* x2~x9: clobbered
*/
-ENTRY(__asm_flush_dcache_level)
+ENTRY(__asm_dcache_level)
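+ /*
+  * CSSELR_EL1 takes the zero-based cache level in bits [3:1]; bit 0 (InD)
+  * is left clear to select the data/unified cache, hence the shift left
+  * by one below.
+  */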
lsl x12, x0, #1
msr csselr_el1, x12 /* select cache level */
isb /* sync change of csselr_el1 */
b.ge loop_set
ret
-ENDPROC(__asm_flush_dcache_level)
+ENDPROC(__asm_dcache_level)
/*
- * void __asm_flush_dcache_all(int invalidate_only)
+ * void __asm_dcache_all(int invalidate_only)
*
- * x0: 0 flush & invalidate, 1 invalidate only
+ * x0: 0 clean & invalidate, 1 invalidate only
*
- * clean and invalidate all data cache by SET/WAY.
+ * flush or invalidate all levels of data cache by SET/WAY.
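+ *
+ * A set/way operation iterates over every set and way of each data/unified
+ * cache level up to the Level of Coherency; note that set/way maintenance
+ * only affects the caches of the core executing it.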
*/
ENTRY(__asm_dcache_all)
mov x1, x0
and x12, x12, #7 /* x12 <- cache type */
cmp x12, #2
b.lt skip /* skip if no cache or icache */
- bl __asm_flush_dcache_level /* x1 = 0 flush, 1 invalidate */
+ bl __asm_dcache_level /* x1 = 0 flush, 1 invalidate */
skip:
add x0, x0, #1 /* increment cache level */
cmp x11, x0
b.gt loop_level
mov x0, #0
- msr csselr_el1, x0 /* resotre csselr_el1 */
+ msr csselr_el1, x0 /* restore csselr_el1 */
dsb sy
isb
mov lr, x15
ENDPROC(__asm_dcache_all)
ENTRY(__asm_flush_dcache_all)
- mov x16, lr
mov x0, #0
- bl __asm_dcache_all
- mov lr, x16
- ret
+ b __asm_dcache_all
ENDPROC(__asm_flush_dcache_all)
ENTRY(__asm_invalidate_dcache_all)
- mov x16, lr
- mov x0, #0xffff
- bl __asm_dcache_all
- mov lr, x16
- ret
+ mov x0, #0x1
+ b __asm_dcache_all
ENDPROC(__asm_invalidate_dcache_all)
/*
isb sy
ret
ENDPROC(__asm_invalidate_icache_all)
+
+ENTRY(__asm_invalidate_l3_dcache)
+ mov x0, #0 /* return 0 (success) */
+ ret
+ENDPROC(__asm_invalidate_l3_dcache)
+ .weak __asm_invalidate_l3_dcache
+
+ENTRY(__asm_flush_l3_dcache)
+ mov x0, #0 /* return 0 (success) */
+ ret
+ENDPROC(__asm_flush_l3_dcache)
+ .weak __asm_flush_l3_dcache
+
+ENTRY(__asm_invalidate_l3_icache)
+ mov x0, #0 /* return 0 (success) */
+ ret
+ENDPROC(__asm_invalidate_l3_icache)
+ .weak __asm_invalidate_l3_icache
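+
+/*
+ * Illustrative caller (an assumption for reference, not part of this file):
+ * generic C cache code would typically pair the core-local helpers above
+ * with these weak L3 hooks, e.g.
+ *
+ *	void flush_dcache_all(void)
+ *	{
+ *		__asm_flush_dcache_all();
+ *		__asm_flush_l3_dcache();
+ *	}
+ *
+ * Platforms with an external L3 cache override the weak symbols with their
+ * own implementations.
+ */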
+
+/*
+ * void __asm_switch_ttbr(ulong new_ttbr)
+ *
+ * Safely switches to a new page table.
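+ *
+ * It does so by disabling the MMU and caches at the current exception level,
+ * invalidating the TLBs, installing the new TTBR0, and then restoring the
+ * original SCTLR so that the MMU and caches are re-enabled.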
+ */
+ENTRY(__asm_switch_ttbr)
+ /* x2 = SCTLR (live throughout the function) */
+ switch_el x4, 3f, 2f, 1f
+3: mrs x2, sctlr_el3
+ b 0f
+2: mrs x2, sctlr_el2
+ b 0f
+1: mrs x2, sctlr_el1
+0:
+
+ /* Clear CR_M | CR_C | CR_I in SCTLR to disable the MMU and caches */
+ movn x1, #(CR_M | CR_C | CR_I)
+ and x1, x2, x1
+ switch_el x4, 3f, 2f, 1f
+3: msr sctlr_el3, x1
+ b 0f
+2: msr sctlr_el2, x1
+ b 0f
+1: msr sctlr_el1, x1
+0: isb
+
+ /* This call only clobbers x30 (lr) and x9 (unused) */
+ mov x3, x30
+ bl __asm_invalidate_tlb_all
+
+ /* From here on we're running safely with caches disabled */
+
+ /* Set TTBR to our first argument */
+ switch_el x4, 3f, 2f, 1f
+3: msr ttbr0_el3, x0
+ b 0f
+2: msr ttbr0_el2, x0
+ b 0f
+1: msr ttbr0_el1, x0
+0: isb
+
+ /* Restore original SCTLR and thus enable caches again */
+ switch_el x4, 3f, 2f, 1f
+3: msr sctlr_el3, x2
+ b 0f
+2: msr sctlr_el2, x2
+ b 0f
+1: msr sctlr_el1, x2
+0: isb
+
+ ret x3
+ENDPROC(__asm_switch_ttbr)
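+
+/*
+ * Illustrative call site (an assumption, not part of this file): code that
+ * has prepared a new set of page tables at 'new_tables' could install them
+ * with:
+ *
+ *	__asm_switch_ttbr((ulong)new_tables);
+ *
+ * Callers must tolerate the short window in which the MMU and caches are
+ * disabled while the switch takes place.
+ */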