From 6775a8208e3ac60f09b982c2d7b3258e0af86540 Mon Sep 17 00:00:00 2001
From: Simon Glass
Date: Wed, 5 Apr 2017 17:53:18 -0600
Subject: [PATCH] arm: Support cache invalidate

At present there is no operation to invalidate a cache range. One seems
to be needed to fill out the set of cache operations.

Add an implementation based on the existing flush operation.

Signed-off-by: Simon Glass
Reviewed-by: York Sun
---
 arch/arm/cpu/armv8/cache.S    | 24 ++++++++++++++++++++++++
 arch/arm/cpu/armv8/cache_v8.c |  2 +-
 arch/arm/include/asm/system.h | 15 +++++++++++++++
 3 files changed, 40 insertions(+), 1 deletion(-)

diff --git a/arch/arm/cpu/armv8/cache.S b/arch/arm/cpu/armv8/cache.S
index f1deaa7230..7cba308ee7 100644
--- a/arch/arm/cpu/armv8/cache.S
+++ b/arch/arm/cpu/armv8/cache.S
@@ -138,6 +138,30 @@ ENTRY(__asm_flush_dcache_range)
 	dsb	sy
 	ret
 ENDPROC(__asm_flush_dcache_range)
+/*
+ * void __asm_invalidate_dcache_range(start, end)
+ *
+ * invalidate data cache in the range
+ *
+ * x0: start address
+ * x1: end address
+ */
+ENTRY(__asm_invalidate_dcache_range)
+	mrs	x3, ctr_el0
+	ubfm	x3, x3, #16, #19
+	mov	x2, #4
+	lsl	x2, x2, x3		/* cache line size */
+
+	/* x2 <- minimal cache line size in cache system */
+	sub	x3, x2, #1
+	bic	x0, x0, x3
+1:	dc	ivac, x0	/* invalidate data or unified cache */
+	add	x0, x0, x2
+	cmp	x0, x1
+	b.lo	1b
+	dsb	sy
+	ret
+ENDPROC(__asm_invalidate_dcache_range)
 
 /*
  * void __asm_invalidate_icache_all(void)
diff --git a/arch/arm/cpu/armv8/cache_v8.c b/arch/arm/cpu/armv8/cache_v8.c
index bd1c3e0335..adc7e1746f 100644
--- a/arch/arm/cpu/armv8/cache_v8.c
+++ b/arch/arm/cpu/armv8/cache_v8.c
@@ -446,7 +446,7 @@ inline void flush_dcache_all(void)
  */
 void invalidate_dcache_range(unsigned long start, unsigned long stop)
 {
-	__asm_flush_dcache_range(start, stop);
+	__asm_invalidate_dcache_range(start, stop);
 }
 
 /*
diff --git a/arch/arm/include/asm/system.h b/arch/arm/include/asm/system.h
index 9c3261c884..79bd19af7d 100644
--- a/arch/arm/include/asm/system.h
+++ b/arch/arm/include/asm/system.h
@@ -180,6 +180,21 @@ static inline unsigned long read_mpidr(void)
 void __asm_flush_dcache_all(void);
 void __asm_invalidate_dcache_all(void);
 void __asm_flush_dcache_range(u64 start, u64 end);
+
+/**
+ * __asm_invalidate_dcache_range() - Invalidate a range of virtual addresses
+ *
+ * This performs an invalidate from @start to @end - 1. Both addresses
+ * should be cache-line aligned, otherwise this function will align the
+ * start address and may continue past the end address.
+ *
+ * Data in the address range is evicted from the cache and is not written back
+ * to memory.
+ *
+ * @start: Start address to invalidate
+ * @end: End address to invalidate up to (exclusive)
+ */
+void __asm_invalidate_dcache_range(u64 start, u64 end);
 void __asm_invalidate_tlb_all(void);
 void __asm_invalidate_icache_all(void);
 int __asm_invalidate_l3_dcache(void);
--
2.39.5
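
For reviewers, a minimal usage sketch of the new invalidate path (not part
of the patch): a driver invalidates a DMA buffer before the CPU reads data
that a device has written to memory behind the cache. The buffer, its size
and dma_start_read() below are hypothetical; the real call sites would be
existing drivers calling invalidate_dcache_range().

#include <common.h>		/* invalidate_dcache_range() in this era */
#include <asm/cache.h>		/* ARCH_DMA_MINALIGN */

#define BUF_SIZE	4096

/* Cache-line-aligned receive buffer (hypothetical) */
static char dma_buf[BUF_SIZE] __attribute__((aligned(ARCH_DMA_MINALIGN)));

static void read_from_device(void)
{
	/* Device writes BUF_SIZE bytes into dma_buf via DMA (hypothetical) */
	dma_start_read(dma_buf, BUF_SIZE);

	/*
	 * Drop any stale cache lines covering the buffer so the CPU sees
	 * the device's data rather than old cached contents. Unlike a
	 * flush, this discards dirty lines without writing them back, so
	 * the range must not hold unsaved CPU writes.
	 */
	invalidate_dcache_range((unsigned long)dma_buf,
				(unsigned long)dma_buf + BUF_SIZE);

	/* dma_buf now reflects what the device wrote */
}

Before this patch, invalidate_dcache_range() fell back to the flush
routine, which also cleans dirty lines to memory; the new assembly helper
only invalidates, which is what such a receive path actually wants.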