X-Git-Url: https://git.sur5r.net/?a=blobdiff_plain;f=arch%2Farm%2Fcpu%2Farmv7%2Fcache_v7.c;h=99484c2636378a2f340c5c4b6d647ec08bf53930;hb=9ce751a6f5d6683b0fabd3cfc62da7e9f30bc57f;hp=a5aa4fa6436ca719505c04472bb3b60bc67c7d4f;hpb=633b6ccedf9d536fd93299b2207a5227dedd987c;p=u-boot

diff --git a/arch/arm/cpu/armv7/cache_v7.c b/arch/arm/cpu/armv7/cache_v7.c
index a5aa4fa643..99484c2636 100644
--- a/arch/arm/cpu/armv7/cache_v7.c
+++ b/arch/arm/cpu/armv7/cache_v7.c
@@ -1,32 +1,22 @@
+// SPDX-License-Identifier: GPL-2.0+
 /*
  * (C) Copyright 2010
  * Texas Instruments, <www.ti.com>
  * Aneesh V <aneesh@ti.com>
- *
- * SPDX-License-Identifier:	GPL-2.0+
  */
 
 #include <linux/types.h>
 #include <common.h>
 #include <asm/armv7.h>
 #include <asm/utils.h>
 
-#define ARMV7_DCACHE_INVAL_ALL		1
-#define ARMV7_DCACHE_CLEAN_INVAL_ALL	2
-#define ARMV7_DCACHE_INVAL_RANGE	3
-#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	4
+#define ARMV7_DCACHE_INVAL_RANGE	1
+#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2
 
 #ifndef CONFIG_SYS_DCACHE_OFF
-/*
- * Write the level and type you want to Cache Size Selection Register(CSSELR)
- * to get size details from Current Cache Size ID Register(CCSIDR)
- */
-static void set_csselr(u32 level, u32 type)
-{
-	u32 csselr = level << 1 | type;
-	/* Write to Cache Size Selection Register(CSSELR) */
-	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
-}
+/* Asm functions from cache_v7_asm.S */
+void v7_flush_dcache_all(void);
+void v7_invalidate_dcache_all(void);
 
 static u32 get_ccsidr(void)
 {
@@ -37,118 +27,6 @@ static u32 get_ccsidr(void)
 	return ccsidr;
 }
 
-static u32 get_clidr(void)
-{
-	u32 clidr;
-
-	/* Read current CP15 Cache Level ID Register */
-	asm volatile ("mrc p15,1,%0,c0,c0,1" : "=r" (clidr));
-	return clidr;
-}
-
-static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
-					 u32 num_ways, u32 way_shift,
-					 u32 log2_line_len)
-{
-	int way, set;
-	u32 setway;
-
-	/*
-	 * For optimal assembly code:
-	 * a. count down
-	 * b. have bigger loop inside
-	 */
-	for (way = num_ways - 1; way >= 0 ; way--) {
-		for (set = num_sets - 1; set >= 0; set--) {
-			setway = (level << 1) | (set << log2_line_len) |
-				 (way << way_shift);
-			/* Invalidate data/unified cache line by set/way */
-			asm volatile ("	mcr p15, 0, %0, c7, c6, 2"
-					: : "r" (setway));
-		}
-	}
-	/* DSB to make sure the operation is complete */
-	DSB;
-}
-
-static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
-					       u32 num_ways, u32 way_shift,
-					       u32 log2_line_len)
-{
-	int way, set;
-	u32 setway;
-
-	/*
-	 * For optimal assembly code:
-	 * a. count down
-	 * b. have bigger loop inside
-	 */
-	for (way = num_ways - 1; way >= 0 ; way--) {
-		for (set = num_sets - 1; set >= 0; set--) {
-			setway = (level << 1) | (set << log2_line_len) |
-				 (way << way_shift);
-			/*
-			 * Clean & Invalidate data/unified
-			 * cache line by set/way
-			 */
-			asm volatile ("	mcr p15, 0, %0, c7, c14, 2"
-					: : "r" (setway));
-		}
-	}
-	/* DSB to make sure the operation is complete */
-	DSB;
-}
-
-static void v7_maint_dcache_level_setway(u32 level, u32 operation)
-{
-	u32 ccsidr;
-	u32 num_sets, num_ways, log2_line_len, log2_num_ways;
-	u32 way_shift;
-
-	set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);
-
-	ccsidr = get_ccsidr();
-
-	log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
-				CCSIDR_LINE_SIZE_OFFSET) + 2;
-	/* Converting from words to bytes */
-	log2_line_len += 2;
-
-	num_ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
-			CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
-	num_sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
-			CCSIDR_NUM_SETS_OFFSET) + 1;
-	/*
-	 * According to ARMv7 ARM number of sets and number of ways need
-	 * not be a power of 2
-	 */
-	log2_num_ways = log_2_n_round_up(num_ways);
-
-	way_shift = (32 - log2_num_ways);
-	if (operation == ARMV7_DCACHE_INVAL_ALL) {
-		v7_inval_dcache_level_setway(level, num_sets, num_ways,
-					     way_shift, log2_line_len);
-	} else if (operation == ARMV7_DCACHE_CLEAN_INVAL_ALL) {
-		v7_clean_inval_dcache_level_setway(level, num_sets, num_ways,
-						   way_shift, log2_line_len);
-	}
-}
-
-static void v7_maint_dcache_all(u32 operation)
-{
-	u32 level, cache_type, level_start_bit = 0;
-	u32 clidr = get_clidr();
-
-	for (level = 0; level < 7; level++) {
-		cache_type = (clidr >> level_start_bit) & 0x7;
-		if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
-		    (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
-		    (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
-			v7_maint_dcache_level_setway(level, operation);
-		level_start_bit += 3;
-	}
-}
-
 static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
 {
 	u32 mva;
@@ -165,27 +43,8 @@ static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
 {
 	u32 mva;
 
-	/*
-	 * If start address is not aligned to cache-line do not
-	 * invalidate the first cache-line
-	 */
-	if (start & (line_len - 1)) {
-		printf("ERROR: %s - start address is not aligned - 0x%08x\n",
-			__func__, start);
-		/* move to next cache line */
-		start = (start + line_len - 1) & ~(line_len - 1);
-	}
-
-	/*
-	 * If stop address is not aligned to cache-line do not
-	 * invalidate the last cache-line
-	 */
-	if (stop & (line_len - 1)) {
-		printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
-			__func__, stop);
-		/* align to the beginning of this cache line */
-		stop &= ~(line_len - 1);
-	}
+	if (!check_cache_range(start, stop))
+		return;
 
 	for (mva = start; mva < stop; mva = mva + line_len) {
 		/* DCIMVAC - Invalidate data cache by MVA to PoC */
@@ -215,7 +74,7 @@ static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
 	}
 
 	/* DSB to make sure the operation is complete */
-	DSB;
+	dsb();
 }
 
 /* Invalidate TLB */
@@ -228,14 +87,14 @@ static void v7_inval_tlb(void)
 	/* Invalidate entire instruction TLB */
 	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
 	/* Full system DSB - make sure that the invalidation is complete */
-	DSB;
+	dsb();
 	/* Full system ISB - make sure the instruction stream sees it */
-	ISB;
+	isb();
 }
 
 void invalidate_dcache_all(void)
 {
-	v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);
+	v7_invalidate_dcache_all();
 
 	v7_outer_cache_inval_all();
 }
@@ -246,7 +105,7 @@ void invalidate_dcache_all(void)
  */
 void flush_dcache_all(void)
 {
-	v7_maint_dcache_all(ARMV7_DCACHE_CLEAN_INVAL_ALL);
+	v7_flush_dcache_all();
 
 	v7_outer_cache_flush_all();
 }
@@ -257,6 +116,8 @@ void flush_dcache_all(void)
  */
 void invalidate_dcache_range(unsigned long start, unsigned long stop)
 {
+	check_cache_range(start, stop);
+
 	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);
 
 	v7_outer_cache_inval_range(start, stop);
@@ -269,6 +130,8 @@ void invalidate_dcache_range(unsigned long start, unsigned long stop)
  */
 void flush_dcache_range(unsigned long start, unsigned long stop)
 {
+	check_cache_range(start, stop);
+
 	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);
 
 	v7_outer_cache_flush_range(start, stop);
@@ -295,6 +158,14 @@ void flush_dcache_all(void)
 {
 }
 
+void invalidate_dcache_range(unsigned long start, unsigned long stop)
+{
+}
+
+void flush_dcache_range(unsigned long start, unsigned long stop)
+{
+}
+
 void arm_init_before_mmu(void)
 {
 }
@@ -322,10 +193,10 @@ void invalidate_icache_all(void)
 	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));
 
 	/* Full system DSB - make sure that the invalidation is complete */
-	DSB;
+	dsb();
 
 	/* ISB - make sure the instruction stream sees it */
-	ISB;
+	isb();
 }
 #else
 void invalidate_icache_all(void)
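
Usage note on the behavioural change above: v7_dcache_inval_range() now returns early when check_cache_range() rejects a misaligned range, instead of silently rounding the start/stop addresses to cache-line boundaries as the removed code did. Callers are therefore expected to pass cache-line-aligned ranges. A minimal caller sketch follows, using U-Boot's standard ARCH_DMA_MINALIGN and ALIGN() helpers; the buffer, length and function names are illustrative only and are not part of this patch:

	#include <common.h>
	#include <asm/cache.h>

	/* Illustrative DMA receive buffer, padded and aligned to the d-cache line size. */
	static u8 example_rx_buf[ALIGN(1500, ARCH_DMA_MINALIGN)]
			__aligned(ARCH_DMA_MINALIGN);

	static void example_invalidate_rx(size_t bytes_received)
	{
		unsigned long start = (unsigned long)example_rx_buf;
		unsigned long stop = start + ALIGN(bytes_received, ARCH_DMA_MINALIGN);

		/* Both addresses are line-aligned, so check_cache_range() accepts the range. */
		invalidate_dcache_range(start, stop);
	}

Driver buffers used for DMA are typically declared this way (or via ALLOC_CACHE_ALIGN_BUFFER) precisely so that cache maintenance never has to touch a line shared with unrelated data.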