X-Git-Url: https://git.sur5r.net/?a=blobdiff_plain;f=arch%2Farm%2Fcpu%2Farmv7%2Fcache_v7.c;h=99484c2636378a2f340c5c4b6d647ec08bf53930;hb=9ce751a6f5d6683b0fabd3cfc62da7e9f30bc57f;hp=8748c145c44a6d15805a4adcb79ecfdb6de49d55;hpb=fed029f3c31b7d5df674b5090a13356b631918c7;p=u-boot

diff --git a/arch/arm/cpu/armv7/cache_v7.c b/arch/arm/cpu/armv7/cache_v7.c
index 8748c145c4..99484c2636 100644
--- a/arch/arm/cpu/armv7/cache_v7.c
+++ b/arch/arm/cpu/armv7/cache_v7.c
@@ -1,47 +1,22 @@
+// SPDX-License-Identifier: GPL-2.0+
 /*
  * (C) Copyright 2010
  * Texas Instruments, <www.ti.com>
  * Aneesh V <aneesh@ti.com>
- *
- * See file CREDITS for list of people who contributed to this
- * project.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License as
- * published by the Free Software Foundation; either version 2 of
- * the License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
- * MA 02111-1307 USA
 */
 #include <linux/types.h>
 #include <common.h>
 #include <asm/armv7.h>
 #include <asm/utils.h>
 
-#define ARMV7_DCACHE_INVAL_ALL		1
-#define ARMV7_DCACHE_CLEAN_INVAL_ALL	2
-#define ARMV7_DCACHE_INVAL_RANGE	3
-#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	4
+#define ARMV7_DCACHE_INVAL_RANGE	1
+#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2
 
 #ifndef CONFIG_SYS_DCACHE_OFF
-/*
- * Write the level and type you want to Cache Size Selection Register(CSSELR)
- * to get size details from Current Cache Size ID Register(CCSIDR)
- */
-static void set_csselr(u32 level, u32 type)
-{
-	u32 csselr = level << 1 | type;
-	/* Write to Cache Size Selection Register(CSSELR) */
-	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
-}
+/* Asm functions from cache_v7_asm.S */
+void v7_flush_dcache_all(void);
+void v7_invalidate_dcache_all(void);
 
 static u32 get_ccsidr(void)
 {
@@ -52,119 +27,7 @@ static u32 get_ccsidr(void)
 	return ccsidr;
 }
 
-static u32 get_clidr(void)
-{
-	u32 clidr;
-
-	/* Read current CP15 Cache Level ID Register */
-	asm volatile ("mrc p15,1,%0,c0,c0,1" : "=r" (clidr));
-	return clidr;
-}
-
-static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
-					 u32 num_ways, u32 way_shift,
-					 u32 log2_line_len)
-{
-	int way, set, setway;
-
-	/*
-	 * For optimal assembly code:
-	 *	a. count down
-	 *	b. have bigger loop inside
-	 */
-	for (way = num_ways - 1; way >= 0 ; way--) {
-		for (set = num_sets - 1; set >= 0; set--) {
-			setway = (level << 1) | (set << log2_line_len) |
-				 (way << way_shift);
-			/* Invalidate data/unified cache line by set/way */
-			asm volatile ("	mcr p15, 0, %0, c7, c6, 2"
-					: : "r" (setway));
-		}
-	}
-	/* DSB to make sure the operation is complete */
-	CP15DSB;
-}
-
-static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
-					       u32 num_ways, u32 way_shift,
-					       u32 log2_line_len)
-{
-	int way, set, setway;
-
-	/*
-	 * For optimal assembly code:
-	 *	a. count down
-	 *	b. have bigger loop inside
-	 */
-	for (way = num_ways - 1; way >= 0 ; way--) {
-		for (set = num_sets - 1; set >= 0; set--) {
-			setway = (level << 1) | (set << log2_line_len) |
-				 (way << way_shift);
-			/*
-			 * Clean & Invalidate data/unified
-			 * cache line by set/way
-			 */
-			asm volatile ("	mcr p15, 0, %0, c7, c14, 2"
-					: : "r" (setway));
-		}
-	}
-	/* DSB to make sure the operation is complete */
-	CP15DSB;
-}
-
-static void v7_maint_dcache_level_setway(u32 level, u32 operation)
-{
-	u32 ccsidr;
-	u32 num_sets, num_ways, log2_line_len, log2_num_ways;
-	u32 way_shift;
-
-	set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);
-
-	ccsidr = get_ccsidr();
-
-	log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
-				CCSIDR_LINE_SIZE_OFFSET) + 2;
-	/* Converting from words to bytes */
-	log2_line_len += 2;
-
-	num_ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
-			CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
-	num_sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
-			CCSIDR_NUM_SETS_OFFSET) + 1;
-	/*
-	 * According to ARMv7 ARM number of sets and number of ways need
-	 * not be a power of 2
-	 */
-	log2_num_ways = log_2_n_round_up(num_ways);
-
-	way_shift = (32 - log2_num_ways);
-	if (operation == ARMV7_DCACHE_INVAL_ALL) {
-		v7_inval_dcache_level_setway(level, num_sets, num_ways,
-				      way_shift, log2_line_len);
-	} else if (operation == ARMV7_DCACHE_CLEAN_INVAL_ALL) {
-		v7_clean_inval_dcache_level_setway(level, num_sets, num_ways,
-				      way_shift, log2_line_len);
-	}
-}
-
-static void v7_maint_dcache_all(u32 operation)
-{
-	u32 level, cache_type, level_start_bit = 0;
-
-	u32 clidr = get_clidr();
-
-	for (level = 0; level < 7; level++) {
-		cache_type = (clidr >> level_start_bit) & 0x7;
-		if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
-		    (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
-		    (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
-			v7_maint_dcache_level_setway(level, operation);
-		level_start_bit += 3;
-	}
-}
-
-static void v7_dcache_clean_inval_range(u32 start,
-					u32 stop, u32 line_len)
+static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
 {
 	u32 mva;
 
@@ -180,27 +43,8 @@ static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
 {
 	u32 mva;
 
-	/*
-	 * If start address is not aligned to cache-line do not
-	 * invalidate the first cache-line
-	 */
-	if (start & (line_len - 1)) {
-		printf("ERROR: %s - start address is not aligned - 0x%08x\n",
-			__func__, start);
-		/* move to next cache line */
-		start = (start + line_len - 1) & ~(line_len - 1);
-	}
-
-	/*
-	 * If stop address is not aligned to cache-line do not
-	 * invalidate the last cache-line
-	 */
-	if (stop & (line_len - 1)) {
-		printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
-			__func__, stop);
-		/* align to the beginning of this cache line */
-		stop &= ~(line_len - 1);
-	}
+	if (!check_cache_range(start, stop))
+		return;
 
 	for (mva = start; mva < stop; mva = mva + line_len) {
 		/* DCIMVAC - Invalidate data cache by MVA to PoC */
@@ -230,7 +74,7 @@ static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
 	}
 
 	/* DSB to make sure the operation is complete */
-	CP15DSB;
+	dsb();
 }
 
 /* Invalidate TLB */
@@ -243,14 +87,14 @@ static void v7_inval_tlb(void)
 	/* Invalidate entire instruction TLB */
 	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
 	/* Full system DSB - make sure that the invalidation is complete */
-	CP15DSB;
+	dsb();
 	/* Full system ISB - make sure the instruction stream sees it */
-	CP15ISB;
+	isb();
 }
 
 void invalidate_dcache_all(void)
 {
-	v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);
+	v7_invalidate_dcache_all();
 
v7_outer_cache_inval_all(); } @@ -261,7 +105,7 @@ void invalidate_dcache_all(void) */ void flush_dcache_all(void) { - v7_maint_dcache_all(ARMV7_DCACHE_CLEAN_INVAL_ALL); + v7_flush_dcache_all(); v7_outer_cache_flush_all(); } @@ -272,6 +116,7 @@ void flush_dcache_all(void) */ void invalidate_dcache_range(unsigned long start, unsigned long stop) { + check_cache_range(start, stop); v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE); @@ -285,6 +130,8 @@ void invalidate_dcache_range(unsigned long start, unsigned long stop) */ void flush_dcache_range(unsigned long start, unsigned long stop) { + check_cache_range(start, stop); + v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE); v7_outer_cache_flush_range(start, stop); @@ -302,15 +149,6 @@ void mmu_page_table_flush(unsigned long start, unsigned long stop) flush_dcache_range(start, stop); v7_inval_tlb(); } - -/* - * Flush range from all levels of d-cache/unified-cache used: - * Affects the range [start, start + size - 1] - */ -void flush_cache(unsigned long start, unsigned long size) -{ - flush_dcache_range(start, start + size); -} #else /* #ifndef CONFIG_SYS_DCACHE_OFF */ void invalidate_dcache_all(void) { @@ -332,10 +170,6 @@ void arm_init_before_mmu(void) { } -void flush_cache(unsigned long start, unsigned long size) -{ -} - void mmu_page_table_flush(unsigned long start, unsigned long stop) { } @@ -359,10 +193,10 @@ void invalidate_icache_all(void) asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0)); /* Full system DSB - make sure that the invalidation is complete */ - CP15DSB; + dsb(); /* ISB - make sure the instruction stream sees it */ - CP15ISB; + isb(); } #else void invalidate_icache_all(void) @@ -370,41 +204,10 @@ void invalidate_icache_all(void) } #endif -/* - * Stub implementations for outer cache operations - */ -void __v7_outer_cache_enable(void) -{ -} -void v7_outer_cache_enable(void) - __attribute__((weak, alias("__v7_outer_cache_enable"))); - -void __v7_outer_cache_disable(void) -{ -} -void v7_outer_cache_disable(void) - __attribute__((weak, alias("__v7_outer_cache_disable"))); - -void __v7_outer_cache_flush_all(void) -{ -} -void v7_outer_cache_flush_all(void) - __attribute__((weak, alias("__v7_outer_cache_flush_all"))); - -void __v7_outer_cache_inval_all(void) -{ -} -void v7_outer_cache_inval_all(void) - __attribute__((weak, alias("__v7_outer_cache_inval_all"))); - -void __v7_outer_cache_flush_range(u32 start, u32 end) -{ -} -void v7_outer_cache_flush_range(u32 start, u32 end) - __attribute__((weak, alias("__v7_outer_cache_flush_range"))); - -void __v7_outer_cache_inval_range(u32 start, u32 end) -{ -} -void v7_outer_cache_inval_range(u32 start, u32 end) - __attribute__((weak, alias("__v7_outer_cache_inval_range"))); +/* Stub implementations for outer cache operations */ +__weak void v7_outer_cache_enable(void) {} +__weak void v7_outer_cache_disable(void) {} +__weak void v7_outer_cache_flush_all(void) {} +__weak void v7_outer_cache_inval_all(void) {} +__weak void v7_outer_cache_flush_range(u32 start, u32 end) {} +__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}