X-Git-Url: https://git.sur5r.net/?a=blobdiff_plain;ds=sidebyside;f=arch%2Farm%2Fcpu%2Farmv7%2Fcache_v7_asm.S;h=bd27ab219c6a752043148ff21798727b725986f8;hb=c1a16c3ab541c014b029b42cc27cae496107e170;hp=2e4629f927e78cddf2e938fc06f447313b63ef0b;hpb=c09d29057ab0b04db0857d319c6bff74de31b9c3;p=u-boot

diff --git a/arch/arm/cpu/armv7/cache_v7_asm.S b/arch/arm/cpu/armv7/cache_v7_asm.S
index 2e4629f927..bd27ab219c 100644
--- a/arch/arm/cpu/armv7/cache_v7_asm.S
+++ b/arch/arm/cpu/armv7/cache_v7_asm.S
@@ -7,7 +7,7 @@
 #include
 #include
 
-#ifdef CONFIG_SYS_THUMB_BUILD
+#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
 #define ARM(x...)
 #define THUMB(x...)	x
 #else
@@ -82,3 +82,73 @@ ENTRY(v7_flush_dcache_all)
 THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
 	bx	lr
 ENDPROC(v7_flush_dcache_all)
+
+/*
+ * v7_invalidate_dcache_all()
+ *
+ * Invalidate the whole D-cache.
+ *
+ * Corrupted registers: r0-r7, r9-r11 (r6 only in Thumb mode)
+ *
+ * Note: copied from __v7_flush_dcache_all above with
+ *	mcr	p15, 0, r11, c7, c14, 2
+ * Replaced with:
+ *	mcr	p15, 0, r11, c7, c6, 2
+ */
+ENTRY(__v7_invalidate_dcache_all)
+	dmb					@ ensure ordering with previous memory accesses
+	mrc	p15, 1, r0, c0, c0, 1		@ read clidr
+	mov	r3, r0, lsr #23			@ move LoC into position
+	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
+	beq	inval_finished			@ if loc is 0, then no need to clean
+	mov	r10, #0				@ start clean at cache level 0
+inval_levels:
+	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
+	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
+	and	r1, r1, #7			@ mask of the bits for current cache only
+	cmp	r1, #2				@ see what cache we have at this level
+	blt	inval_skip			@ skip if no cache, or just i-cache
+	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
+	isb					@ isb to sync the new cssr&csidr
+	mrc	p15, 1, r1, c0, c0, 0		@ read the new csidr
+	and	r2, r1, #7			@ extract the length of the cache lines
+	add	r2, r2, #4			@ add 4 (line length offset)
+	movw	r4, #0x3ff
+	ands	r4, r4, r1, lsr #3		@ find maximum number on the way size
+	clz	r5, r4				@ find bit position of way size increment
+	movw	r7, #0x7fff
+	ands	r7, r7, r1, lsr #13		@ extract max number of the index size
+inval_loop1:
+	mov	r9, r7				@ create working copy of max index
+inval_loop2:
+ ARM(	orr	r11, r10, r4, lsl r5	)	@ factor way and cache number into r11
+ THUMB(	lsl	r6, r4, r5		)
+ THUMB(	orr	r11, r10, r6		)	@ factor way and cache number into r11
+ ARM(	orr	r11, r11, r9, lsl r2	)	@ factor index number into r11
+ THUMB(	lsl	r6, r9, r2		)
+ THUMB(	orr	r11, r11, r6		)	@ factor index number into r11
+	mcr	p15, 0, r11, c7, c6, 2		@ invalidate by set/way
+	subs	r9, r9, #1			@ decrement the index
+	bge	inval_loop2
+	subs	r4, r4, #1			@ decrement the way
+	bge	inval_loop1
+inval_skip:
+	add	r10, r10, #2			@ increment cache number
+	cmp	r3, r10
+	bgt	inval_levels
+inval_finished:
+	mov	r10, #0				@ switch back to cache level 0
+	mcr	p15, 2, r10, c0, c0, 0		@ select current cache level in cssr
+	dsb	st
+	isb
+	bx	lr
+ENDPROC(__v7_invalidate_dcache_all)
+
+ENTRY(v7_invalidate_dcache_all)
+ ARM(	stmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
+ THUMB(	stmfd	sp!, {r4-r7, r9-r11, lr}	)
+	bl	__v7_invalidate_dcache_all
+ ARM(	ldmfd	sp!, {r4-r5, r7, r9-r11, lr}	)
+ THUMB(	ldmfd	sp!, {r4-r7, r9-r11, lr}	)
+	bx	lr
+ENDPROC(v7_invalidate_dcache_all)
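
The set/way arithmetic in the added routine is dense, so here is a C rendering of the same walk for reference. This is an illustrative sketch only, not part of the diff or of U-Boot: the helper names (read_clidr, read_ccsidr, write_csselr, dcisw) are invented for the sketch, it assumes an ARMv7 target built with GCC-style inline assembly, and it visits ways and sets in ascending order, which is equivalent to the descending loops used in the assembly.

	#include <stdint.h>

	/* Invented CP15 accessors for this sketch (ARMv7, GCC inline asm). */
	static inline uint32_t read_clidr(void)
	{
		uint32_t val;

		asm volatile("mrc p15, 1, %0, c0, c0, 1" : "=r"(val));
		return val;
	}

	static inline uint32_t read_ccsidr(void)
	{
		uint32_t val;

		asm volatile("mrc p15, 1, %0, c0, c0, 0" : "=r"(val));
		return val;
	}

	static inline void write_csselr(uint32_t val)
	{
		asm volatile("mcr p15, 2, %0, c0, c0, 0" : : "r"(val));
		asm volatile("isb");
	}

	static inline void dcisw(uint32_t setway)
	{
		/* DCISW: invalidate D-cache line by set/way (c7, c6, 2). */
		asm volatile("mcr p15, 0, %0, c7, c6, 2" : : "r"(setway) : "memory");
	}

	/* C rendering of the walk performed by __v7_invalidate_dcache_all. */
	static void v7_invalidate_dcache_all_sketch(void)
	{
		uint32_t clidr, loc, level;

		asm volatile("dmb" : : : "memory");	/* order against earlier accesses */

		clidr = read_clidr();
		loc = (clidr >> 24) & 7;		/* Level of Coherency */

		for (level = 0; level < loc; level++) {
			uint32_t ctype = (clidr >> (3 * level)) & 7;
			uint32_t ccsidr, log2_line_len, max_way, max_set, way_shift;
			uint32_t way, set;

			if (ctype < 2)			/* no cache, or I-cache only */
				continue;

			write_csselr(level << 1);	/* select this data/unified cache */
			ccsidr = read_ccsidr();

			log2_line_len = (ccsidr & 7) + 4;	/* log2(line size in bytes) */
			max_way = (ccsidr >> 3) & 0x3ff;	/* number of ways - 1 */
			max_set = (ccsidr >> 13) & 0x7fff;	/* number of sets - 1 */
			way_shift = max_way ? __builtin_clz(max_way) : 0;

			for (way = 0; way <= max_way; way++)
				for (set = 0; set <= max_set; set++)
					dcisw((way << way_shift) |
					      (set << log2_line_len) |
					      (level << 1));
		}

		write_csselr(0);			/* back to cache level 0 */
		asm volatile("dsb st" : : : "memory");
		asm volatile("isb");
	}

The assembly counts ways and sets downward only because that makes each loop-termination test a single subs/bge pair; for invalidation the visiting order does not matter.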