diff --git a/arch/arm/include/asm/armv7.h b/arch/arm/include/asm/armv7.h
index a13da23cf1..30e7939d8e 100644
--- a/arch/arm/include/asm/armv7.h
+++ b/arch/arm/include/asm/armv7.h
@@ -58,6 +58,7 @@
 
 #ifndef __ASSEMBLY__
 #include <linux/types.h>
+#include <asm/io.h>
 
 /*
  * CP15 Barrier instructions
@@ -69,6 +70,60 @@
 #define CP15DSB asm volatile ("mcr p15, 0, %0, c7, c10, 4" : : "r" (0))
 #define CP15DMB asm volatile ("mcr p15, 0, %0, c7, c10, 5" : : "r" (0))
 
+#ifdef __ARM_ARCH_7A__
+#define ISB asm volatile ("isb" : : : "memory")
+#define DSB asm volatile ("dsb" : : : "memory")
+#define DMB asm volatile ("dmb" : : : "memory")
+#else
+#define ISB CP15ISB
+#define DSB CP15DSB
+#define DMB CP15DMB
+#endif
+
+/*
+ * Workaround for ARM errata # 798870
+ * Set L2ACTLR[7] to reissue any memory transaction in the L2 that has been
+ * stalled for 1024 cycles to verify that its hazard condition still exists.
+ */
+static inline void v7_enable_l2_hazard_detect(void)
+{
+	uint32_t val;
+
+	/* L2ACTLR[7]: Enable hazard detect timeout */
+	asm volatile ("mrc p15, 1, %0, c15, c0, 0\n\t" : "=r"(val));
+	val |= (1 << 7);
+	asm volatile ("mcr p15, 1, %0, c15, c0, 0\n\t" : : "r"(val));
+}
+
+/*
+ * Workaround for ARM errata # 799270
+ * Ensure that the L2 logic has been used within the previous 256 cycles
+ * before modifying the ACTLR.SMP bit. This is required during boot before
+ * MMU has been enabled, or during a specified reset or power down sequence.
+ */
+static inline void v7_enable_smp(uint32_t address)
+{
+	uint32_t temp, val;
+
+	/* Read auxiliary control register */
+	asm volatile ("mrc p15, 0, %0, c1, c0, 1\n\t" : "=r"(val));
+
+	/* Enable SMP */
+	val |= (1 << 6);
+
+	/* Dummy read to assure L2 access */
+	temp = readl(address);
+	temp &= 0;
+	val |= temp;
+
+	/* Write auxiliary control register */
+	asm volatile ("mcr p15, 0, %0, c1, c0, 1\n\t" : : "r"(val));
+
+	CP15DSB;
+	CP15ISB;
+}
+
+void v7_en_l2_hazard_detect(void);
 void v7_outer_cache_enable(void);
 void v7_outer_cache_disable(void);
 void v7_outer_cache_flush_all(void);
@@ -76,10 +131,10 @@ void v7_outer_cache_inval_all(void);
 void v7_outer_cache_flush_range(u32 start, u32 end);
 void v7_outer_cache_inval_range(u32 start, u32 end);
 
-#if defined(CONFIG_ARMV7_NONSEC) || defined(CONFIG_ARMV7_VIRT)
+#ifdef CONFIG_ARMV7_NONSEC
 
 int armv7_init_nonsec(void);
-int armv7_update_dt(void *fdt);
+int armv7_apply_memory_carveout(u64 *start, u64 *size);
 bool armv7_boot_nonsec(void);
 
 /* defined in assembly file */
@@ -91,8 +146,13 @@ void _smp_pen(void);
 extern char __secure_start[];
 extern char __secure_end[];
 
-#endif /* CONFIG_ARMV7_NONSEC || CONFIG_ARMV7_VIRT */
+#endif /* CONFIG_ARMV7_NONSEC */
 
+void v7_arch_cp15_set_l2aux_ctrl(u32 l2auxctrl, u32 cpu_midr,
+				 u32 cpu_rev_comb, u32 cpu_variant,
+				 u32 cpu_rev);
+void v7_arch_cp15_set_acr(u32 acr, u32 cpu_midr, u32 cpu_rev_comb,
+			  u32 cpu_variant, u32 cpu_rev);
 #endif /* ! __ASSEMBLY__ */
 
 #endif
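
Note on the two inline workarounds: v7_enable_l2_hazard_detect() and
v7_enable_smp() are self-contained CP15 accessors, so a caller needs nothing
beyond this header. The "temp &= 0; val |= temp;" sequence in v7_enable_smp()
is deliberate: it makes the ACTLR write value data-dependent on the dummy
readl(), so the compiler can neither discard the L2 access nor reorder it
after the mcr. A minimal usage sketch follows; the init-hook name and the
probe address are assumptions for illustration, since the only requirement
on the address is that the dummy read is serviced by the L2.

    #include <asm/armv7.h>

    /* Placeholder: any address whose read is backed by the L2 (assumption) */
    #define SOC_L2_PROBE_ADDR	0x80000000

    static void soc_apply_arm_errata(void)
    {
    	v7_enable_l2_hazard_detect();		/* erratum 798870 */
    	v7_enable_smp(SOC_L2_PROBE_ADDR);	/* erratum 799270 */
    }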
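
Note on the new CP15 hooks: v7_arch_cp15_set_l2aux_ctrl() and
v7_arch_cp15_set_acr() receive the CPU identification pre-decoded rather than
reading it themselves. The sketch below shows one way a caller could derive
those arguments from the Main ID Register; the variant field in MIDR[23:20]
and the revision field in MIDR[3:0] are architectural, while the function
name and the (variant << 4) | revision packing of cpu_rev_comb are
assumptions made for this example.

    #include <asm/armv7.h>

    static void apply_cp15_errata_settings(u32 acr, u32 l2auxctrl)
    {
    	u32 midr, variant, rev, rev_comb;

    	/* MIDR: Main ID Register, CP15 c0, c0, 0 */
    	asm volatile ("mrc p15, 0, %0, c0, c0, 0" : "=r" (midr));
    	variant = (midr >> 20) & 0xf;		/* major revision, rN */
    	rev = midr & 0xf;			/* minor revision, pN */
    	rev_comb = (variant << 4) | rev;	/* assumed packing */

    	v7_arch_cp15_set_l2aux_ctrl(l2auxctrl, midr, rev_comb, variant, rev);
    	v7_arch_cp15_set_acr(acr, midr, rev_comb, variant, rev);
    }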
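
Note on the non-secure API change: replacing armv7_update_dt(void *fdt) with
armv7_apply_memory_carveout(u64 *start, u64 *size) moves the device-tree
editing out of this code path. The reading suggested by the signature is that
the caller passes one memory bank at a time and the function adjusts the
range in place to exclude the secure-code carveout, leaving generic fixup
code to write the result into the FDT. A hedged caller sketch, with the bank
array shape and loop assumed:

    #include <asm/armv7.h>

    /* Illustrative only: trim the secure carveout from each DRAM bank */
    static void trim_secure_carveout(u64 base[], u64 size[], int nbanks)
    {
    	int i;

    	for (i = 0; i < nbanks; i++)
    		armv7_apply_memory_carveout(&base[i], &size[i]);
    }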