arch/arm: move hard code macro to kconfig

Signed-off-by: zhangyuan21 <zhangyuan21@xiaomi.com>
This commit is contained in:
zhangyuan21 2023-01-13 10:46:43 +08:00 committed by Xiang Xiao
parent 6975bbb38d
commit fc623949a3
7 changed files with 106 additions and 63 deletions

View File

@ -4,3 +4,23 @@
#
comment "ARM Configuration Options"
config ARM_ALIGNMENT_TRAP
bool "Enable Alignment Check at __start"
default n
config ARM_DCACHE_WRITETHROUGH
bool "Enable DCACHE Write-Through at __start"
default n
config ARM_CACHE_ROUND_ROBIN
bool "Enable Cache Round Robin Replacement Policy at __start"
default n
config ARM_DCACHE_DISABLE
bool "Disable DCACHE at __start"
default n
config ARM_ICACHE_DISABLE
bool "Disable ICACHE at __start"
default n

View File

@ -39,14 +39,6 @@
* Configuration
****************************************************************************/
/* Hard-coded options */
#undef CPU_ALIGNMENT_TRAP
#undef CPU_DCACHE_WRITETHROUGH
#undef CPU_CACHE_ROUND_ROBIN
#undef CPU_DCACHE_DISABLE
#undef CPU_ICACHE_DISABLE
/* There are three operational memory configurations:
*
* 1. We execute in place in FLASH (CONFIG_BOOT_RUNFROMFLASH=y). In this case
@ -340,7 +332,7 @@ __start:
mcr p15, 0, r0, c8, c7 /* Invalidate I,D TLBs */
mcr p15, 0, r4, c2, c0 /* Load page table pointer */
#ifdef CPU_DCACHE_WRITETHROUGH
#ifdef CONFIG_ARM_DCACHE_WRITETHROUGH
mov r0, #4 /* Disable write-back on caches explicitly */
mcr p15, 7, r0, c15, c0, 0
#endif
@ -395,22 +387,22 @@ __start:
#endif
/* CR_RR - Round Robin cache replacement */
#ifdef CPU_CACHE_ROUND_ROBIN
#ifdef CONFIG_ARM_CACHE_ROUND_ROBIN
orr r0, r0, #(CR_RR)
#endif
/* CR_C - Dcache enable */
#ifndef CPU_DCACHE_DISABLE
#ifndef CONFIG_ARM_DCACHE_DISABLE
orr r0, r0, #(CR_C)
#endif
/* CR_I - Icache enable */
#ifndef CPU_ICACHE_DISABLE
#ifndef CONFIG_ARM_ICACHE_DISABLE
orr r0, r0, #(CR_I)
#endif
/* CR_A - Alignment abort enable */
#ifdef CPU_ALIGNMENT_TRAP
#ifdef CONFIG_ARM_ALIGNMENT_TRAP
orr r0, r0, #(CR_A)
#endif
mcr p15, 0, r0, c1, c0, 0 /* write control reg */

View File

@ -169,3 +169,23 @@ config ARMV7A_DECODEFIQ
Select this option if your platform supports the function
arm_decodefiq(). This is used primarily to support secure TrustZone
interrupts received on the FIQ vector.
config ARMV7A_ALIGNMENT_TRAP
bool "Enable Alignment Check at __start"
default n
config ARMV7A_CACHE_ROUND_ROBIN
bool "Enable Cache Round Robin Replacement Policy at __start"
default n
config ARMV7A_DCACHE_DISABLE
bool "Disable DCACHE at __start"
default n
config ARMV7A_ICACHE_DISABLE
bool "Disable ICACHE at __start"
default n
config ARMV7A_AFE_ENABLE
bool "Enable Access Flag at __start"
default n

View File

@ -42,14 +42,6 @@
* Configuration
****************************************************************************/
/* Hard-coded options */
#undef CPU_ALIGNMENT_TRAP
#undef CPU_CACHE_ROUND_ROBIN
#undef CPU_DCACHE_DISABLE
#undef CPU_ICACHE_DISABLE
#undef CPU_AFE_ENABLE
/* Check for the identity mapping: For this configuration, this would be
* the case where the virtual beginning of RAM is the same as the physical
* beginning of RAM.
@ -307,7 +299,7 @@ __cpu3_start:
orr r0, r0, #(SCTLR_V)
#endif
#if defined(CPU_CACHE_ROUND_ROBIN) && !defined(CONFIG_ARCH_CORTEXA5)
#if defined(CONFIG_ARMV7A_CACHE_ROUND_ROBIN) && !defined(CONFIG_ARCH_CORTEXA5)
/* Round Robin cache replacement
*
* SCTLR_RR Bit 14: The Cortex-A5 processor only supports a fixed random
@ -321,7 +313,7 @@ __cpu3_start:
* after SMP cache coherency has been setup.
*/
#if !defined(CPU_DCACHE_DISABLE) && !defined(CONFIG_SMP)
#if !defined(CONFIG_ARMV7A_DCACHE_DISABLE) && !defined(CONFIG_SMP)
/* Dcache enable
*
* SCTLR_C Bit 2: DCache enable
@ -330,7 +322,7 @@ __cpu3_start:
orr r0, r0, #(SCTLR_C)
#endif
#if !defined(CPU_ICACHE_DISABLE) && !defined(CONFIG_SMP)
#if !defined(CONFIG_ARMV7A_ICACHE_DISABLE) && !defined(CONFIG_SMP)
/* Icache enable
*
* SCTLR_I Bit 12: ICache enable
@ -339,7 +331,7 @@ __cpu3_start:
orr r0, r0, #(SCTLR_I)
#endif
#ifdef CPU_ALIGNMENT_TRAP
#ifdef CONFIG_ARMV7A_ALIGNMENT_TRAP
/* Alignment abort enable
*
* SCTLR_A Bit 1: Strict alignment enabled
@ -357,7 +349,7 @@ __cpu3_start:
orr r0, r0, #(SCTLR_EE)
#endif
#ifdef CPU_AFE_ENABLE
#ifdef CONFIG_ARMV7A_AFE_ENABLE
/* AP[0:2] Permissions model
*
* SCTLR_AFE Bit 29: Full, legacy access permissions behavior (reset value).

View File

@ -39,14 +39,6 @@
* Configuration
****************************************************************************/
/* Hard-coded options */
#undef CPU_ALIGNMENT_TRAP
#undef CPU_CACHE_ROUND_ROBIN
#undef CPU_DCACHE_DISABLE
#undef CPU_ICACHE_DISABLE
#undef CPU_AFE_ENABLE
/* There are three operational memory configurations:
*
* 1. We execute in place in FLASH (CONFIG_BOOT_RUNFROMFLASH=y). In this case
@ -463,7 +455,7 @@ __cpu0_start:
orr r0, r0, #(SCTLR_V)
#endif
#if defined(CPU_CACHE_ROUND_ROBIN) && !defined(CONFIG_ARCH_CORTEXA5)
#if defined(CONFIG_ARMV7A_CACHE_ROUND_ROBIN) && !defined(CONFIG_ARCH_CORTEXA5)
/* Round Robin cache replacement
*
* SCTLR_RR Bit 14: The Cortex-A5 processor only supports a fixed random
@ -477,7 +469,7 @@ __cpu0_start:
* after SMP cache coherency has been setup.
*/
#if !defined(CPU_DCACHE_DISABLE) && !defined(CONFIG_SMP)
#if !defined(CONFIG_ARMV7A_DCACHE_DISABLE) && !defined(CONFIG_SMP)
/* Dcache enable
*
* SCTLR_C Bit 2: DCache enable
@ -486,7 +478,7 @@ __cpu0_start:
orr r0, r0, #(SCTLR_C)
#endif
#if !defined(CPU_ICACHE_DISABLE) && !defined(CONFIG_SMP)
#if !defined(CONFIG_ARMV7A_ICACHE_DISABLE) && !defined(CONFIG_SMP)
/* Icache enable
*
* SCTLR_I Bit 12: ICache enable
@ -495,7 +487,7 @@ __cpu0_start:
orr r0, r0, #(SCTLR_I)
#endif
#ifdef CPU_ALIGNMENT_TRAP
#ifdef CONFIG_ARMV7A_ALIGNMENT_TRAP
/* Alignment abort enable
*
* SCTLR_A Bit 1: Strict alignment enabled
@ -513,7 +505,7 @@ __cpu0_start:
orr r0, r0, #(SCTLR_EE)
#endif
#ifdef CPU_AFE_ENABLE
#ifdef CONFIG_ARMV7A_AFE_ENABLE
/* AP[0:2] Permissions model
*
* SCTLR_AFE Bit 29: Full, legacy access permissions behavior (reset value).

View File

@ -180,3 +180,43 @@ config ARMV7R_DECODEFIQ
---help---
Select this option if your platform supports the function
arm_decodefiq().
config ARMV7R_ALIGNMENT_TRAP
bool "Enable Alignment Check at __start"
default n
config ARMV7R_CACHE_ROUND_ROBIN
bool "Enable Cache Round Robin Replacement Policy at __start"
default n
config ARMV7R_DCACHE_DISABLE
bool "Disable DCACHE at __start"
default n
config ARMV7R_ICACHE_DISABLE
bool "Disable ICACHE at __start"
default n
config ARMV7R_SCTLR_CCP15BEN
bool "Enable CP15 Barrier at __start"
default y
config ARMV7R_BACKGROUND_REGION
bool "Enable MPU Background region at __start"
default y
config ARMV7R_DIV0_FAULT
bool "Enable DIV0 Fault at __start"
default n
config ARMV7R_FAST_INTERRUPT
bool "Enable Fast Interrupts at __start"
default n
config ARMV7R_IMPL_VECTORS
bool "Enable Interrupt Vector at __start"
default n
config ARMV7R_NONMASKABLE_FIQ
bool "Enable Non-maskable FIQ Support at __start"
default n

View File

@ -35,19 +35,6 @@
* Configuration
****************************************************************************/
/* Hard-coded options */
#undef CPU_ALIGNMENT_TRAP
#undef CPU_CACHE_ROUND_ROBIN
#undef CPU_DCACHE_DISABLE
#undef CPU_ICACHE_DISABLE
#define CPU_SCTLR_CCP15BEN 1
#define CPU_BACKGROUND_REGION 1
#undef CPU_DIV0_FAULT
#undef CPU_FAST_INTERRUPT
#undef CPU_IMPL_VECTORS
#undef CPU_NONMASKABLE_FIQ
/* There are three operational memory configurations:
*
* 1. We execute in place in FLASH (CONFIG_BOOT_RUNFROMFLASH=y). In this case
@ -204,7 +191,7 @@ __start:
/* Set configured bits */
#ifdef CPU_ALIGNMENT_TRAP
#ifdef CONFIG_ARMV7R_ALIGNMENT_TRAP
/* Alignment abort enable
*
* SCTLR_A Bit 1: Strict alignment enabled
@ -213,7 +200,7 @@ __start:
orr r0, r0, #(SCTLR_A)
#endif
#ifndef CPU_DCACHE_DISABLE
#ifndef CONFIG_ARMV7R_DCACHE_DISABLE
/* Dcache enable
*
* SCTLR_C Bit 2: DCache enable
@ -222,7 +209,7 @@ __start:
orr r0, r0, #(SCTLR_C)
#endif
#ifdef CPU_SCTLR_CCP15BEN
#ifdef CONFIG_ARMV7R_SCTLR_CCP15BEN
/* Enable memory barriers
*
* SCTLR_CCP15BEN Bit 5: CP15 barrier enable
@ -231,7 +218,7 @@ __start:
orr r0, r0, #(SCTLR_CCP15BEN)
#endif
#ifndef CPU_ICACHE_DISABLE
#ifndef CONFIG_ARMV7R_ICACHE_DISABLE
/* Icache enable
*
* SCTLR_I Bit 12: ICache enable
@ -249,7 +236,7 @@ __start:
orr r0, r0, #(SCTLR_V)
#endif
#ifdef CPU_CACHE_ROUND_ROBIN
#ifdef CONFIG_ARMV7R_CACHE_ROUND_ROBIN
/* Round Robin cache replacement
*
* SCTLR_RR Bit 14: Round-robin replacement strategy.
@ -258,7 +245,7 @@ __start:
orr r0, r0, #(SCTLR_RR)
#endif
#ifdef CPU_BACKGROUND_REGION
#ifdef CONFIG_ARMV7R_BACKGROUND_REGION
/* Allow PL1 access to back region when MPU is enabled
*
* SCTLR_BR Bit 17: Background Region bit
@ -267,7 +254,7 @@ __start:
orr r0, r0, #(SCTLR_BR)
#endif
#ifdef CPU_DIV0_FAULT
#ifdef CONFIG_ARMV7R_DIV0_FAULT
/* Enable divide by zero faults
*
* SCTLR_DZ Bit 19: Divide by Zero fault enable bit
@ -276,7 +263,7 @@ __start:
orr r0, r0, #(SCTLR_DZ)
#endif
#ifdef CPU_FAST_INTERRUPT
#ifdef CONFIG_ARMV7R_FAST_INTERRUPT
/* Fast interrupts configuration enable bit
*
* SCTLR_FI Bit 21: Fast interrupts configuration enable bit
@ -285,7 +272,7 @@ __start:
orr r0, r0, #(SCTLR_FI)
#endif
#ifdef CPU_IMPL_VECTORS
#ifdef CONFIG_ARMV7R_IMPL_VECTORS
/* Implementation defined interrupt vectors
*
* SCTLR_VE Bit 24: Interrupt Vectors Enable bit
@ -303,7 +290,7 @@ __start:
orr r0, r0, #(SCTLR_EE)
#endif
#ifdef CPU_NONMASKABLE_FIQ
#ifdef CONFIG_ARMV7R_NONMASKABLE_FIQ
/* Non-maskable FIQ support
*
* SCTLR_NMFI Bit 27: Non-maskable FIQ (NMFI) support
@ -430,7 +417,7 @@ arm_data_initialize:
cmp r1, r2
blt 3b
#ifndef CPU_DCACHE_DISABLE
#ifndef CONFIG_ARMV7R_DCACHE_DISABLE
/* Flush the copied RAM functions into physical RAM so that will
* be available when fetched into the I-Cache.
*