arm64/sysreg: Standardise naming for ID_AA64MMFR2_EL1.VARange
author Mark Brown <broonie@kernel.org>
Mon, 5 Sep 2022 22:54:08 +0000 (23:54 +0100)
committer Catalin Marinas <catalin.marinas@arm.com>
Fri, 9 Sep 2022 09:59:03 +0000 (10:59 +0100)
The kernel refers to ID_AA64MMFR2_EL1.VARange as LVA. In preparation for
automatic generation of defines for the system registers, bring the naming
used by the kernel into sync with that of DDI0487H.a. No functional change.

Signed-off-by: Mark Brown <broonie@kernel.org>
Reviewed-by: Kristina Martsenko <kristina.martsenko@arm.com>
Link: https://lore.kernel.org/r/20220905225425.1871461-12-broonie@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/include/asm/assembler.h
arch/arm64/include/asm/sysreg.h
arch/arm64/kernel/cpufeature.c
arch/arm64/kernel/head.S
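
Before the per-file hunks, a minimal C sketch of how the renamed field is
consumed. This is illustrative only and not part of the patch; it assumes the
existing read_sysreg_s() and cpuid_feature_extract_unsigned_field() helpers
from <asm/sysreg.h> and <asm/cpufeature.h>:

	u64 mmfr2;
	unsigned int varange;

	/* Raw ID_AA64MMFR2_EL1 value for the current CPU. */
	mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);

	/* 4-bit VARange field; non-zero means 52-bit VAs are implemented. */
	varange = cpuid_feature_extract_unsigned_field(mmfr2,
					ID_AA64MMFR2_EL1_VARange_SHIFT);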

arch/arm64/include/asm/assembler.h
index 48c7963..c1fc5f7 100644
@@ -612,7 +612,7 @@ alternative_endif
        .macro  offset_ttbr1, ttbr, tmp
 #ifdef CONFIG_ARM64_VA_BITS_52
        mrs_s   \tmp, SYS_ID_AA64MMFR2_EL1
-       and     \tmp, \tmp, #(0xf << ID_AA64MMFR2_EL1_LVA_SHIFT)
+       and     \tmp, \tmp, #(0xf << ID_AA64MMFR2_EL1_VARange_SHIFT)
        cbnz    \tmp, .Lskipoffs_\@
        orr     \ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
 .Lskipoffs_\@ :
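
The offset_ttbr1 macro above reads ID_AA64MMFR2_EL1 and, when VARange is zero
(48-bit VAs only), applies TTBR1_BADDR_4852_OFFSET to the TTBR1 base address.
A hedged C rendering of that logic, for illustration only; the sketch assumes
the same TTBR1_BADDR_4852_OFFSET constant and sysreg helpers, and the function
name is hypothetical:

#ifdef CONFIG_ARM64_VA_BITS_52
static u64 offset_ttbr1_sketch(u64 ttbr)
{
	u64 mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);

	/* VARange == 0: only 48-bit VAs, so offset the TTBR1 base address. */
	if (!(mmfr2 & (0xfUL << ID_AA64MMFR2_EL1_VARange_SHIFT)))
		ttbr |= TTBR1_BADDR_4852_OFFSET;

	return ttbr;
}
#endif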
arch/arm64/include/asm/sysreg.h
index 410b628..c80f1f7 100644
 #define ID_AA64MMFR2_EL1_ST_SHIFT      28
 #define ID_AA64MMFR2_EL1_NV_SHIFT      24
 #define ID_AA64MMFR2_EL1_CCIDX_SHIFT   20
-#define ID_AA64MMFR2_EL1_LVA_SHIFT     16
+#define ID_AA64MMFR2_EL1_VARange_SHIFT 16
 #define ID_AA64MMFR2_EL1_IESB_SHIFT    12
 #define ID_AA64MMFR2_EL1_LSM_SHIFT     8
 #define ID_AA64MMFR2_EL1_UAO_SHIFT     4
arch/arm64/kernel/cpufeature.c
index 534819a..f927b44 100644
@@ -388,7 +388,7 @@ static const struct arm64_ftr_bits ftr_id_aa64mmfr2[] = {
        ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_ST_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_NV_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_CCIDX_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_LVA_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_VARange_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_IESB_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_LSM_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_UAO_SHIFT, 4, 0),
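
The ftr_id_aa64mmfr2 entry above makes VARange part of the sanitised,
system-wide view of ID_AA64MMFR2_EL1. A minimal sketch of how later code could
query it, assuming the existing read_sanitised_ftr_reg() and
cpuid_feature_extract_unsigned_field() helpers (the function name is
hypothetical):

static bool all_cpus_have_52bit_va_sketch(void)
{
	u64 mmfr2 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR2_EL1);

	/* Non-zero VARange in the sanitised copy: every CPU has 52-bit VAs. */
	return cpuid_feature_extract_unsigned_field(mmfr2,
					ID_AA64MMFR2_EL1_VARange_SHIFT) != 0;
}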
arch/arm64/kernel/head.S
index d040f57..b5accf5 100644
@@ -99,7 +99,7 @@ SYM_CODE_START(primary_entry)
         */
 #if VA_BITS > 48
        mrs_s   x0, SYS_ID_AA64MMFR2_EL1
-       tst     x0, #0xf << ID_AA64MMFR2_EL1_LVA_SHIFT
+       tst     x0, #0xf << ID_AA64MMFR2_EL1_VARange_SHIFT
        mov     x0, #VA_BITS
        mov     x25, #VA_BITS_MIN
        csel    x25, x25, x0, eq
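
In C terms, the tst/csel sequence above selects the runtime VA size. A sketch
under the same assumptions as before (the real check must remain in assembly,
since it runs before the C environment is set up):

	u64 mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);
	int va_bits;

	/* VARange == 0 means 48-bit VAs only, so fall back to VA_BITS_MIN. */
	if (mmfr2 & (0xfUL << ID_AA64MMFR2_EL1_VARange_SHIFT))
		va_bits = VA_BITS;
	else
		va_bits = VA_BITS_MIN;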
@@ -677,7 +677,7 @@ SYM_FUNC_START(__cpu_secondary_check52bitva)
        b.ne    2f
 
        mrs_s   x0, SYS_ID_AA64MMFR2_EL1
-       and     x0, x0, #(0xf << ID_AA64MMFR2_EL1_LVA_SHIFT)
+       and     x0, x0, #(0xf << ID_AA64MMFR2_EL1_VARange_SHIFT)
        cbnz    x0, 2f
 
        update_early_cpu_boot_status \