/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_CACHE_H
#define __ASM_CACHE_H

#include <asm/cputype.h>
#define CTR_L1IP_SHIFT		14
#define CTR_L1IP_MASK		3
#define CTR_DMINLINE_SHIFT	16
#define CTR_IMINLINE_SHIFT	0
#define CTR_IMINLINE_MASK	0xf
#define CTR_ERG_SHIFT		20
#define CTR_CWG_SHIFT		24
#define CTR_CWG_MASK		15
#define CTR_IDC_SHIFT		28
#define CTR_DIC_SHIFT		29
#define CTR_CACHE_MINLINE_MASK	\
	(0xf << CTR_DMINLINE_SHIFT | CTR_IMINLINE_MASK << CTR_IMINLINE_SHIFT)
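/*
 * Illustrative sketch (not part of the original header): DminLine and
 * IminLine encode log2 of the smallest cache line size in 4-byte words,
 * so the byte sizes can be recovered from a raw CTR_EL0 value as below.
 * The helper names are hypothetical, added only for illustration.
 *
 *	static inline u32 ctr_dminline_bytes(u32 ctr)
 *	{
 *		return 4 << ((ctr >> CTR_DMINLINE_SHIFT) & 0xf);
 *	}
 *
 *	static inline u32 ctr_iminline_bytes(u32 ctr)
 *	{
 *		return 4 << ((ctr >> CTR_IMINLINE_SHIFT) & CTR_IMINLINE_MASK);
 *	}
 */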
#define CTR_L1IP(ctr)		(((ctr) >> CTR_L1IP_SHIFT) & CTR_L1IP_MASK)
#define ICACHE_POLICY_VPIPT	0
#define ICACHE_POLICY_RESERVED	1
#define ICACHE_POLICY_VIPT	2
#define ICACHE_POLICY_PIPT	3
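/*
 * Usage sketch (hypothetical, not mainline code): decoding the L1
 * I-cache policy from a raw CTR_EL0 value. PIPT I-caches cannot alias;
 * VIPT ones may, which is what feeds ICACHEF_ALIASING at boot.
 *
 *	u32 ctr = read_cpuid_cachetype();
 *
 *	switch (CTR_L1IP(ctr)) {
 *	case ICACHE_POLICY_PIPT:
 *		break;		// physically indexed, no aliases
 *	case ICACHE_POLICY_VIPT:
 *		break;		// may alias, depending on geometry
 *	case ICACHE_POLICY_VPIPT:
 *		break;		// VMID-aware PIPT (ARMv8.2)
 *	}
 */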
#define L1_CACHE_SHIFT		(6)
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
#define CLIDR_LOUU_SHIFT	27
#define CLIDR_LOC_SHIFT		24
#define CLIDR_LOUIS_SHIFT	21

#define CLIDR_LOUU(clidr)	(((clidr) >> CLIDR_LOUU_SHIFT) & 0x7)
#define CLIDR_LOC(clidr)	(((clidr) >> CLIDR_LOC_SHIFT) & 0x7)
#define CLIDR_LOUIS(clidr)	(((clidr) >> CLIDR_LOUIS_SHIFT) & 0x7)
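/*
 * Usage sketch (illustrative): LoUU, LoC and LoUIS report the last
 * cache level requiring maintenance for, respectively, unification on
 * this PE, full coherency, and unification in the Inner Shareable
 * domain; 0 means no maintenance is needed at that scope. Assumes
 * read_sysreg() from <asm/sysreg.h>.
 *
 *	u64 clidr = read_sysreg(clidr_el1);
 *	bool no_clean_needed = CLIDR_LOC(clidr) == 0;
 */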
/*
 * Memory returned by kmalloc() may be used for DMA, so we must make
 * sure that all such allocations are cache aligned. Otherwise,
 * unrelated code may cause parts of the buffer to be read into the
 * cache before the transfer is done, causing old data to be seen by
 * the CPU.
 */
#define ARCH_DMA_MINALIGN	(128)
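/*
 * Example (hypothetical struct, shown for illustration only): a driver
 * embedding a DMA buffer inside a larger structure has to apply this
 * alignment by hand, so that fields sharing the buffer's cache lines
 * are not corrupted by non-coherent DMA.
 *
 *	struct my_dev_priv {
 *		spinlock_t lock;
 *		u8 dma_buf[256] __aligned(ARCH_DMA_MINALIGN);
 *	};
 */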
#ifdef CONFIG_KASAN_SW_TAGS
#define ARCH_SLAB_MINALIGN	(1ULL << KASAN_SHADOW_SCALE_SHIFT)
#elif defined(CONFIG_KASAN_HW_TAGS)
#define ARCH_SLAB_MINALIGN	MTE_GRANULE_SIZE
#endif

#ifndef __ASSEMBLY__

#include <linux/bitops.h>
#define ICACHEF_ALIASING	0
#define ICACHEF_VPIPT		1
extern unsigned long __icache_flags;
/*
 * Whilst the D-side always behaves as PIPT on AArch64, aliasing is
 * permitted in the I-cache.
 */
static inline int icache_is_aliasing(void)
{
	return test_bit(ICACHEF_ALIASING, &__icache_flags);
}
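/*
 * Usage sketch (flush_all/flush_range are placeholders, not real kernel
 * symbols): invalidating the I-cache by VA range is only sufficient
 * when the I-cache cannot alias, so callers choose a strategy such as:
 *
 *	if (icache_is_aliasing())
 *		flush_all();			// other aliases unreachable by VA
 *	else
 *		flush_range(start, end);
 */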
static __always_inline int icache_is_vpipt(void)
{
	return test_bit(ICACHEF_VPIPT, &__icache_flags);
}
static inline u32 cache_type_cwg(void)
{
	return (read_cpuid_cachetype() >> CTR_CWG_SHIFT) & CTR_CWG_MASK;
}
#define __read_mostly __section(".data..read_mostly")
static inline int cache_line_size_of_cpu(void)
{
	u32 cwg = cache_type_cwg();

	return cwg ? 4 << cwg : ARCH_DMA_MINALIGN;
}
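/*
 * Worked example: CTR_EL0.CWG holds log2 of the maximum write-back
 * granule in 4-byte words, so cwg == 4 gives 4 << 4 = 64 bytes. A CWG
 * of 0 means the granule is not reported, and we fall back to the
 * conservative ARCH_DMA_MINALIGN.
 */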
int cache_line_size(void);
/*
 * Read the effective value of CTR_EL0.
 *
 * According to ARM ARM for ARMv8-A (ARM DDI 0487C.a),
 * section D10.2.33 "CTR_EL0, Cache Type Register":
 *
 * CTR_EL0.IDC reports the data cache clean requirements for
 * instruction to data coherence.
 *
 *  0 - dcache clean to PoU is required unless:
 *     (CLIDR_EL1.LoC == 0) || (CLIDR_EL1.LoUIS == 0 && CLIDR_EL1.LoUU == 0)
 *  1 - dcache clean to PoU is not required for i-to-d coherence.
 *
 * This routine provides the CTR_EL0 with the IDC field updated to the
 * effective state.
 */
static inline u32 __attribute_const__ read_cpuid_effective_cachetype(void)
{
	u32 ctr = read_cpuid_cachetype();

	if (!(ctr & BIT(CTR_IDC_SHIFT))) {
		u64 clidr = read_sysreg(clidr_el1);

		if (CLIDR_LOC(clidr) == 0 ||
		    (CLIDR_LOUIS(clidr) == 0 && CLIDR_LOUU(clidr) == 0))
			ctr |= BIT(CTR_IDC_SHIFT);
	}

	return ctr;
}
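/*
 * Usage sketch (dcache_clean_pou() is a placeholder name, not a real
 * kernel symbol): callers can test the effective IDC bit to decide
 * whether a D-cache clean to PoU is needed before executing newly
 * written instructions.
 *
 *	if (!(read_cpuid_effective_cachetype() & BIT(CTR_IDC_SHIFT)))
 *		dcache_clean_pou(start, end);
 */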
#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_CACHE_H */