1 /* SPDX-License-Identifier: GPL-2.0-only */
/* Copyright (C) 2012 ARM Ltd. */
5 #ifndef __ASM_IRQFLAGS_H
6 #define __ASM_IRQFLAGS_H
8 #include <asm/alternative.h>
9 #include <asm/barrier.h>
10 #include <asm/ptrace.h>
11 #include <asm/sysreg.h>
/*
 * AArch64 has flags for masking: Debug, Asynchronous (serror), Interrupts and
 * FIQ exceptions, in the 'daif' register. We mask and unmask them in 'daif'
 * order.
 * Masking debug exceptions causes all other exceptions to be masked too.
 * Masking SError masks IRQ/FIQ, but not debug exceptions. IRQ and FIQ are
 * always masked and unmasked together, and have no side effects for other
 * flags. Keeping to this order makes it easier for entry.S to know which
 * exceptions should be unmasked.
 */
/* CPU interrupt mask handling. */
/*
 * arch_local_irq_enable() - unmask IRQ and FIQ on the local CPU.
 *
 * Default sequence: "msr daifclr, #3" clears the I and F bits in DAIF.
 * When the ARM64_HAS_IRQ_PRIO_MASKING capability is patched in, the
 * ALTERNATIVE() instead writes GIC_PRIO_IRQON to ICC_PMR_EL1, i.e.
 * interrupts are unmasked via the GIC priority mask (pseudo-NMI).
 */
27 static inline void arch_local_irq_enable(void)
29 if (system_has_prio_mask_debugging()) {
30 u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);
/* Catch a corrupted mask: only the two canonical PMR values are legal here. */
32 WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
35 asm volatile(ALTERNATIVE(
36 "msr daifclr, #3 // arch_local_irq_enable",
37 __msr_s(SYS_ICC_PMR_EL1, "%0"),
38 ARM64_HAS_IRQ_PRIO_MASKING)
/* Input operand: PMR value consumed by the alternative sequence above. */
40 : "r" ((unsigned long) GIC_PRIO_IRQON)
/* NOTE(review): the asm's clobber list and closing braces appear truncated in this copy — confirm against upstream. */
/*
 * arch_local_irq_disable() - mask IRQ and FIQ on the local CPU.
 *
 * Default sequence: "msr daifset, #3" sets the I and F bits in DAIF.
 * When ARM64_HAS_IRQ_PRIO_MASKING is patched in, the ALTERNATIVE()
 * instead writes GIC_PRIO_IRQOFF to ICC_PMR_EL1 (pseudo-NMI masking).
 */
46 static inline void arch_local_irq_disable(void)
48 if (system_has_prio_mask_debugging()) {
49 u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);
/* Catch a corrupted mask: only the two canonical PMR values are legal here. */
51 WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
54 asm volatile(ALTERNATIVE(
55 "msr daifset, #3 // arch_local_irq_disable",
56 __msr_s(SYS_ICC_PMR_EL1, "%0"),
57 ARM64_HAS_IRQ_PRIO_MASKING)
/* Input operand: PMR value consumed by the alternative sequence above. */
59 : "r" ((unsigned long) GIC_PRIO_IRQOFF)
/* NOTE(review): the asm's clobber list and closing braces appear truncated in this copy — confirm against upstream. */
/*
 * arch_local_save_flags() - return the current interrupt mask state
 * without changing it.  With ARM64_HAS_IRQ_PRIO_MASKING the state is
 * read from ICC_PMR_EL1 rather than DAIF.
 */
66 static inline unsigned long arch_local_save_flags(void)
70 asm volatile(ALTERNATIVE(
/* NOTE(review): the default (DAIF-read) ALTERNATIVE operand and the asm's output operand/closing appear truncated in this copy — confirm upstream. */
72 __mrs_s("%0", SYS_ICC_PMR_EL1),
73 ARM64_HAS_IRQ_PRIO_MASKING)
/*
 * arch_irqs_disabled_flags() - nonzero if @flags describe a state with
 * interrupts masked.
 * Default: AND the saved flags with PSR_I_BIT (nonzero => I bit set).
 * PMR path: XOR against GIC_PRIO_IRQON (nonzero => PMR != unmasked value).
 */
81 static inline int arch_irqs_disabled_flags(unsigned long flags)
85 asm volatile(ALTERNATIVE(
86 "and %w0, %w1, #" __stringify(PSR_I_BIT),
87 "eor %w0, %w1, #" __stringify(GIC_PRIO_IRQON),
88 ARM64_HAS_IRQ_PRIO_MASKING)
/* NOTE(review): the asm's operand lists and the return statement appear truncated in this copy — confirm upstream. */
/* arch_irqs_disabled() - nonzero if interrupts are currently masked on this CPU. */
96 static inline int arch_irqs_disabled(void)
98 return arch_irqs_disabled_flags(arch_local_save_flags());
/*
 * arch_local_irq_save() - save the current interrupt mask state, then
 * disable interrupts.  Returns the saved flags for a later
 * arch_local_irq_restore().
 */
101 static inline unsigned long arch_local_irq_save(void)
105 flags = arch_local_save_flags();
/*
 * There are too many states with IRQs disabled, just keep the current
 * state if interrupts are already disabled/masked.
 */
111 if (!arch_irqs_disabled_flags(flags))
112 arch_local_irq_disable();
/* NOTE(review): the flags declaration, return statement and braces appear truncated in this copy — confirm upstream. */
/*
 * arch_local_irq_restore() - restore a mask state previously returned by
 * arch_local_irq_save()/arch_local_save_flags().  With
 * ARM64_HAS_IRQ_PRIO_MASKING the state is written to ICC_PMR_EL1.
 */
120 static inline void arch_local_irq_restore(unsigned long flags)
122 asm volatile(ALTERNATIVE(
/* NOTE(review): the default (DAIF-write) ALTERNATIVE operand and the asm's operand/clobber lists appear truncated in this copy — confirm upstream. */
124 __msr_s(SYS_ICC_PMR_EL1, "%0"),
125 ARM64_HAS_IRQ_PRIO_MASKING)
133 #endif /* __ASM_IRQFLAGS_H */