arm64: Shift the __tlbi_level() indentation left
author     Catalin Marinas <catalin.marinas@arm.com>
           Tue, 7 Jul 2020 10:26:14 +0000 (11:26 +0100)
committer  Catalin Marinas <catalin.marinas@arm.com>
           Tue, 7 Jul 2020 10:26:14 +0000 (11:26 +0100)
This is for consistency with the other __tlbi macros in this file. No
functional change.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/include/asm/tlbflush.h

index 3505f6f..39aed2e 100644
 #define TLBI_TTL_TG_16K                2
 #define TLBI_TTL_TG_64K                3
 
-#define __tlbi_level(op, addr, level)                                  \
-       do {                                                            \
-               u64 arg = addr;                                         \
+#define __tlbi_level(op, addr, level) do {                             \
+       u64 arg = addr;                                                 \
                                                                        \
-               if (cpus_have_const_cap(ARM64_HAS_ARMv8_4_TTL) &&       \
-                   level) {                                            \
-                       u64 ttl = level & 3;                            \
+       if (cpus_have_const_cap(ARM64_HAS_ARMv8_4_TTL) &&               \
+           level) {                                                    \
+               u64 ttl = level & 3;                                    \
                                                                        \
-                       switch (PAGE_SIZE) {                            \
-                       case SZ_4K:                                     \
-                               ttl |= TLBI_TTL_TG_4K << 2;             \
-                               break;                                  \
-                       case SZ_16K:                                    \
-                               ttl |= TLBI_TTL_TG_16K << 2;            \
-                               break;                                  \
-                       case SZ_64K:                                    \
-                               ttl |= TLBI_TTL_TG_64K << 2;            \
-                               break;                                  \
-                       }                                               \
-                                                                       \
-                       arg &= ~TLBI_TTL_MASK;                          \
-                       arg |= FIELD_PREP(TLBI_TTL_MASK, ttl);          \
+               switch (PAGE_SIZE) {                                    \
+               case SZ_4K:                                             \
+                       ttl |= TLBI_TTL_TG_4K << 2;                     \
+                       break;                                          \
+               case SZ_16K:                                            \
+                       ttl |= TLBI_TTL_TG_16K << 2;                    \
+                       break;                                          \
+               case SZ_64K:                                            \
+                       ttl |= TLBI_TTL_TG_64K << 2;                    \
+                       break;                                          \
                }                                                       \
                                                                        \
-               __tlbi(op, arg);                                        \
-       } while(0)
+               arg &= ~TLBI_TTL_MASK;                                  \
+               arg |= FIELD_PREP(TLBI_TTL_MASK, ttl);                  \
+       }                                                               \
+                                                                       \
+       __tlbi(op, arg);                                                \
+} while(0)
 
 #define __tlbi_user_level(op, arg, level) do {                         \
        if (arm64_kernel_unmapped_at_el0())                             \
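For reference, a minimal standalone sketch of the TTL hint encoding that __tlbi_level() performs. The TLBI_TTL_TG_* values mirror the header above and TLBI_TTL_MASK covers bits [47:44] of the TLBI argument as in the kernel; the tlbi_encode_ttl() helper, the hard-coded 4K-granule case and main() are illustrative only, not kernel code.

/*
 * Illustrative sketch of the TTL encoding done by __tlbi_level() above.
 * Not kernel code: the helper name and the fixed 4K granule are assumptions.
 */
#include <stdint.h>
#include <stdio.h>

#define TLBI_TTL_TG_4K		1
#define TLBI_TTL_TG_16K		2
#define TLBI_TTL_TG_64K		3

#define TLBI_TTL_SHIFT		44
#define TLBI_TTL_MASK		(0xfULL << TLBI_TTL_SHIFT)	/* bits [47:44] */

/* Fold the translation granule and table level into the TLBI argument. */
static uint64_t tlbi_encode_ttl(uint64_t arg, unsigned int level)
{
	uint64_t ttl = level & 3;

	ttl |= TLBI_TTL_TG_4K << 2;		/* assume 4K pages */

	arg &= ~TLBI_TTL_MASK;
	arg |= ttl << TLBI_TTL_SHIFT;		/* open-coded FIELD_PREP() */

	return arg;
}

int main(void)
{
	/* Level-3 (last-level) hint on an arbitrary address argument. */
	uint64_t arg = tlbi_encode_ttl(0x0000000012345ULL, 3);

	printf("TLBI argument with TTL hint: 0x%016llx\n",
	       (unsigned long long)arg);
	return 0;
}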