crypto: arm/aes-neonbs - avoid loading reorder argument on encryption
author Ard Biesheuvel <ardb@kernel.org>
Wed, 16 Sep 2020 12:36:41 +0000 (15:36 +0300)
committer Herbert Xu <herbert@gondor.apana.org.au>
Fri, 25 Sep 2020 07:48:15 +0000 (17:48 +1000)
Reordering the tweak is never necessary for encryption, so avoid the
argument load on the encryption path.
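For illustration, a minimal C analogue of the idea (the function and parameter
names below are hypothetical, not the kernel's actual glue code): the shared
XTS routine still accepts a "reorder final tweak" flag, but the encryption
wrapper passes a constant zero rather than forwarding a caller-supplied
argument, so the encryption path never has to load that argument at all.

    /*
     * Hypothetical sketch only, not the kernel's actual glue code:
     * the shared helper keeps its flag parameter, but the encrypt
     * wrapper hardwires it to false so no extra load is needed on
     * the encryption path.
     */
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    static void xts_crypt_blocks(uint8_t *out, const uint8_t *in,
                                 size_t blocks, uint8_t iv[16],
                                 bool reorder_final_tweak)
    {
            /* ... shared bit-sliced XTS processing would go here ... */
            (void)out; (void)in; (void)blocks; (void)iv;
            (void)reorder_final_tweak;
    }

    static void xts_encrypt(uint8_t *out, const uint8_t *in,
                            size_t blocks, uint8_t iv[16])
    {
            /* Encryption never reorders the final tweak. */
            xts_crypt_blocks(out, in, blocks, iv, false);
    }

    static void xts_decrypt(uint8_t *out, const uint8_t *in,
                            size_t blocks, uint8_t iv[16],
                            bool reorder_final_tweak)
    {
            /* Only decryption forwards the caller-supplied flag. */
            xts_crypt_blocks(out, in, blocks, iv, reorder_final_tweak);
    }

In the assembly below, the same effect is achieved by having
aesbs_xts_encrypt set ip to 0 directly, while aesbs_xts_decrypt still loads
the flag from the stack before falling through to the shared __xts_crypt
macro.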

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
arch/arm/crypto/aes-neonbs-core.S

diff --git a/arch/arm/crypto/aes-neonbs-core.S b/arch/arm/crypto/aes-neonbs-core.S
index 07cde13..7d0cc7f 100644
--- a/arch/arm/crypto/aes-neonbs-core.S
+++ b/arch/arm/crypto/aes-neonbs-core.S
@@ -956,8 +956,7 @@ ENDPROC(__xts_prepare8)
        push            {r4-r8, lr}
        mov             r5, sp                  // preserve sp
        ldrd            r6, r7, [sp, #24]       // get blocks and iv args
-       ldr             r8, [sp, #32]           // reorder final tweak?
-       rsb             r8, r8, #1
+       rsb             r8, ip, #1
        sub             ip, sp, #128            // make room for 8x tweak
        bic             ip, ip, #0xf            // align sp to 16 bytes
        mov             sp, ip
@@ -1013,9 +1012,11 @@ ENDPROC(__xts_prepare8)
        .endm
 
 ENTRY(aesbs_xts_encrypt)
+       mov             ip, #0                  // never reorder final tweak
        __xts_crypt     aesbs_encrypt8, q0, q1, q4, q6, q3, q7, q2, q5
 ENDPROC(aesbs_xts_encrypt)
 
 ENTRY(aesbs_xts_decrypt)
+       ldr             ip, [sp, #8]            // reorder final tweak?
        __xts_crypt     aesbs_decrypt8, q0, q1, q6, q4, q2, q7, q3, q5
 ENDPROC(aesbs_xts_decrypt)