1 // SPDX-License-Identifier: GPL-2.0
3 * K3 SA2UL crypto accelerator driver
5 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
11 #include <linux/clk.h>
12 #include <linux/dmaengine.h>
13 #include <linux/dmapool.h>
14 #include <linux/module.h>
15 #include <linux/of_device.h>
16 #include <linux/platform_device.h>
17 #include <linux/pm_runtime.h>
19 #include <crypto/aes.h>
20 #include <crypto/authenc.h>
21 #include <crypto/des.h>
22 #include <crypto/internal/aead.h>
23 #include <crypto/internal/hash.h>
24 #include <crypto/internal/skcipher.h>
25 #include <crypto/scatterwalk.h>
26 #include <crypto/sha.h>
30 /* Byte offset for key in encryption security context */
31 #define SC_ENC_KEY_OFFSET (1 + 27 + 4)
32 /* Byte offset for Aux-1 in encryption security context */
33 #define SC_ENC_AUX1_OFFSET (1 + 27 + 4 + 32)
35 #define SA_CMDL_UPD_ENC 0x0001
36 #define SA_CMDL_UPD_AUTH 0x0002
37 #define SA_CMDL_UPD_ENC_IV 0x0004
38 #define SA_CMDL_UPD_AUTH_IV 0x0008
39 #define SA_CMDL_UPD_AUX_KEY 0x0010
41 #define SA_AUTH_SUBKEY_LEN 16
42 #define SA_CMDL_PAYLOAD_LENGTH_MASK 0xFFFF
43 #define SA_CMDL_SOP_BYPASS_LEN_MASK 0xFF000000
45 #define MODE_CONTROL_BYTES 27
46 #define SA_HASH_PROCESSING 0
47 #define SA_CRYPTO_PROCESSING 0
48 #define SA_UPLOAD_HASH_TO_TLR BIT(6)
50 #define SA_SW0_FLAGS_MASK 0xF0000
51 #define SA_SW0_CMDL_INFO_MASK 0x1F00000
52 #define SA_SW0_CMDL_PRESENT BIT(4)
53 #define SA_SW0_ENG_ID_MASK 0x3E000000
54 #define SA_SW0_DEST_INFO_PRESENT BIT(30)
55 #define SA_SW2_EGRESS_LENGTH 0xFF000000
56 #define SA_BASIC_HASH 0x10
58 #define SHA256_DIGEST_WORDS 8
59 /* Make 32-bit word from 4 bytes */
60 #define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
61 ((b2) << 8) | (b3))
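/* Example (illustrative): SA_MK_U32(0x01, 0x02, 0x03, 0x04) == 0x01020304 */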
63 /* size of SCCTL structure in bytes */
64 #define SA_SCCTL_SZ 16
66 /* Max Authentication tag size */
67 #define SA_MAX_AUTH_TAG_SZ 64
72 static struct device *sa_k3_dev;
75 * struct sa_cmdl_cfg - Command label configuration descriptor
76 * @aalg: authentication algorithm ID
77 * @enc_eng_id: Encryption Engine ID supported by the SA hardware
78 * @auth_eng_id: Authentication Engine ID
79 * @iv_size: Initialization Vector size
80 * @akey: Authentication key
81 * @akey_len: Authentication key length
82 * @enc: True, if this is an encode request
95 * struct algo_data - Crypto algorithm specific data
96 * @enc_eng: Encryption engine info structure
97 * @auth_eng: Authentication engine info structure
98 * @auth_ctrl: Authentication control word
99 * @hash_size: Size of digest
100 * @iv_idx: iv index in psdata
101 * @iv_out_size: size of the IV passed back out in the metadata
102 * @ealg_id: Encryption Algorithm ID
103 * @aalg_id: Authentication algorithm ID
104 * @mci_enc: Mode Control Instruction for Encryption algorithm
105 * @mci_dec: Mode Control Instruction for Decryption
106 * @inv_key: Whether the encryption algorithm demands key inversion
107 * @ctx: Pointer to the algorithm context
108 * @keyed_mac: Whether the authentication algorithm has a key
109 * @prep_iopad: Function pointer to generate intermediate ipad/opad
112 struct sa_eng_info enc_eng;
113 struct sa_eng_info auth_eng;
123 struct sa_tfm_ctx *ctx;
125 void (*prep_iopad)(struct algo_data *algo, const u8 *key,
126 u16 key_sz, __be32 *ipad, __be32 *opad);
130 * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
131 * @type: Type of the crypto algorithm.
132 * @alg: Union of crypto algorithm definitions.
133 * @registered: Flag indicating if the crypto algorithm is already registered
136 u32 type; /* CRYPTO_ALG_TYPE from <linux/crypto.h> */
138 struct skcipher_alg skcipher;
139 struct ahash_alg ahash;
140 struct aead_alg aead;
146 * struct sa_mapped_sg: scatterlist information for tx and rx
147 * @mapped: Set to true if the @sgt is mapped
148 * @dir: mapping direction used for @sgt
149 * @split_sg: Set if the sg is split and needs to be freed up
150 * @static_sg: Static scatterlist entry for overriding data
151 * @sgt: scatterlist table for DMA API use
153 struct sa_mapped_sg {
155 enum dma_data_direction dir;
156 struct scatterlist static_sg;
157 struct scatterlist *split_sg;
161 * struct sa_rx_data: RX Packet miscellaneous data placeholder
162 * @req: crypto request data pointer
163 * @ddev: pointer to the DMA device
164 * @tx_in: dma_async_tx_descriptor pointer for rx channel
165 * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
166 * @enc: Flag indicating either encryption or decryption
167 * @enc_iv_size: Initialisation vector size
168 * @iv_idx: Initialisation vector index
173 struct dma_async_tx_descriptor *tx_in;
174 struct sa_mapped_sg mapped_sg[2];
181 * struct sa_req: SA request definition
182 * @dev: device for the request
183 * @size: total data to be xmitted via DMA
184 * @enc_offset: offset of cipher data
185 * @enc_size: data to be passed to cipher engine
187 * @auth_offset: offset of the authentication data
188 * @auth_size: size of the authentication data
189 * @auth_iv: authentication IV
190 * @type: algorithm type for the request
191 * @cmdl: command label pointer
192 * @base: pointer to the base request
193 * @ctx: pointer to the algorithm context data
194 * @enc: true if this is an encode request
196 * @dst: destination data
197 * @callback: DMA callback for the request
198 * @mdata_size: metadata size passed to DMA
211 struct crypto_async_request *base;
212 struct sa_tfm_ctx *ctx;
214 struct scatterlist *src;
215 struct scatterlist *dst;
216 dma_async_tx_callback callback;
221 * Mode Control Instructions for various key lengths (128, 192, 256 bits),
222 * for CBC (Cipher Block Chaining) mode encryption
224 static u8 mci_cbc_enc_array[3][MODE_CONTROL_BYTES] = {
225 { 0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
226 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
227 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
228 { 0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
229 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
230 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
231 { 0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
232 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
233 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
237 * Mode Control Instructions for various key lengths (128, 192, 256 bits),
238 * for CBC (Cipher Block Chaining) mode decryption
240 static u8 mci_cbc_dec_array[3][MODE_CONTROL_BYTES] = {
241 { 0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
242 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
243 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
244 { 0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
245 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
246 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
247 { 0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
248 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
249 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
253 * Mode Control Instructions for various key lengths (128, 192, 256 bits),
254 * for CBC (Cipher Block Chaining) mode encryption (no-IV variant, used by the AEAD path)
256 static u8 mci_cbc_enc_no_iv_array[3][MODE_CONTROL_BYTES] = {
257 { 0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
258 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
259 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
260 { 0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
261 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
262 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
263 { 0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
264 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
265 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
269 * Mode Control Instructions for various key lengths (128, 192, 256 bits),
270 * for CBC (Cipher Block Chaining) mode decryption (no-IV variant, used by the AEAD path)
272 static u8 mci_cbc_dec_no_iv_array[3][MODE_CONTROL_BYTES] = {
273 { 0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
274 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
275 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
276 { 0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
277 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
278 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
279 { 0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
280 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
281 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
285 * Mode Control Instructions for various key lengths (128, 192, 256 bits),
286 * for ECB (Electronic Code Book) mode encryption
288 static u8 mci_ecb_enc_array[3][MODE_CONTROL_BYTES] = {
289 { 0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
290 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
291 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
292 { 0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
293 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
294 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
295 { 0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
296 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
297 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
301 * Mode Control Instructions for various key lengths (128, 192, 256 bits),
302 * for ECB (Electronic Code Book) mode decryption
304 static u8 mci_ecb_dec_array[3][MODE_CONTROL_BYTES] = {
305 { 0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
306 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
307 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
308 { 0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
309 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
310 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
311 { 0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
312 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
313 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
317 * Mode Control Instructions for the 3DES algorithm,
318 * for CBC (Cipher Block Chaining) and ECB modes,
319 * encryption and decryption respectively
321 static u8 mci_cbc_3des_enc_array[MODE_CONTROL_BYTES] = {
322 0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
323 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
327 static u8 mci_cbc_3des_dec_array[MODE_CONTROL_BYTES] = {
328 0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
329 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
333 static u8 mci_ecb_3des_enc_array[MODE_CONTROL_BYTES] = {
334 0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
335 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
339 static u8 mci_ecb_3des_dec_array[MODE_CONTROL_BYTES] = {
340 0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
341 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
346 * Perform 16-byte (128-bit) swizzling.
347 * The SA2UL expects the security context to
348 * be in little endian; the bus width is 128 bits (16 bytes),
349 * hence swap 16 bytes at a time from higher to lower address.
351 static void sa_swiz_128(u8 *in, u16 len)
356 for (i = 0; i < len; i += 16) {
357 memcpy(data, &in[i], 16);
358 for (j = 0; j < 16; j++)
359 in[i + j] = data[15 - j];
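/*
 * Illustrative sketch (not part of the original driver): shows the effect
 * of sa_swiz_128() on a 32-byte buffer. Each 16-byte group is reversed in
 * place, so bytes 00 01 .. 0f become 0f 0e .. 00.
 */
static void __maybe_unused sa_swiz_128_example(void)
{
	u8 buf[32];
	int i;

	for (i = 0; i < 32; i++)
		buf[i] = i;

	sa_swiz_128(buf, sizeof(buf));
	/* Now buf[0] == 0x0f, buf[15] == 0x00, buf[16] == 0x1f, buf[31] == 0x10 */
}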
363 /* Prepare the ipad and opad from the key, per the HMAC algorithm step 1 */
364 static void prepare_kiopad(u8 *k_ipad, u8 *k_opad, const u8 *key, u16 key_sz)
368 for (i = 0; i < key_sz; i++) {
369 k_ipad[i] = key[i] ^ 0x36;
370 k_opad[i] = key[i] ^ 0x5c;
373 /* Remaining bytes: XOR with zero just leaves the pad constants */
374 for (; i < SHA1_BLOCK_SIZE; i++) {
375 k_ipad[i] = 0x36;
376 k_opad[i] = 0x5c;
377 }
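/*
 * Worked example (illustrative): for a key byte of 0x0b, the pads are
 * 0x0b ^ 0x36 = 0x3d (ipad) and 0x0b ^ 0x5c = 0x57 (opad); bytes beyond
 * key_sz are simply the constants 0x36/0x5c, since x ^ 0 == x.
 */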
380 static void sa_export_shash(struct shash_desc *hash, int block_size,
381 int digest_size, __be32 *out)
384 struct sha1_state sha1;
385 struct sha256_state sha256;
386 struct sha512_state sha512;
392 switch (digest_size) {
393 case SHA1_DIGEST_SIZE:
395 result = sha.sha1.state;
397 case SHA256_DIGEST_SIZE:
399 result = sha.sha256.state;
402 dev_err(sa_k3_dev, "%s: bad digest_size=%d\n", __func__,
407 crypto_shash_export(hash, state);
409 for (i = 0; i < digest_size >> 2; i++)
410 out[i] = cpu_to_be32(result[i]);
413 static void sa_prepare_iopads(struct algo_data *data, const u8 *key,
414 u16 key_sz, __be32 *ipad, __be32 *opad)
416 SHASH_DESC_ON_STACK(shash, data->ctx->shash);
417 int block_size = crypto_shash_blocksize(data->ctx->shash);
418 int digest_size = crypto_shash_digestsize(data->ctx->shash);
419 u8 k_ipad[SHA1_BLOCK_SIZE];
420 u8 k_opad[SHA1_BLOCK_SIZE];
422 shash->tfm = data->ctx->shash;
424 prepare_kiopad(k_ipad, k_opad, key, key_sz);
426 memzero_explicit(ipad, block_size);
427 memzero_explicit(opad, block_size);
429 crypto_shash_init(shash);
430 crypto_shash_update(shash, k_ipad, block_size);
431 sa_export_shash(shash, block_size, digest_size, ipad);
433 crypto_shash_init(shash);
434 crypto_shash_update(shash, k_opad, block_size);
436 sa_export_shash(shash, block_size, digest_size, opad);
439 /* Derive the inverse key used in AES-CBC decryption operation */
440 static inline int sa_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
442 struct crypto_aes_ctx ctx;
445 if (aes_expandkey(&ctx, key, key_sz)) {
446 dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
450 /* workaround to get the right inverse for AES_KEYSIZE_192 size keys */
451 if (key_sz == AES_KEYSIZE_192) {
452 ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];
453 ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];
456 /* Based on crypto_aes_expand_key logic */
458 case AES_KEYSIZE_128:
459 case AES_KEYSIZE_192:
460 key_pos = key_sz + 24;
463 case AES_KEYSIZE_256:
464 key_pos = key_sz + 24 - 4;
468 dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
472 memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
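/*
 * Illustrative note (assumption, not from the original source): for
 * AES-128, aes_expandkey() yields 44 round-key words (key_enc[0..43]);
 * key_pos = 16 + 24 = 40 selects the last four words, i.e. the final
 * round key, which is what the decryption datapath consumes first. The
 * AES-192 fixup above exists because its 52-word schedule ends two words
 * short of the words 48..53 that this copy would otherwise read.
 */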
476 /* Set Security context for the encryption engine */
477 static int sa_set_sc_enc(struct algo_data *ad, const u8 *key, u16 key_sz,
480 const u8 *mci = NULL;
482 /* Set Encryption mode selector to crypto processing */
483 sc_buf[0] = SA_CRYPTO_PROCESSING;
489 /* Set the mode control instructions in security context */
491 memcpy(&sc_buf[1], mci, MODE_CONTROL_BYTES);
493 /* For AES-CBC decryption get the inverse key */
494 if (ad->inv_key && !enc) {
495 if (sa_aes_inv_key(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz))
497 /* For all other cases: the key is used as-is */
499 memcpy(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz);
505 /* Set Security context for the authentication engine */
506 static void sa_set_sc_auth(struct algo_data *ad, const u8 *key, u16 key_sz,
509 __be32 ipad[64], opad[64];
511 /* Set Authentication mode selector to hash processing */
512 sc_buf[0] = SA_HASH_PROCESSING;
513 /* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
514 sc_buf[1] = SA_UPLOAD_HASH_TO_TLR;
515 sc_buf[1] |= ad->auth_ctrl;
517 /* Copy the keys or ipad/opad */
519 ad->prep_iopad(ad, key, key_sz, ipad, opad);
521 /* Copy ipad to AuthKey */
522 memcpy(&sc_buf[32], ipad, ad->hash_size);
523 /* Copy opad to Aux-1 */
524 memcpy(&sc_buf[64], opad, ad->hash_size);
527 sc_buf[1] |= SA_BASIC_HASH;
531 static inline void sa_copy_iv(__be32 *out, const u8 *iv, bool size16)
535 for (j = 0; j < ((size16) ? 4 : 2); j++) {
536 *out = cpu_to_be32(*((u32 *)iv));
542 /* Format general command label */
543 static int sa_format_cmdl_gen(struct sa_cmdl_cfg *cfg, u8 *cmdl,
544 struct sa_cmdl_upd_info *upd_info)
546 u8 enc_offset = 0, auth_offset = 0, total = 0;
547 u8 enc_next_eng = SA_ENG_ID_OUTPORT2;
548 u8 auth_next_eng = SA_ENG_ID_OUTPORT2;
549 u32 *word_ptr = (u32 *)cmdl;
552 /* Clear the command label */
553 memzero_explicit(cmdl, (SA_MAX_CMDL_WORDS * sizeof(u32)));
555 /* Initialize the command update structure */
556 memzero_explicit(upd_info, sizeof(*upd_info));
558 if (cfg->enc_eng_id && cfg->auth_eng_id) {
560 auth_offset = SA_CMDL_HEADER_SIZE_BYTES;
561 enc_next_eng = cfg->auth_eng_id;
564 auth_offset += cfg->iv_size;
566 enc_offset = SA_CMDL_HEADER_SIZE_BYTES;
567 auth_next_eng = cfg->enc_eng_id;
571 if (cfg->enc_eng_id) {
572 upd_info->flags |= SA_CMDL_UPD_ENC;
573 upd_info->enc_size.index = enc_offset >> 2;
574 upd_info->enc_offset.index = upd_info->enc_size.index + 1;
575 /* Encryption command label */
576 cmdl[enc_offset + SA_CMDL_OFFSET_NESC] = enc_next_eng;
578 /* Encryption modes requiring IV */
580 upd_info->flags |= SA_CMDL_UPD_ENC_IV;
581 upd_info->enc_iv.index =
582 (enc_offset + SA_CMDL_HEADER_SIZE_BYTES) >> 2;
583 upd_info->enc_iv.size = cfg->iv_size;
585 cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
586 SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
588 cmdl[enc_offset + SA_CMDL_OFFSET_OPTION_CTRL1] =
589 (SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
590 total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
592 cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
593 SA_CMDL_HEADER_SIZE_BYTES;
594 total += SA_CMDL_HEADER_SIZE_BYTES;
598 if (cfg->auth_eng_id) {
599 upd_info->flags |= SA_CMDL_UPD_AUTH;
600 upd_info->auth_size.index = auth_offset >> 2;
601 upd_info->auth_offset.index = upd_info->auth_size.index + 1;
602 cmdl[auth_offset + SA_CMDL_OFFSET_NESC] = auth_next_eng;
603 cmdl[auth_offset + SA_CMDL_OFFSET_LABEL_LEN] =
604 SA_CMDL_HEADER_SIZE_BYTES;
605 total += SA_CMDL_HEADER_SIZE_BYTES;
608 total = roundup(total, 8);
610 for (i = 0; i < total / 4; i++)
611 word_ptr[i] = swab32(word_ptr[i]);
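/*
 * Illustrative helper (an assumption, not part of the original driver):
 * captures the mask/__ffs() bit-field packing idiom that sa_update_cmdl()
 * below applies to the payload-length and SOP-bypass fields.
 */
static inline u32 __maybe_unused sa_pack_field(u32 word, u32 mask, u32 val)
{
	word &= ~mask;
	word |= (val << __ffs(mask)) & mask;
	return word;
}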
616 /* Update Command label */
617 static inline void sa_update_cmdl(struct sa_req *req, u32 *cmdl,
618 struct sa_cmdl_upd_info *upd_info)
622 if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
623 cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
624 cmdl[upd_info->enc_size.index] |= req->enc_size;
625 cmdl[upd_info->enc_offset.index] &=
626 ~SA_CMDL_SOP_BYPASS_LEN_MASK;
627 cmdl[upd_info->enc_offset.index] |=
628 ((u32)req->enc_offset <<
629 __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));
631 if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
632 __be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
633 u32 *enc_iv = (u32 *)req->enc_iv;
635 for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
636 data[j] = cpu_to_be32(*enc_iv);
642 if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
643 cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
644 cmdl[upd_info->auth_size.index] |= req->auth_size;
645 cmdl[upd_info->auth_offset.index] &=
646 ~SA_CMDL_SOP_BYPASS_LEN_MASK;
647 cmdl[upd_info->auth_offset.index] |=
648 ((u32)req->auth_offset <<
649 __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));
650 if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
651 sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
653 (upd_info->auth_iv.size > 8));
655 if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
656 int offset = (req->auth_size & 0xF) ? 4 : 0;
658 memcpy(&cmdl[upd_info->aux_key_info.index],
659 &upd_info->aux_key[offset], 16);
664 /* Format SWINFO words to be sent to SA */
665 static
666 void sa_set_swinfo(u8 eng_id, u16 sc_id, dma_addr_t sc_phys,
667 u8 cmdl_present, u8 cmdl_offset, u8 flags,
668 u8 hash_size, u32 *swinfo)
671 swinfo[0] |= (flags << __ffs(SA_SW0_FLAGS_MASK));
672 if (likely(cmdl_present))
673 swinfo[0] |= ((cmdl_offset | SA_SW0_CMDL_PRESENT) <<
674 __ffs(SA_SW0_CMDL_INFO_MASK));
675 swinfo[0] |= (eng_id << __ffs(SA_SW0_ENG_ID_MASK));
677 swinfo[0] |= SA_SW0_DEST_INFO_PRESENT;
678 swinfo[1] = (u32)(sc_phys & 0xFFFFFFFFULL);
679 swinfo[2] = (u32)((sc_phys & 0xFFFFFFFF00000000ULL) >> 32);
680 swinfo[2] |= (hash_size << __ffs(SA_SW2_EGRESS_LENGTH));
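/*
 * Worked example (illustrative): with eng_id = 2, sc_phys = 0x100200000,
 * cmdl_present = 1, cmdl_offset = 0, flags = 1 and hash_size = 16:
 *   swinfo[0] gets (1 << 16) | (BIT(4) << 20) | (2 << 25) |
 *             SA_SW0_DEST_INFO_PRESENT;
 *   swinfo[1] = 0x00200000 (low 32 bits of sc_phys);
 *   swinfo[2] = 0x1 | (16 << 24) = 0x10000001 (high bits plus egress length).
 */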
683 /* Dump the security context */
684 static void sa_dump_sc(u8 *buf, dma_addr_t dma_addr)
687 dev_info(sa_k3_dev, "Security context dump:: 0x%pad\n", &dma_addr);
688 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
689 16, 1, buf, SA_CTX_MAX_SZ, false);
694 int sa_init_sc(struct sa_ctx_info *ctx, const u8 *enc_key,
695 u16 enc_key_sz, const u8 *auth_key, u16 auth_key_sz,
696 struct algo_data *ad, u8 enc, u32 *swinfo)
698 int enc_sc_offset = 0;
699 int auth_sc_offset = 0;
700 u8 *sc_buf = ctx->sc;
701 u16 sc_id = ctx->sc_id;
704 memzero_explicit(sc_buf, SA_CTX_MAX_SZ);
706 if (ad->auth_eng.eng_id) {
707 if (enc)
708 first_engine = ad->enc_eng.eng_id;
709 else
710 first_engine = ad->auth_eng.eng_id;
712 enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
713 auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
714 sc_buf[1] = SA_SCCTL_FE_AUTH_ENC;
717 ad->hash_size = roundup(ad->hash_size, 8);
719 } else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
720 enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
721 first_engine = ad->enc_eng.eng_id;
722 sc_buf[1] = SA_SCCTL_FE_ENC;
723 ad->hash_size = ad->iv_out_size;
726 /* SCCTL Owner info: 0=host, 1=CP_ACE */
727 sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0;
728 memcpy(&sc_buf[2], &sc_id, 2);
734 /* Prepare context for encryption engine */
735 if (ad->enc_eng.sc_size) {
736 if (sa_set_sc_enc(ad, enc_key, enc_key_sz, enc,
737 &sc_buf[enc_sc_offset]))
741 /* Prepare context for authentication engine */
742 if (ad->auth_eng.sc_size)
743 sa_set_sc_auth(ad, auth_key, auth_key_sz,
744 &sc_buf[auth_sc_offset]);
746 /* Set the ownership of context to CP_ACE */
747 sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0x80;
749 /* swizzle the security context */
750 sa_swiz_128(sc_buf, SA_CTX_MAX_SZ);
752 sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
753 SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);
755 sa_dump_sc(sc_buf, ctx->sc_phys);
760 /* Free the per direction context memory */
761 static void sa_free_ctx_info(struct sa_ctx_info *ctx,
762 struct sa_crypto_data *data)
766 bn = ctx->sc_id - data->sc_id_start;
767 spin_lock(&data->scid_lock);
768 __clear_bit(bn, data->ctx_bm);
770 spin_unlock(&data->scid_lock);
773 dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
778 static int sa_init_ctx_info(struct sa_ctx_info *ctx,
779 struct sa_crypto_data *data)
784 spin_lock(&data->scid_lock);
785 bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
786 __set_bit(bn, data->ctx_bm);
788 spin_unlock(&data->scid_lock);
790 ctx->sc_id = (u16)(data->sc_id_start + bn);
792 ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
794 dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
802 spin_lock(&data->scid_lock);
803 __clear_bit(bn, data->ctx_bm);
805 spin_unlock(&data->scid_lock);
810 static void sa_cipher_cra_exit(struct crypto_skcipher *tfm)
812 struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
813 struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
815 dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
816 __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
817 ctx->dec.sc_id, &ctx->dec.sc_phys);
819 sa_free_ctx_info(&ctx->enc, data);
820 sa_free_ctx_info(&ctx->dec, data);
822 crypto_free_sync_skcipher(ctx->fallback.skcipher);
825 static int sa_cipher_cra_init(struct crypto_skcipher *tfm)
827 struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
828 struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
829 const char *name = crypto_tfm_alg_name(&tfm->base);
832 memzero_explicit(ctx, sizeof(*ctx));
833 ctx->dev_data = data;
835 ret = sa_init_ctx_info(&ctx->enc, data);
838 ret = sa_init_ctx_info(&ctx->dec, data);
840 sa_free_ctx_info(&ctx->enc, data);
844 ctx->fallback.skcipher =
845 crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
847 if (IS_ERR(ctx->fallback.skcipher)) {
848 dev_err(sa_k3_dev, "Error allocating fallback algo %s\n", name);
849 return PTR_ERR(ctx->fallback.skcipher);
852 dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
853 __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
854 ctx->dec.sc_id, &ctx->dec.sc_phys);
858 static int sa_cipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
859 unsigned int keylen, struct algo_data *ad)
861 struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
863 struct sa_cmdl_cfg cfg;
866 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
867 keylen != AES_KEYSIZE_256)
870 ad->enc_eng.eng_id = SA_ENG_ID_EM1;
871 ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
873 memzero_explicit(&cfg, sizeof(cfg));
874 cfg.enc_eng_id = ad->enc_eng.eng_id;
875 cfg.iv_size = crypto_skcipher_ivsize(tfm);
877 crypto_sync_skcipher_clear_flags(ctx->fallback.skcipher,
878 CRYPTO_TFM_REQ_MASK);
879 crypto_sync_skcipher_set_flags(ctx->fallback.skcipher,
880 tfm->base.crt_flags &
881 CRYPTO_TFM_REQ_MASK);
882 ret = crypto_sync_skcipher_setkey(ctx->fallback.skcipher, key, keylen);
886 /* Setup Encryption Security Context & Command label template */
887 if (sa_init_sc(&ctx->enc, key, keylen, NULL, 0, ad, 1,
891 cmdl_len = sa_format_cmdl_gen(&cfg,
893 &ctx->enc.cmdl_upd_info);
894 if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
897 ctx->enc.cmdl_size = cmdl_len;
899 /* Setup Decryption Security Context & Command label template */
900 if (sa_init_sc(&ctx->dec, key, keylen, NULL, 0, ad, 0,
904 cfg.enc_eng_id = ad->enc_eng.eng_id;
905 cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
906 &ctx->dec.cmdl_upd_info);
908 if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
911 ctx->dec.cmdl_size = cmdl_len;
912 ctx->iv_idx = ad->iv_idx;
917 dev_err(sa_k3_dev, "%s: badkey\n", __func__);
921 static int sa_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
924 struct algo_data ad = { 0 };
925 /* Convert the key size (16/24/32) to the key size index (0/1/2) */
926 int key_idx = (keylen >> 3) - 2;
931 ad.mci_enc = mci_cbc_enc_array[key_idx];
932 ad.mci_dec = mci_cbc_dec_array[key_idx];
934 ad.ealg_id = SA_EALG_ID_AES_CBC;
938 return sa_cipher_setkey(tfm, key, keylen, &ad);
941 static int sa_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
944 struct algo_data ad = { 0 };
945 /* Convert the key size (16/24/32) to the key size index (0/1/2) */
946 int key_idx = (keylen >> 3) - 2;
951 ad.mci_enc = mci_ecb_enc_array[key_idx];
952 ad.mci_dec = mci_ecb_dec_array[key_idx];
954 ad.ealg_id = SA_EALG_ID_AES_ECB;
956 return sa_cipher_setkey(tfm, key, keylen, &ad);
959 static int sa_3des_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
962 struct algo_data ad = { 0 };
964 ad.mci_enc = mci_cbc_3des_enc_array;
965 ad.mci_dec = mci_cbc_3des_dec_array;
966 ad.ealg_id = SA_EALG_ID_3DES_CBC;
970 return sa_cipher_setkey(tfm, key, keylen, &ad);
973 static int sa_3des_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
976 struct algo_data ad = { 0 };
978 ad.mci_enc = mci_ecb_3des_enc_array;
979 ad.mci_dec = mci_ecb_3des_dec_array;
981 return sa_cipher_setkey(tfm, key, keylen, &ad);
984 static void sa_sync_from_device(struct sa_rx_data *rxd)
986 struct sg_table *sgt;
988 if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
989 sgt = &rxd->mapped_sg[0].sgt;
991 sgt = &rxd->mapped_sg[1].sgt;
993 dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
996 static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
1000 for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
1001 struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];
1003 if (mapped_sg->mapped) {
1004 dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
1006 kfree(mapped_sg->split_sg);
1013 static void sa_aes_dma_in_callback(void *data)
1015 struct sa_rx_data *rxd = (struct sa_rx_data *)data;
1016 struct skcipher_request *req;
1022 sa_sync_from_device(rxd);
1023 req = container_of(rxd->req, struct skcipher_request, base);
1026 mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl,
1028 result = (u32 *)req->iv;
1030 for (i = 0; i < (rxd->enc_iv_size / 4); i++)
1031 result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
1034 sa_free_sa_rx_data(rxd);
1036 skcipher_request_complete(req, 0);
1040 sa_prepare_tx_desc(u32 *mdptr, u32 pslen, u32 *psdata, u32 epiblen, u32 *epib)
1045 for (out = mdptr, in = epib, i = 0; i < epiblen / sizeof(u32); i++)
1048 mdptr[4] = (0xFFFF << 16);
1049 for (out = &mdptr[5], in = psdata, i = 0;
1050 i < pslen / sizeof(u32); i++)
1054 static int sa_run(struct sa_req *req)
1056 struct sa_rx_data *rxd;
1058 u32 cmdl[SA_MAX_CMDL_WORDS];
1059 struct sa_crypto_data *pdata = dev_get_drvdata(sa_k3_dev);
1060 struct device *ddev;
1061 struct dma_chan *dma_rx;
1062 int sg_nents, src_nents, dst_nents;
1063 struct scatterlist *src, *dst;
1064 size_t pl, ml, split_size;
1065 struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
1067 struct dma_async_tx_descriptor *tx_out;
1070 enum dma_data_direction dir_src;
1071 struct sa_mapped_sg *mapped_sg;
1073 gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
1074 GFP_KERNEL : GFP_ATOMIC;
1076 rxd = kzalloc(sizeof(*rxd), gfp_flags);
1080 if (req->src != req->dst) {
1082 dir_src = DMA_TO_DEVICE;
1085 dir_src = DMA_BIDIRECTIONAL;
1089 * SA2UL has an interesting feature where the receive DMA channel
1090 * is selected based on the data passed to the engine. Within the
1091 * transition range, there is also a space where it is impossible
1092 * to determine where the data will end up, and this should be
1093 * avoided. This will be handled by the SW fallback mechanism by
1094 * the individual algorithm implementations.
1096 if (req->size >= 256)
1097 dma_rx = pdata->dma_rx2;
1099 dma_rx = pdata->dma_rx1;
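	/*
	 * Note (illustrative): requests sized inside the ambiguous window
	 * never reach this point; sa_cipher_run(), sa_sha_run() and
	 * sa_aead_run() below divert SA_UNSAFE_DATA_SZ_MIN..MAX and
	 * > SA_MAX_DATA_SZ requests to the software fallback first.
	 */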
1101 ddev = dma_rx->device->dev;
1104 memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);
1106 sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);
1108 if (req->type != CRYPTO_ALG_TYPE_AHASH) {
1111 (SA_REQ_SUBTYPE_ENC << SA_REQ_SUBTYPE_SHIFT);
1114 (SA_REQ_SUBTYPE_DEC << SA_REQ_SUBTYPE_SHIFT);
1117 cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;
1120 * Map the packets, first we check if the data fits into a single
1121 * sg entry and use that if possible. If it does not fit, we check
1122 * if we need to do sg_split to align the scatterlist data on the
1123 * actual data size being processed by the crypto engine.
1126 sg_nents = sg_nents_for_len(src, req->size);
1128 split_size = req->size;
1130 mapped_sg = &rxd->mapped_sg[0];
1131 if (sg_nents == 1 && split_size <= req->src->length) {
1132 src = &mapped_sg->static_sg;
1134 sg_init_table(src, 1);
1135 sg_set_page(src, sg_page(req->src), split_size,
1138 mapped_sg->sgt.sgl = src;
1139 mapped_sg->sgt.orig_nents = src_nents;
1140 ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
1144 mapped_sg->dir = dir_src;
1145 mapped_sg->mapped = true;
1147 mapped_sg->sgt.sgl = req->src;
1148 mapped_sg->sgt.orig_nents = sg_nents;
1149 ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
1153 mapped_sg->dir = dir_src;
1154 mapped_sg->mapped = true;
1156 ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
1157 &split_size, &src, &src_nents, gfp_flags);
1159 src_nents = mapped_sg->sgt.nents;
1160 src = mapped_sg->sgt.sgl;
1162 mapped_sg->split_sg = src;
1166 dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);
1169 dst_nents = src_nents;
1172 dst_nents = sg_nents_for_len(req->dst, req->size);
1173 mapped_sg = &rxd->mapped_sg[1];
1175 if (dst_nents == 1 && split_size <= req->dst->length) {
1176 dst = &mapped_sg->static_sg;
1178 sg_init_table(dst, 1);
1179 sg_set_page(dst, sg_page(req->dst), split_size,
1182 mapped_sg->sgt.sgl = dst;
1183 mapped_sg->sgt.orig_nents = dst_nents;
1184 ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
1185 DMA_FROM_DEVICE, 0);
1189 mapped_sg->dir = DMA_FROM_DEVICE;
1190 mapped_sg->mapped = true;
1192 mapped_sg->sgt.sgl = req->dst;
1193 mapped_sg->sgt.orig_nents = dst_nents;
1194 ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
1195 DMA_FROM_DEVICE, 0);
1199 mapped_sg->dir = DMA_FROM_DEVICE;
1200 mapped_sg->mapped = true;
1202 ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
1203 0, 1, &split_size, &dst, &dst_nents,
1206 dst_nents = mapped_sg->sgt.nents;
1207 dst = mapped_sg->sgt.sgl;
1209 mapped_sg->split_sg = dst;
1214 rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
1216 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1218 dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
1223 rxd->req = (void *)req->base;
1224 rxd->enc = req->enc;
1225 rxd->iv_idx = req->ctx->iv_idx;
1226 rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
1227 rxd->tx_in->callback = req->callback;
1228 rxd->tx_in->callback_param = rxd;
1230 tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src,
1231 src_nents, DMA_MEM_TO_DEV,
1232 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1235 dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
1241 * Prepare metadata for DMA engine. This essentially describes the
1242 * crypto algorithm to be used, data sizes, different keys etc.
1244 mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(tx_out, &pl, &ml);
1246 sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
1247 sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
1250 ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
1251 dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);
1253 dmaengine_submit(tx_out);
1254 dmaengine_submit(rxd->tx_in);
1256 dma_async_issue_pending(dma_rx);
1257 dma_async_issue_pending(pdata->dma_tx);
1259 return -EINPROGRESS;
1262 sa_free_sa_rx_data(rxd);
1267 static int sa_cipher_run(struct skcipher_request *req, u8 *iv, int enc)
1269 struct sa_tfm_ctx *ctx =
1270 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
1271 struct crypto_alg *alg = req->base.tfm->__crt_alg;
1272 struct sa_req sa_req = { 0 };
1278 if (req->cryptlen % alg->cra_blocksize)
1281 /* Use SW fallback if the data size is not supported */
1282 if (req->cryptlen > SA_MAX_DATA_SZ ||
1283 (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
1284 req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
1285 SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback.skcipher);
1287 skcipher_request_set_sync_tfm(subreq, ctx->fallback.skcipher);
1288 skcipher_request_set_callback(subreq, req->base.flags,
1290 skcipher_request_set_crypt(subreq, req->src, req->dst,
1291 req->cryptlen, req->iv);
1293 ret = crypto_skcipher_encrypt(subreq);
1295 ret = crypto_skcipher_decrypt(subreq);
1297 skcipher_request_zero(subreq);
1301 sa_req.size = req->cryptlen;
1302 sa_req.enc_size = req->cryptlen;
1303 sa_req.src = req->src;
1304 sa_req.dst = req->dst;
1306 sa_req.type = CRYPTO_ALG_TYPE_SKCIPHER;
1308 sa_req.callback = sa_aes_dma_in_callback;
1309 sa_req.mdata_size = 44;
1310 sa_req.base = &req->base;
1313 return sa_run(&sa_req);
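/*
 * Usage sketch (illustrative, not part of the driver): once registered,
 * the hardware path is reached through the normal crypto API, e.g.:
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	crypto_skcipher_encrypt(req);	// lands in sa_encrypt() when
 *					// "cbc-aes-sa2ul" wins priority
 */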
1316 static int sa_encrypt(struct skcipher_request *req)
1318 return sa_cipher_run(req, req->iv, 1);
1321 static int sa_decrypt(struct skcipher_request *req)
1323 return sa_cipher_run(req, req->iv, 0);
1326 static void sa_sha_dma_in_callback(void *data)
1328 struct sa_rx_data *rxd = (struct sa_rx_data *)data;
1329 struct ahash_request *req;
1330 struct crypto_ahash *tfm;
1331 unsigned int authsize;
1337 sa_sync_from_device(rxd);
1338 req = container_of(rxd->req, struct ahash_request, base);
1339 tfm = crypto_ahash_reqtfm(req);
1340 authsize = crypto_ahash_digestsize(tfm);
1342 mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
1343 result = (u32 *)req->result;
1345 for (i = 0; i < (authsize / 4); i++)
1346 result[i] = be32_to_cpu(mdptr[i + 4]);
1348 sa_free_sa_rx_data(rxd);
1350 ahash_request_complete(req, 0);
1353 static int zero_message_process(struct ahash_request *req)
1355 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1356 int sa_digest_size = crypto_ahash_digestsize(tfm);
1358 switch (sa_digest_size) {
1359 case SHA1_DIGEST_SIZE:
1360 memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
1362 case SHA256_DIGEST_SIZE:
1363 memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
1365 case SHA512_DIGEST_SIZE:
1366 memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
1375 static int sa_sha_run(struct ahash_request *req)
1377 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
1378 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1379 struct sa_req sa_req = { 0 };
1382 auth_len = req->nbytes;
1385 return zero_message_process(req);
1387 if (auth_len > SA_MAX_DATA_SZ ||
1388 (auth_len >= SA_UNSAFE_DATA_SZ_MIN &&
1389 auth_len <= SA_UNSAFE_DATA_SZ_MAX)) {
1390 struct ahash_request *subreq = &rctx->fallback_req;
1393 ahash_request_set_tfm(subreq, ctx->fallback.ahash);
1394 subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1396 crypto_ahash_init(subreq);
1398 subreq->nbytes = auth_len;
1399 subreq->src = req->src;
1400 subreq->result = req->result;
1402 ret |= crypto_ahash_update(subreq);
1406 ret |= crypto_ahash_final(subreq);
1411 sa_req.size = auth_len;
1412 sa_req.auth_size = auth_len;
1413 sa_req.src = req->src;
1414 sa_req.dst = req->src;
1416 sa_req.type = CRYPTO_ALG_TYPE_AHASH;
1417 sa_req.callback = sa_sha_dma_in_callback;
1418 sa_req.mdata_size = 28;
1420 sa_req.base = &req->base;
1422 return sa_run(&sa_req);
1425 static int sa_sha_setup(struct sa_tfm_ctx *ctx, struct algo_data *ad)
1427 int bs = crypto_shash_blocksize(ctx->shash);
1429 struct sa_cmdl_cfg cfg;
1431 ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
1432 ad->auth_eng.eng_id = SA_ENG_ID_AM1;
1433 ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
1435 memset(ctx->authkey, 0, bs);
1436 memset(&cfg, 0, sizeof(cfg));
1437 cfg.aalg = ad->aalg_id;
1438 cfg.enc_eng_id = ad->enc_eng.eng_id;
1439 cfg.auth_eng_id = ad->auth_eng.eng_id;
1444 /* Setup Encryption Security Context & Command label template */
1445 if (sa_init_sc(&ctx->enc, NULL, 0, NULL, 0, ad, 0,
1449 cmdl_len = sa_format_cmdl_gen(&cfg,
1450 (u8 *)ctx->enc.cmdl,
1451 &ctx->enc.cmdl_upd_info);
1452 if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1455 ctx->enc.cmdl_size = cmdl_len;
1460 dev_err(sa_k3_dev, "%s: badkey\n", __func__);
1464 static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
1466 struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1467 struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1470 memset(ctx, 0, sizeof(*ctx));
1471 ctx->dev_data = data;
1472 ret = sa_init_ctx_info(&ctx->enc, data);
1477 ctx->shash = crypto_alloc_shash(alg_base, 0,
1478 CRYPTO_ALG_NEED_FALLBACK);
1479 if (IS_ERR(ctx->shash)) {
1480 dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n",
1482 return PTR_ERR(ctx->shash);
1485 ctx->fallback.ahash =
1486 crypto_alloc_ahash(alg_base, 0,
1487 CRYPTO_ALG_NEED_FALLBACK);
1488 if (IS_ERR(ctx->fallback.ahash)) {
1489 dev_err(ctx->dev_data->dev,
1490 "Could not load fallback driver\n");
1491 return PTR_ERR(ctx->fallback.ahash);
1495 dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1496 __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1497 ctx->dec.sc_id, &ctx->dec.sc_phys);
1499 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
1500 sizeof(struct sa_sha_req_ctx) +
1501 crypto_ahash_reqsize(ctx->fallback.ahash));
1506 static int sa_sha_digest(struct ahash_request *req)
1508 return sa_sha_run(req);
1511 static int sa_sha_init(struct ahash_request *req)
1513 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1514 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1515 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1517 dev_dbg(sa_k3_dev, "init: digest size: %u, rctx=%p\n",
1518 crypto_ahash_digestsize(tfm), rctx);
1520 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1521 rctx->fallback_req.base.flags =
1522 req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1524 return crypto_ahash_init(&rctx->fallback_req);
1527 static int sa_sha_update(struct ahash_request *req)
1529 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1530 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1531 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1533 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1534 rctx->fallback_req.base.flags =
1535 req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1536 rctx->fallback_req.nbytes = req->nbytes;
1537 rctx->fallback_req.src = req->src;
1539 return crypto_ahash_update(&rctx->fallback_req);
1542 static int sa_sha_final(struct ahash_request *req)
1544 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1545 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1546 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1548 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1549 rctx->fallback_req.base.flags =
1550 req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1551 rctx->fallback_req.result = req->result;
1553 return crypto_ahash_final(&rctx->fallback_req);
1556 static int sa_sha_finup(struct ahash_request *req)
1558 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1559 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1560 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1562 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1563 rctx->fallback_req.base.flags =
1564 req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1566 rctx->fallback_req.nbytes = req->nbytes;
1567 rctx->fallback_req.src = req->src;
1568 rctx->fallback_req.result = req->result;
1570 return crypto_ahash_finup(&rctx->fallback_req);
1573 static int sa_sha_import(struct ahash_request *req, const void *in)
1575 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1576 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1577 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1579 ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1580 rctx->fallback_req.base.flags = req->base.flags &
1581 CRYPTO_TFM_REQ_MAY_SLEEP;
1583 return crypto_ahash_import(&rctx->fallback_req, in);
1586 static int sa_sha_export(struct ahash_request *req, void *out)
1588 struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1589 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1590 struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1591 struct ahash_request *subreq = &rctx->fallback_req;
1593 ahash_request_set_tfm(subreq, ctx->fallback.ahash);
1594 subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1596 return crypto_ahash_export(subreq, out);
1599 static int sa_sha1_cra_init(struct crypto_tfm *tfm)
1601 struct algo_data ad = { 0 };
1602 struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1604 sa_sha_cra_init_alg(tfm, "sha1");
1606 ad.aalg_id = SA_AALG_ID_SHA1;
1607 ad.hash_size = SHA1_DIGEST_SIZE;
1608 ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
1610 sa_sha_setup(ctx, &ad);
1615 static int sa_sha256_cra_init(struct crypto_tfm *tfm)
1617 struct algo_data ad = { 0 };
1618 struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1620 sa_sha_cra_init_alg(tfm, "sha256");
1622 ad.aalg_id = SA_AALG_ID_SHA2_256;
1623 ad.hash_size = SHA256_DIGEST_SIZE;
1624 ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
1626 sa_sha_setup(ctx, &ad);
1631 static int sa_sha512_cra_init(struct crypto_tfm *tfm)
1633 struct algo_data ad = { 0 };
1634 struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1636 sa_sha_cra_init_alg(tfm, "sha512");
1638 ad.aalg_id = SA_AALG_ID_SHA2_512;
1639 ad.hash_size = SHA512_DIGEST_SIZE;
1640 ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA512;
1642 sa_sha_setup(ctx, &ad);
1647 static void sa_sha_cra_exit(struct crypto_tfm *tfm)
1649 struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1650 struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1652 dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1653 __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1654 ctx->dec.sc_id, &ctx->dec.sc_phys);
1656 if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AHASH)
1657 sa_free_ctx_info(&ctx->enc, data);
1659 crypto_free_shash(ctx->shash);
1660 crypto_free_ahash(ctx->fallback.ahash);
1663 static void sa_aead_dma_in_callback(void *data)
1665 struct sa_rx_data *rxd = (struct sa_rx_data *)data;
1666 struct aead_request *req;
1667 struct crypto_aead *tfm;
1669 unsigned int authsize;
1670 u8 auth_tag[SA_MAX_AUTH_TAG_SZ];
1677 sa_sync_from_device(rxd);
1678 req = container_of(rxd->req, struct aead_request, base);
1679 tfm = crypto_aead_reqtfm(req);
1680 start = req->assoclen + req->cryptlen;
1681 authsize = crypto_aead_authsize(tfm);
1683 mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
1684 for (i = 0; i < (authsize / 4); i++)
1685 mdptr[i + 4] = swab32(mdptr[i + 4]);
1687 auth_len = req->assoclen + req->cryptlen;
1690 scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
1693 auth_len -= authsize;
1695 scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
1698 err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
1701 sa_free_sa_rx_data(rxd);
1703 aead_request_complete(req, err);
1706 static int sa_cra_init_aead(struct crypto_aead *tfm, const char *hash,
1707 const char *fallback)
1709 struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1710 struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1713 memzero_explicit(ctx, sizeof(*ctx));
1715 ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
1716 if (IS_ERR(ctx->shash)) {
1717 dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n", hash);
1718 return PTR_ERR(ctx->shash);
1721 ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
1722 CRYPTO_ALG_NEED_FALLBACK);
1724 if (IS_ERR(ctx->fallback.aead)) {
1725 dev_err(sa_k3_dev, "fallback driver %s couldn't be loaded\n",
1727 return PTR_ERR(ctx->fallback.aead);
1730 crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
1731 crypto_aead_reqsize(ctx->fallback.aead));
1733 ret = sa_init_ctx_info(&ctx->enc, data);
1737 ret = sa_init_ctx_info(&ctx->dec, data);
1739 sa_free_ctx_info(&ctx->enc, data);
1743 dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1744 __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1745 ctx->dec.sc_id, &ctx->dec.sc_phys);
1750 static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
1752 return sa_cra_init_aead(tfm, "sha1",
1753 "authenc(hmac(sha1-ce),cbc(aes-ce))");
1756 static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
1758 return sa_cra_init_aead(tfm, "sha256",
1759 "authenc(hmac(sha256-ce),cbc(aes-ce))");
1762 static void sa_exit_tfm_aead(struct crypto_aead *tfm)
1764 struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1765 struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1767 crypto_free_shash(ctx->shash);
1768 crypto_free_aead(ctx->fallback.aead);
1770 sa_free_ctx_info(&ctx->enc, data);
1771 sa_free_ctx_info(&ctx->dec, data);
1774 /* AEAD algorithm configuration interface function */
1775 static int sa_aead_setkey(struct crypto_aead *authenc,
1776 const u8 *key, unsigned int keylen,
1777 struct algo_data *ad)
1779 struct sa_tfm_ctx *ctx = crypto_aead_ctx(authenc);
1780 struct crypto_authenc_keys keys;
1782 struct sa_cmdl_cfg cfg;
1785 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1788 /* Convert the key size (16/24/32) to the key size index (0/1/2) */
1789 key_idx = (keys.enckeylen >> 3) - 2;
1794 ad->enc_eng.eng_id = SA_ENG_ID_EM1;
1795 ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
1796 ad->auth_eng.eng_id = SA_ENG_ID_AM1;
1797 ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
1798 ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
1799 ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
1801 ad->keyed_mac = true;
1802 ad->ealg_id = SA_EALG_ID_AES_CBC;
1803 ad->prep_iopad = sa_prepare_iopads;
1805 memset(&cfg, 0, sizeof(cfg));
1807 cfg.aalg = ad->aalg_id;
1808 cfg.enc_eng_id = ad->enc_eng.eng_id;
1809 cfg.auth_eng_id = ad->auth_eng.eng_id;
1810 cfg.iv_size = crypto_aead_ivsize(authenc);
1811 cfg.akey = keys.authkey;
1812 cfg.akey_len = keys.authkeylen;
1814 /* Setup Encryption Security Context & Command label template */
1815 if (sa_init_sc(&ctx->enc, keys.enckey, keys.enckeylen,
1816 keys.authkey, keys.authkeylen,
1817 ad, 1, &ctx->enc.epib[1]))
1820 cmdl_len = sa_format_cmdl_gen(&cfg,
1821 (u8 *)ctx->enc.cmdl,
1822 &ctx->enc.cmdl_upd_info);
1823 if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1826 ctx->enc.cmdl_size = cmdl_len;
1828 /* Setup Decryption Security Context & Command label template */
1829 if (sa_init_sc(&ctx->dec, keys.enckey, keys.enckeylen,
1830 keys.authkey, keys.authkeylen,
1831 ad, 0, &ctx->dec.epib[1]))
1835 cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
1836 &ctx->dec.cmdl_upd_info);
1838 if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1841 ctx->dec.cmdl_size = cmdl_len;
1843 crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
1844 crypto_aead_set_flags(ctx->fallback.aead,
1845 crypto_aead_get_flags(authenc) &
1846 CRYPTO_TFM_REQ_MASK);
1847 crypto_aead_setkey(ctx->fallback.aead, key, keylen);
1852 static int sa_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
1854 struct sa_tfm_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));
1856 return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
1859 static int sa_aead_cbc_sha1_setkey(struct crypto_aead *authenc,
1860 const u8 *key, unsigned int keylen)
1862 struct algo_data ad = { 0 };
1864 ad.ealg_id = SA_EALG_ID_AES_CBC;
1865 ad.aalg_id = SA_AALG_ID_HMAC_SHA1;
1866 ad.hash_size = SHA1_DIGEST_SIZE;
1867 ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
1869 return sa_aead_setkey(authenc, key, keylen, &ad);
1872 static int sa_aead_cbc_sha256_setkey(struct crypto_aead *authenc,
1873 const u8 *key, unsigned int keylen)
1875 struct algo_data ad = { 0 };
1877 ad.ealg_id = SA_EALG_ID_AES_CBC;
1878 ad.aalg_id = SA_AALG_ID_HMAC_SHA2_256;
1879 ad.hash_size = SHA256_DIGEST_SIZE;
1880 ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
1882 return sa_aead_setkey(authenc, key, keylen, &ad);
1885 static int sa_aead_run(struct aead_request *req, u8 *iv, int enc)
1887 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1888 struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1889 struct sa_req sa_req = { 0 };
1890 size_t auth_size, enc_size;
1892 enc_size = req->cryptlen;
1893 auth_size = req->assoclen + req->cryptlen;
1896 enc_size -= crypto_aead_authsize(tfm);
1897 auth_size -= crypto_aead_authsize(tfm);
1900 if (auth_size > SA_MAX_DATA_SZ ||
1901 (auth_size >= SA_UNSAFE_DATA_SZ_MIN &&
1902 auth_size <= SA_UNSAFE_DATA_SZ_MAX)) {
1903 struct aead_request *subreq = aead_request_ctx(req);
1906 aead_request_set_tfm(subreq, ctx->fallback.aead);
1907 aead_request_set_callback(subreq, req->base.flags,
1908 req->base.complete, req->base.data);
1909 aead_request_set_crypt(subreq, req->src, req->dst,
1910 req->cryptlen, req->iv);
1911 aead_request_set_ad(subreq, req->assoclen);
1913 ret = enc ? crypto_aead_encrypt(subreq) :
1914 crypto_aead_decrypt(subreq);
1918 sa_req.enc_offset = req->assoclen;
1919 sa_req.enc_size = enc_size;
1920 sa_req.auth_size = auth_size;
1921 sa_req.size = auth_size;
1923 sa_req.type = CRYPTO_ALG_TYPE_AEAD;
1925 sa_req.callback = sa_aead_dma_in_callback;
1926 sa_req.mdata_size = 52;
1927 sa_req.base = &req->base;
1929 sa_req.src = req->src;
1930 sa_req.dst = req->dst;
1932 return sa_run(&sa_req);
1935 /* AEAD algorithm encrypt interface function */
1936 static int sa_aead_encrypt(struct aead_request *req)
1938 return sa_aead_run(req, req->iv, 1);
1941 /* AEAD algorithm decrypt interface function */
1942 static int sa_aead_decrypt(struct aead_request *req)
1944 return sa_aead_run(req, req->iv, 0);
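/*
 * Usage sketch (illustrative): the AEAD path mirrors the skcipher one,
 * e.g. crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0). The key
 * blob passed to setkey must be crypto_authenc RTA-encoded so that
 * crypto_authenc_extractkeys() in sa_aead_setkey() can split it into
 * the authentication and encryption keys.
 */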
1947 static struct sa_alg_tmpl sa_algs[] = {
1949 .type = CRYPTO_ALG_TYPE_SKCIPHER,
1951 .base.cra_name = "cbc(aes)",
1952 .base.cra_driver_name = "cbc-aes-sa2ul",
1953 .base.cra_priority = 30000,
1954 .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
1955 CRYPTO_ALG_KERN_DRIVER_ONLY |
1957 CRYPTO_ALG_NEED_FALLBACK,
1958 .base.cra_blocksize = AES_BLOCK_SIZE,
1959 .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
1960 .base.cra_module = THIS_MODULE,
1961 .init = sa_cipher_cra_init,
1962 .exit = sa_cipher_cra_exit,
1963 .min_keysize = AES_MIN_KEY_SIZE,
1964 .max_keysize = AES_MAX_KEY_SIZE,
1965 .ivsize = AES_BLOCK_SIZE,
1966 .setkey = sa_aes_cbc_setkey,
1967 .encrypt = sa_encrypt,
1968 .decrypt = sa_decrypt,
1972 .type = CRYPTO_ALG_TYPE_SKCIPHER,
1974 .base.cra_name = "ecb(aes)",
1975 .base.cra_driver_name = "ecb-aes-sa2ul",
1976 .base.cra_priority = 30000,
1977 .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
1978 CRYPTO_ALG_KERN_DRIVER_ONLY |
1980 CRYPTO_ALG_NEED_FALLBACK,
1981 .base.cra_blocksize = AES_BLOCK_SIZE,
1982 .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
1983 .base.cra_module = THIS_MODULE,
1984 .init = sa_cipher_cra_init,
1985 .exit = sa_cipher_cra_exit,
1986 .min_keysize = AES_MIN_KEY_SIZE,
1987 .max_keysize = AES_MAX_KEY_SIZE,
1988 .setkey = sa_aes_ecb_setkey,
1989 .encrypt = sa_encrypt,
1990 .decrypt = sa_decrypt,
1994 .type = CRYPTO_ALG_TYPE_SKCIPHER,
1996 .base.cra_name = "cbc(des3_ede)",
1997 .base.cra_driver_name = "cbc-des3-sa2ul",
1998 .base.cra_priority = 30000,
1999 .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
2000 CRYPTO_ALG_KERN_DRIVER_ONLY |
2002 CRYPTO_ALG_NEED_FALLBACK,
2003 .base.cra_blocksize = DES_BLOCK_SIZE,
2004 .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
2005 .base.cra_module = THIS_MODULE,
2006 .init = sa_cipher_cra_init,
2007 .exit = sa_cipher_cra_exit,
2008 .min_keysize = 3 * DES_KEY_SIZE,
2009 .max_keysize = 3 * DES_KEY_SIZE,
2010 .ivsize = DES_BLOCK_SIZE,
2011 .setkey = sa_3des_cbc_setkey,
2012 .encrypt = sa_encrypt,
2013 .decrypt = sa_decrypt,
2017 .type = CRYPTO_ALG_TYPE_SKCIPHER,
2019 .base.cra_name = "ecb(des3_ede)",
2020 .base.cra_driver_name = "ecb-des3-sa2ul",
2021 .base.cra_priority = 30000,
2022 .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
2023 CRYPTO_ALG_KERN_DRIVER_ONLY |
2025 CRYPTO_ALG_NEED_FALLBACK,
2026 .base.cra_blocksize = DES_BLOCK_SIZE,
2027 .base.cra_ctxsize = sizeof(struct sa_tfm_ctx),
2028 .base.cra_module = THIS_MODULE,
2029 .init = sa_cipher_cra_init,
2030 .exit = sa_cipher_cra_exit,
2031 .min_keysize = 3 * DES_KEY_SIZE,
2032 .max_keysize = 3 * DES_KEY_SIZE,
2033 .setkey = sa_3des_ecb_setkey,
2034 .encrypt = sa_encrypt,
2035 .decrypt = sa_decrypt,
2039 .type = CRYPTO_ALG_TYPE_AHASH,
2043 .cra_driver_name = "sha1-sa2ul",
2044 .cra_priority = 400,
2045 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2047 CRYPTO_ALG_KERN_DRIVER_ONLY |
2048 CRYPTO_ALG_NEED_FALLBACK,
2049 .cra_blocksize = SHA1_BLOCK_SIZE,
2050 .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2051 .cra_module = THIS_MODULE,
2052 .cra_init = sa_sha1_cra_init,
2053 .cra_exit = sa_sha_cra_exit,
2055 .halg.digestsize = SHA1_DIGEST_SIZE,
2056 .halg.statesize = sizeof(struct sa_sha_req_ctx) +
2057 sizeof(struct sha1_state),
2058 .init = sa_sha_init,
2059 .update = sa_sha_update,
2060 .final = sa_sha_final,
2061 .finup = sa_sha_finup,
2062 .digest = sa_sha_digest,
2063 .export = sa_sha_export,
2064 .import = sa_sha_import,
2068 .type = CRYPTO_ALG_TYPE_AHASH,
2071 .cra_name = "sha256",
2072 .cra_driver_name = "sha256-sa2ul",
2073 .cra_priority = 400,
2074 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2076 CRYPTO_ALG_KERN_DRIVER_ONLY |
2077 CRYPTO_ALG_NEED_FALLBACK,
2078 .cra_blocksize = SHA256_BLOCK_SIZE,
2079 .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2080 .cra_module = THIS_MODULE,
2081 .cra_init = sa_sha256_cra_init,
2082 .cra_exit = sa_sha_cra_exit,
2084 .halg.digestsize = SHA256_DIGEST_SIZE,
2085 .halg.statesize = sizeof(struct sa_sha_req_ctx) +
2086 sizeof(struct sha256_state),
2087 .init = sa_sha_init,
2088 .update = sa_sha_update,
2089 .final = sa_sha_final,
2090 .finup = sa_sha_finup,
2091 .digest = sa_sha_digest,
2092 .export = sa_sha_export,
2093 .import = sa_sha_import,
2097 .type = CRYPTO_ALG_TYPE_AHASH,
2100 .cra_name = "sha512",
2101 .cra_driver_name = "sha512-sa2ul",
2102 .cra_priority = 400,
2103 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2105 CRYPTO_ALG_KERN_DRIVER_ONLY |
2106 CRYPTO_ALG_NEED_FALLBACK,
2107 .cra_blocksize = SHA512_BLOCK_SIZE,
2108 .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2109 .cra_module = THIS_MODULE,
2110 .cra_init = sa_sha512_cra_init,
2111 .cra_exit = sa_sha_cra_exit,
2113 .halg.digestsize = SHA512_DIGEST_SIZE,
2114 .halg.statesize = sizeof(struct sa_sha_req_ctx) +
2115 sizeof(struct sha512_state),
2116 .init = sa_sha_init,
2117 .update = sa_sha_update,
2118 .final = sa_sha_final,
2119 .finup = sa_sha_finup,
2120 .digest = sa_sha_digest,
2121 .export = sa_sha_export,
2122 .import = sa_sha_import,
2126 .type = CRYPTO_ALG_TYPE_AEAD,
2129 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2131 "authenc(hmac(sha1),cbc(aes))-sa2ul",
2132 .cra_blocksize = AES_BLOCK_SIZE,
2133 .cra_flags = CRYPTO_ALG_TYPE_AEAD |
2134 CRYPTO_ALG_KERN_DRIVER_ONLY |
2136 CRYPTO_ALG_NEED_FALLBACK,
2137 .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2138 .cra_module = THIS_MODULE,
2139 .cra_priority = 3000,
2141 .ivsize = AES_BLOCK_SIZE,
2142 .maxauthsize = SHA1_DIGEST_SIZE,
2144 .init = sa_cra_init_aead_sha1,
2145 .exit = sa_exit_tfm_aead,
2146 .setkey = sa_aead_cbc_sha1_setkey,
2147 .setauthsize = sa_aead_setauthsize,
2148 .encrypt = sa_aead_encrypt,
2149 .decrypt = sa_aead_decrypt,
2153 .type = CRYPTO_ALG_TYPE_AEAD,
2156 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2158 "authenc(hmac(sha256),cbc(aes))-sa2ul",
2159 .cra_blocksize = AES_BLOCK_SIZE,
2160 .cra_flags = CRYPTO_ALG_TYPE_AEAD |
2161 CRYPTO_ALG_KERN_DRIVER_ONLY |
2163 CRYPTO_ALG_NEED_FALLBACK,
2164 .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2165 .cra_module = THIS_MODULE,
2167 .cra_priority = 3000,
2169 .ivsize = AES_BLOCK_SIZE,
2170 .maxauthsize = SHA256_DIGEST_SIZE,
2172 .init = sa_cra_init_aead_sha256,
2173 .exit = sa_exit_tfm_aead,
2174 .setkey = sa_aead_cbc_sha256_setkey,
2175 .setauthsize = sa_aead_setauthsize,
2176 .encrypt = sa_aead_encrypt,
2177 .decrypt = sa_aead_decrypt,
2182 /* Register the algorithms in the crypto framework */
2183 static void sa_register_algos(const struct device *dev)
2189 for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
2190 type = sa_algs[i].type;
2191 if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
2192 alg_name = sa_algs[i].alg.skcipher.base.cra_name;
2193 err = crypto_register_skcipher(&sa_algs[i].alg.skcipher);
2194 } else if (type == CRYPTO_ALG_TYPE_AHASH) {
2195 alg_name = sa_algs[i].alg.ahash.halg.base.cra_name;
2196 err = crypto_register_ahash(&sa_algs[i].alg.ahash);
2197 } else if (type == CRYPTO_ALG_TYPE_AEAD) {
2198 alg_name = sa_algs[i].alg.aead.base.cra_name;
2199 err = crypto_register_aead(&sa_algs[i].alg.aead);
2202 "unsupported crypto algorithm (%d)",
2208 dev_err(dev, "Failed to register '%s'\n", alg_name);
2210 sa_algs[i].registered = true;
2214 /* Unregister the algorithms from the crypto framework */
2215 static void sa_unregister_algos(const struct device *dev)
2220 for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
2221 type = sa_algs[i].type;
2222 if (!sa_algs[i].registered)
2224 if (type == CRYPTO_ALG_TYPE_SKCIPHER)
2225 crypto_unregister_skcipher(&sa_algs[i].alg.skcipher);
2226 else if (type == CRYPTO_ALG_TYPE_AHASH)
2227 crypto_unregister_ahash(&sa_algs[i].alg.ahash);
2228 else if (type == CRYPTO_ALG_TYPE_AEAD)
2229 crypto_unregister_aead(&sa_algs[i].alg.aead);
2231 sa_algs[i].registered = false;
2235 static int sa_init_mem(struct sa_crypto_data *dev_data)
2237 struct device *dev = &dev_data->pdev->dev;
2238 /* Setup dma pool for security context buffers */
2239 dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
2240 SA_CTX_MAX_SZ, 64, 0);
2241 if (!dev_data->sc_pool) {
2242 dev_err(dev, "Failed to create dma pool");
2249 static int sa_dma_init(struct sa_crypto_data *dd)
2252 struct dma_slave_config cfg;
2258 ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
2262 dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
2263 if (IS_ERR(dd->dma_rx1))
2264 return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
2265 "Unable to request rx1 DMA channel\n");
2267 dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
2268 if (IS_ERR(dd->dma_rx2)) {
2269 dma_release_channel(dd->dma_rx1);
2270 return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
2271 "Unable to request rx2 DMA channel\n");
2274 dd->dma_tx = dma_request_chan(dd->dev, "tx");
2275 if (IS_ERR(dd->dma_tx)) {
2276 ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
2277 "Unable to request tx DMA channel\n");
2281 memzero_explicit(&cfg, sizeof(cfg));
2283 cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2284 cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2285 cfg.src_maxburst = 4;
2286 cfg.dst_maxburst = 4;
2288 ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
2290 dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
2295 ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
2297 dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
2302 ret = dmaengine_slave_config(dd->dma_tx, &cfg);
2304 dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
2312 dma_release_channel(dd->dma_rx1);
2313 dma_release_channel(dd->dma_rx2);
2318 static int sa_link_child(struct device *dev, void *data)
2320 struct device *parent = data;
2322 device_link_add(dev, parent, DL_FLAG_AUTOPROBE_CONSUMER);
2327 static int sa_ul_probe(struct platform_device *pdev)
2329 struct device *dev = &pdev->dev;
2330 struct device_node *node = dev->of_node;
2331 struct resource *res;
2332 void __iomem *saul_base;
2333 struct sa_crypto_data *dev_data;
2337 dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL);
2342 dev_data->dev = dev;
2343 dev_data->pdev = pdev;
2344 platform_set_drvdata(pdev, dev_data);
2345 dev_set_drvdata(sa_k3_dev, dev_data);
2347 pm_runtime_enable(dev);
2348 ret = pm_runtime_get_sync(dev);
2350 dev_err(&pdev->dev, "%s: failed to get sync: %d\n", __func__,
2355 sa_init_mem(dev_data);
2356 ret = sa_dma_init(dev_data);
2358 goto disable_pm_runtime;
2360 spin_lock_init(&dev_data->scid_lock);
2361 res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
2362 saul_base = devm_ioremap_resource(dev, res);
2364 dev_data->base = saul_base;
2365 val = SA_EEC_ENCSS_EN | SA_EEC_AUTHSS_EN | SA_EEC_CTXCACH_EN |
2366 SA_EEC_CPPI_PORT_IN_EN | SA_EEC_CPPI_PORT_OUT_EN |
2369 writel_relaxed(val, saul_base + SA_ENGINE_ENABLE_CONTROL);
2371 sa_register_algos(dev);
2373 ret = of_platform_populate(node, NULL, NULL, &pdev->dev);
2377 device_for_each_child(&pdev->dev, &pdev->dev, sa_link_child);
2382 sa_unregister_algos(&pdev->dev);
2384 dma_release_channel(dev_data->dma_rx2);
2385 dma_release_channel(dev_data->dma_rx1);
2386 dma_release_channel(dev_data->dma_tx);
2388 dma_pool_destroy(dev_data->sc_pool);
2391 pm_runtime_put_sync(&pdev->dev);
2392 pm_runtime_disable(&pdev->dev);
2397 static int sa_ul_remove(struct platform_device *pdev)
2399 struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);
2401 sa_unregister_algos(&pdev->dev);
2403 dma_release_channel(dev_data->dma_rx2);
2404 dma_release_channel(dev_data->dma_rx1);
2405 dma_release_channel(dev_data->dma_tx);
2407 dma_pool_destroy(dev_data->sc_pool);
2409 platform_set_drvdata(pdev, NULL);
2411 pm_runtime_put_sync(&pdev->dev);
2412 pm_runtime_disable(&pdev->dev);
2417 static const struct of_device_id of_match[] = {
2418 {.compatible = "ti,j721e-sa2ul",},
2419 {.compatible = "ti,am654-sa2ul",},
2422 MODULE_DEVICE_TABLE(of, of_match);
2424 static struct platform_driver sa_ul_driver = {
2425 .probe = sa_ul_probe,
2426 .remove = sa_ul_remove,
2428 .name = "saul-crypto",
2429 .of_match_table = of_match,
2432 module_platform_driver(sa_ul_driver);
2433 MODULE_LICENSE("GPL v2");