// SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only)
/* Copyright(c) 2014 - 2020 Intel Corporation */
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/crypto.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/hash.h>
#include <crypto/hmac.h>
#include <crypto/algapi.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/dma-mapping.h>
#include "adf_accel_devices.h"
#include "adf_transport.h"
#include "adf_common_drv.h"
#include "qat_crypto.h"
#include "icp_qat_hw.h"
#include "icp_qat_fw.h"
#include "icp_qat_fw_la.h"

#define QAT_AES_HW_CONFIG_ENC(alg, mode) \
	ICP_QAT_HW_CIPHER_CONFIG_BUILD(mode, alg, \
				       ICP_QAT_HW_CIPHER_NO_CONVERT, \
				       ICP_QAT_HW_CIPHER_ENCRYPT)

#define QAT_AES_HW_CONFIG_DEC(alg, mode) \
	ICP_QAT_HW_CIPHER_CONFIG_BUILD(mode, alg, \
				       ICP_QAT_HW_CIPHER_KEY_CONVERT, \
				       ICP_QAT_HW_CIPHER_DECRYPT)

#define HW_CAP_AES_V2(accel_dev) \
	(GET_HW_DATA(accel_dev)->accel_capabilities_mask & \
	 ICP_ACCEL_CAPABILITIES_AES_V2)

static DEFINE_MUTEX(algs_lock);
static unsigned int active_devs;

struct qat_alg_buf {
	u32 len;
	u32 resrvd;
	u64 addr;
} __packed;

struct qat_alg_buf_list {
	u64 resrvd;
	u32 num_bufs;
	u32 num_mapped_bufs;
	struct qat_alg_buf bufers[];
} __packed __aligned(64);

/* Common content descriptor */
struct qat_alg_cd {
	union {
		struct qat_enc { /* Encrypt content desc */
			struct icp_qat_hw_cipher_algo_blk cipher;
			struct icp_qat_hw_auth_algo_blk hash;
		} qat_enc_cd;
		struct qat_dec { /* Decrypt content desc */
			struct icp_qat_hw_auth_algo_blk hash;
			struct icp_qat_hw_cipher_algo_blk cipher;
		} qat_dec_cd;
	};
} __aligned(64);

struct qat_alg_aead_ctx {
	struct qat_alg_cd *enc_cd;
	struct qat_alg_cd *dec_cd;
	dma_addr_t enc_cd_paddr;
	dma_addr_t dec_cd_paddr;
	struct icp_qat_fw_la_bulk_req enc_fw_req;
	struct icp_qat_fw_la_bulk_req dec_fw_req;
	struct crypto_shash *hash_tfm;
	enum icp_qat_hw_auth_algo qat_hash_alg;
	struct qat_crypto_instance *inst;
	union {
		struct sha1_state sha1;
		struct sha256_state sha256;
		struct sha512_state sha512;
	};
	char ipad[SHA512_BLOCK_SIZE]; /* sufficient for SHA-1/SHA-256 as well */
	char opad[SHA512_BLOCK_SIZE];
};

struct qat_alg_skcipher_ctx {
	struct icp_qat_hw_cipher_algo_blk *enc_cd;
	struct icp_qat_hw_cipher_algo_blk *dec_cd;
	dma_addr_t enc_cd_paddr;
	dma_addr_t dec_cd_paddr;
	struct icp_qat_fw_la_bulk_req enc_fw_req;
	struct icp_qat_fw_la_bulk_req dec_fw_req;
	struct qat_crypto_instance *inst;
	struct crypto_skcipher *ftfm;
	bool fallback;
	int mode;
};
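
/*
 * Return the size of the partial (inner) hash state for the given QAT
 * hash algorithm, as laid out in the hardware content descriptor.
 */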
static int qat_get_inter_state_size(enum icp_qat_hw_auth_algo qat_hash_alg)
{
	switch (qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		return ICP_QAT_HW_SHA1_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		return ICP_QAT_HW_SHA256_STATE1_SZ;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		return ICP_QAT_HW_SHA512_STATE1_SZ;
	default:
		return -EFAULT;
	}
	return -EFAULT;
}
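
/*
 * Precompute the HMAC inner and outer partial hashes for a session: the
 * auth key is XORed with the ipad/opad constants, one block of each is
 * hashed in software, and the exported midstates are written big endian
 * into the hardware content descriptor so the device can finish the HMAC
 * per request. The key-derived pads are wiped before returning.
 */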
static int qat_alg_do_precomputes(struct icp_qat_hw_auth_algo_blk *hash,
				  struct qat_alg_aead_ctx *ctx,
				  const u8 *auth_key,
				  unsigned int auth_keylen)
{
	SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
	int block_size = crypto_shash_blocksize(ctx->hash_tfm);
	int digest_size = crypto_shash_digestsize(ctx->hash_tfm);
	__be32 *hash_state_out;
	__be64 *hash512_state_out;
	int i, offset;

	memset(ctx->ipad, 0, block_size);
	memset(ctx->opad, 0, block_size);
	shash->tfm = ctx->hash_tfm;

	if (auth_keylen > block_size) {
		int ret = crypto_shash_digest(shash, auth_key,
					      auth_keylen, ctx->ipad);
		if (ret)
			return ret;

		memcpy(ctx->opad, ctx->ipad, digest_size);
	} else {
		memcpy(ctx->ipad, auth_key, auth_keylen);
		memcpy(ctx->opad, auth_key, auth_keylen);
	}

	for (i = 0; i < block_size; i++) {
		char *ipad_ptr = ctx->ipad + i;
		char *opad_ptr = ctx->opad + i;
		*ipad_ptr ^= HMAC_IPAD_VALUE;
		*opad_ptr ^= HMAC_OPAD_VALUE;
	}

	if (crypto_shash_init(shash))
		return -EFAULT;

	if (crypto_shash_update(shash, ctx->ipad, block_size))
		return -EFAULT;

	hash_state_out = (__be32 *)hash->sha.state1;
	hash512_state_out = (__be64 *)hash_state_out;

	switch (ctx->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (crypto_shash_export(shash, &ctx->sha1))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
			*hash_state_out = cpu_to_be32(ctx->sha1.state[i]);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (crypto_shash_export(shash, &ctx->sha256))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
			*hash_state_out = cpu_to_be32(ctx->sha256.state[i]);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (crypto_shash_export(shash, &ctx->sha512))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
			*hash512_state_out = cpu_to_be64(ctx->sha512.state[i]);
		break;
	default:
		return -EFAULT;
	}

	if (crypto_shash_init(shash))
		return -EFAULT;

	if (crypto_shash_update(shash, ctx->opad, block_size))
		return -EFAULT;

	offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
	if (offset < 0)
		return -EFAULT;

	hash_state_out = (__be32 *)(hash->sha.state1 + offset);
	hash512_state_out = (__be64 *)hash_state_out;

	switch (ctx->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		if (crypto_shash_export(shash, &ctx->sha1))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
			*hash_state_out = cpu_to_be32(ctx->sha1.state[i]);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		if (crypto_shash_export(shash, &ctx->sha256))
			return -EFAULT;
		for (i = 0; i < digest_size >> 2; i++, hash_state_out++)
			*hash_state_out = cpu_to_be32(ctx->sha256.state[i]);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		if (crypto_shash_export(shash, &ctx->sha512))
			return -EFAULT;
		for (i = 0; i < digest_size >> 3; i++, hash512_state_out++)
			*hash512_state_out = cpu_to_be64(ctx->sha512.state[i]);
		break;
	default:
		return -EFAULT;
	}
	memzero_explicit(ctx->ipad, block_size);
	memzero_explicit(ctx->opad, block_size);
	return 0;
}
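
/* Fill the request header fields common to all lookaside (LA) requests */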
static void qat_alg_init_common_hdr(struct icp_qat_fw_comn_req_hdr *header)
{
	header->hdr_flags =
		ICP_QAT_FW_COMN_HDR_FLAGS_BUILD(ICP_QAT_FW_COMN_REQ_FLAG_SET);
	header->service_type = ICP_QAT_FW_COMN_REQ_CPM_FW_LA;
	header->comn_req_flags =
		ICP_QAT_FW_COMN_FLAGS_BUILD(QAT_COMN_CD_FLD_TYPE_64BIT_ADR,
					    QAT_COMN_PTR_TYPE_SGL);
	ICP_QAT_FW_LA_PARTIAL_SET(header->serv_specif_flags,
				  ICP_QAT_FW_LA_PARTIAL_NONE);
	ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(header->serv_specif_flags,
					   ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
	ICP_QAT_FW_LA_PROTO_SET(header->serv_specif_flags,
				ICP_QAT_FW_LA_NO_PROTO);
	ICP_QAT_FW_LA_UPDATE_STATE_SET(header->serv_specif_flags,
				       ICP_QAT_FW_LA_NO_UPDATE_STATE);
}
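
/*
 * Build the encrypt session: a cipher-then-hash content descriptor
 * (cipher config and AES key followed by the auth setup) plus the
 * firmware request template reused for every encrypt operation.
 */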
static int qat_alg_aead_init_enc_session(struct crypto_aead *aead_tfm,
					 int alg,
					 struct crypto_authenc_keys *keys,
					 int mode)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(aead_tfm);
	unsigned int digestsize = crypto_aead_authsize(aead_tfm);
	struct qat_enc *enc_ctx = &ctx->enc_cd->qat_enc_cd;
	struct icp_qat_hw_cipher_algo_blk *cipher = &enc_ctx->cipher;
	struct icp_qat_hw_auth_algo_blk *hash =
		(struct icp_qat_hw_auth_algo_blk *)((char *)enc_ctx +
		sizeof(struct icp_qat_hw_auth_setup) + keys->enckeylen);
	struct icp_qat_fw_la_bulk_req *req_tmpl = &ctx->enc_fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;

	/* CD setup */
	cipher->aes.cipher_config.val = QAT_AES_HW_CONFIG_ENC(alg, mode);
	memcpy(cipher->aes.key, keys->enckey, keys->enckeylen);
	hash->sha.inner_setup.auth_config.config =
		ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
					     ctx->qat_hash_alg, digestsize);
	hash->sha.inner_setup.auth_counter.counter =
		cpu_to_be32(crypto_shash_blocksize(ctx->hash_tfm));

	if (qat_alg_do_precomputes(hash, ctx, keys->authkey, keys->authkeylen))
		return -EFAULT;

	/* Request setup */
	qat_alg_init_common_hdr(header);
	header->service_cmd_id = ICP_QAT_FW_LA_CMD_CIPHER_HASH;
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_DIGEST_IN_BUFFER);
	ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				   ICP_QAT_FW_LA_RET_AUTH_RES);
	ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				   ICP_QAT_FW_LA_NO_CMP_AUTH_RES);
	cd_pars->u.s.content_desc_addr = ctx->enc_cd_paddr;
	cd_pars->u.s.content_desc_params_sz = sizeof(struct qat_alg_cd) >> 3;

	/* Cipher CD config setup */
	cipher_cd_ctrl->cipher_key_sz = keys->enckeylen >> 3;
	cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3;
	cipher_cd_ctrl->cipher_cfg_offset = 0;
	ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl, ICP_QAT_FW_SLICE_CIPHER);
	ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl, ICP_QAT_FW_SLICE_AUTH);
	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = ((char *)hash - (char *)cipher) >> 3;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;

	switch (ctx->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		hash_cd_ctrl->inner_state1_sz =
			round_up(ICP_QAT_HW_SHA1_STATE1_SZ, 8);
		hash_cd_ctrl->inner_state2_sz =
			round_up(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		hash_cd_ctrl->inner_state1_sz = ICP_QAT_HW_SHA256_STATE1_SZ;
		hash_cd_ctrl->inner_state2_sz = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		hash_cd_ctrl->inner_state1_sz = ICP_QAT_HW_SHA512_STATE1_SZ;
		hash_cd_ctrl->inner_state2_sz = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	default:
		break;
	}
	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 round_up(hash_cd_ctrl->inner_state1_sz, 8)) >> 3);
	ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl, ICP_QAT_FW_SLICE_AUTH);
	ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl, ICP_QAT_FW_SLICE_DRAM_WR);
	return 0;
}
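
/*
 * Build the decrypt session: hash-then-cipher, with the auth block first
 * in the content descriptor and the cipher config placed after the two
 * digest-sized (rounded up to 8) HMAC state areas.
 */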
static int qat_alg_aead_init_dec_session(struct crypto_aead *aead_tfm,
					 int alg,
					 struct crypto_authenc_keys *keys,
					 int mode)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(aead_tfm);
	unsigned int digestsize = crypto_aead_authsize(aead_tfm);
	struct qat_dec *dec_ctx = &ctx->dec_cd->qat_dec_cd;
	struct icp_qat_hw_auth_algo_blk *hash = &dec_ctx->hash;
	struct icp_qat_hw_cipher_algo_blk *cipher =
		(struct icp_qat_hw_cipher_algo_blk *)((char *)dec_ctx +
		sizeof(struct icp_qat_hw_auth_setup) +
		roundup(crypto_shash_digestsize(ctx->hash_tfm), 8) * 2);
	struct icp_qat_fw_la_bulk_req *req_tmpl = &ctx->dec_fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req_tmpl->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req_tmpl->comn_hdr;
	void *ptr = &req_tmpl->cd_ctrl;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cipher_cd_ctrl = ptr;
	struct icp_qat_fw_auth_cd_ctrl_hdr *hash_cd_ctrl = ptr;
	struct icp_qat_fw_la_auth_req_params *auth_param =
		(struct icp_qat_fw_la_auth_req_params *)
		((char *)&req_tmpl->serv_specif_rqpars +
		sizeof(struct icp_qat_fw_la_cipher_req_params));

	/* CD setup */
	cipher->aes.cipher_config.val = QAT_AES_HW_CONFIG_DEC(alg, mode);
	memcpy(cipher->aes.key, keys->enckey, keys->enckeylen);
	hash->sha.inner_setup.auth_config.config =
		ICP_QAT_HW_AUTH_CONFIG_BUILD(ICP_QAT_HW_AUTH_MODE1,
					     ctx->qat_hash_alg,
					     digestsize);
	hash->sha.inner_setup.auth_counter.counter =
		cpu_to_be32(crypto_shash_blocksize(ctx->hash_tfm));

	if (qat_alg_do_precomputes(hash, ctx, keys->authkey, keys->authkeylen))
		return -EFAULT;

	/* Request setup */
	qat_alg_init_common_hdr(header);
	header->service_cmd_id = ICP_QAT_FW_LA_CMD_HASH_CIPHER;
	ICP_QAT_FW_LA_DIGEST_IN_BUFFER_SET(header->serv_specif_flags,
					   ICP_QAT_FW_LA_DIGEST_IN_BUFFER);
	ICP_QAT_FW_LA_RET_AUTH_SET(header->serv_specif_flags,
				   ICP_QAT_FW_LA_NO_RET_AUTH_RES);
	ICP_QAT_FW_LA_CMP_AUTH_SET(header->serv_specif_flags,
				   ICP_QAT_FW_LA_CMP_AUTH_RES);
	cd_pars->u.s.content_desc_addr = ctx->dec_cd_paddr;
	cd_pars->u.s.content_desc_params_sz = sizeof(struct qat_alg_cd) >> 3;

	/* Cipher CD config setup */
	cipher_cd_ctrl->cipher_key_sz = keys->enckeylen >> 3;
	cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3;
	cipher_cd_ctrl->cipher_cfg_offset =
		(sizeof(struct icp_qat_hw_auth_setup) +
		 roundup(crypto_shash_digestsize(ctx->hash_tfm), 8) * 2) >> 3;
	ICP_QAT_FW_COMN_CURR_ID_SET(cipher_cd_ctrl, ICP_QAT_FW_SLICE_CIPHER);
	ICP_QAT_FW_COMN_NEXT_ID_SET(cipher_cd_ctrl, ICP_QAT_FW_SLICE_DRAM_WR);

	/* Auth CD config setup */
	hash_cd_ctrl->hash_cfg_offset = 0;
	hash_cd_ctrl->hash_flags = ICP_QAT_FW_AUTH_HDR_FLAG_NO_NESTED;
	hash_cd_ctrl->inner_res_sz = digestsize;
	hash_cd_ctrl->final_sz = digestsize;

	switch (ctx->qat_hash_alg) {
	case ICP_QAT_HW_AUTH_ALGO_SHA1:
		hash_cd_ctrl->inner_state1_sz =
			round_up(ICP_QAT_HW_SHA1_STATE1_SZ, 8);
		hash_cd_ctrl->inner_state2_sz =
			round_up(ICP_QAT_HW_SHA1_STATE2_SZ, 8);
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA256:
		hash_cd_ctrl->inner_state1_sz = ICP_QAT_HW_SHA256_STATE1_SZ;
		hash_cd_ctrl->inner_state2_sz = ICP_QAT_HW_SHA256_STATE2_SZ;
		break;
	case ICP_QAT_HW_AUTH_ALGO_SHA512:
		hash_cd_ctrl->inner_state1_sz = ICP_QAT_HW_SHA512_STATE1_SZ;
		hash_cd_ctrl->inner_state2_sz = ICP_QAT_HW_SHA512_STATE2_SZ;
		break;
	default:
		break;
	}

	hash_cd_ctrl->inner_state2_offset = hash_cd_ctrl->hash_cfg_offset +
			((sizeof(struct icp_qat_hw_auth_setup) +
			 round_up(hash_cd_ctrl->inner_state1_sz, 8)) >> 3);
	auth_param->auth_res_sz = digestsize;
	ICP_QAT_FW_COMN_CURR_ID_SET(hash_cd_ctrl, ICP_QAT_FW_SLICE_AUTH);
	ICP_QAT_FW_COMN_NEXT_ID_SET(hash_cd_ctrl, ICP_QAT_FW_SLICE_CIPHER);
	return 0;
}
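
/*
 * Common skcipher session setup. On devices with the AES-V2 (UCS) slice,
 * CTR mode is routed to the UCS slice and the key is padded to a 16 byte
 * multiple as that slice requires.
 */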
static void qat_alg_skcipher_init_com(struct qat_alg_skcipher_ctx *ctx,
				      struct icp_qat_fw_la_bulk_req *req,
				      struct icp_qat_hw_cipher_algo_blk *cd,
				      const u8 *key, unsigned int keylen)
{
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req->cd_pars;
	struct icp_qat_fw_comn_req_hdr *header = &req->comn_hdr;
	struct icp_qat_fw_cipher_cd_ctrl_hdr *cd_ctrl = (void *)&req->cd_ctrl;
	bool aes_v2_capable = HW_CAP_AES_V2(ctx->inst->accel_dev);
	int mode = ctx->mode;

	qat_alg_init_common_hdr(header);
	header->service_cmd_id = ICP_QAT_FW_LA_CMD_CIPHER;
	cd_pars->u.s.content_desc_params_sz =
				sizeof(struct icp_qat_hw_cipher_algo_blk) >> 3;

	if (aes_v2_capable && mode == ICP_QAT_HW_CIPHER_CTR_MODE) {
		ICP_QAT_FW_LA_SLICE_TYPE_SET(header->serv_specif_flags,
					     ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE);
		keylen = round_up(keylen, 16);
		memcpy(cd->ucs_aes.key, key, keylen);
	} else {
		memcpy(cd->aes.key, key, keylen);
	}

	/* Cipher CD config setup */
	cd_ctrl->cipher_key_sz = keylen >> 3;
	cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3;
	cd_ctrl->cipher_cfg_offset = 0;
	ICP_QAT_FW_COMN_CURR_ID_SET(cd_ctrl, ICP_QAT_FW_SLICE_CIPHER);
	ICP_QAT_FW_COMN_NEXT_ID_SET(cd_ctrl, ICP_QAT_FW_SLICE_DRAM_WR);
}

static void qat_alg_skcipher_init_enc(struct qat_alg_skcipher_ctx *ctx,
				      int alg, const u8 *key,
				      unsigned int keylen, int mode)
{
	struct icp_qat_hw_cipher_algo_blk *enc_cd = ctx->enc_cd;
	struct icp_qat_fw_la_bulk_req *req = &ctx->enc_fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req->cd_pars;

	qat_alg_skcipher_init_com(ctx, req, enc_cd, key, keylen);
	cd_pars->u.s.content_desc_addr = ctx->enc_cd_paddr;
	enc_cd->aes.cipher_config.val = QAT_AES_HW_CONFIG_ENC(alg, mode);
}

static void qat_alg_skcipher_init_dec(struct qat_alg_skcipher_ctx *ctx,
				      int alg, const u8 *key,
				      unsigned int keylen, int mode)
{
	struct icp_qat_hw_cipher_algo_blk *dec_cd = ctx->dec_cd;
	struct icp_qat_fw_la_bulk_req *req = &ctx->dec_fw_req;
	struct icp_qat_fw_comn_req_hdr_cd_pars *cd_pars = &req->cd_pars;

	qat_alg_skcipher_init_com(ctx, req, dec_cd, key, keylen);
	cd_pars->u.s.content_desc_addr = ctx->dec_cd_paddr;

	if (mode != ICP_QAT_HW_CIPHER_CTR_MODE)
		dec_cd->aes.cipher_config.val =
					QAT_AES_HW_CONFIG_DEC(alg, mode);
	else
		dec_cd->aes.cipher_config.val =
					QAT_AES_HW_CONFIG_ENC(alg, mode);
}
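
/*
 * Validate the AES key length and pick the matching hardware algorithm.
 * XTS keys are double length because they carry the tweak key as well.
 */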
static int qat_alg_validate_key(int key_len, int *alg, int mode)
{
	if (mode != ICP_QAT_HW_CIPHER_XTS_MODE) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
			break;
		case AES_KEYSIZE_192:
			*alg = ICP_QAT_HW_CIPHER_ALGO_AES192;
			break;
		case AES_KEYSIZE_256:
			*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
			break;
		default:
			return -EINVAL;
		}
	} else {
		switch (key_len) {
		case AES_KEYSIZE_128 << 1:
			*alg = ICP_QAT_HW_CIPHER_ALGO_AES128;
			break;
		case AES_KEYSIZE_256 << 1:
			*alg = ICP_QAT_HW_CIPHER_ALGO_AES256;
			break;
		default:
			return -EINVAL;
		}
	}
	return 0;
}
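
/*
 * Split an authenc() key into its cipher and HMAC parts and program both
 * the encrypt and decrypt sessions. The key copy on the stack is zeroed
 * on every exit path.
 */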
static int qat_alg_aead_init_sessions(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen, int mode)
{
	struct crypto_authenc_keys keys;
	int alg;

	if (crypto_authenc_extractkeys(&keys, key, keylen))
		goto bad_key;

	if (qat_alg_validate_key(keys.enckeylen, &alg, mode))
		goto bad_key;

	if (qat_alg_aead_init_enc_session(tfm, alg, &keys, mode))
		goto error;

	if (qat_alg_aead_init_dec_session(tfm, alg, &keys, mode))
		goto error;

	memzero_explicit(&keys, sizeof(keys));
	return 0;
bad_key:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
error:
	memzero_explicit(&keys, sizeof(keys));
	return -EFAULT;
}

static int qat_alg_skcipher_init_sessions(struct qat_alg_skcipher_ctx *ctx,
					  const u8 *key, unsigned int keylen,
					  int mode)
{
	int alg;

	if (qat_alg_validate_key(keylen, &alg, mode))
		return -EINVAL;

	qat_alg_skcipher_init_enc(ctx, alg, key, keylen, mode);
	qat_alg_skcipher_init_dec(ctx, alg, key, keylen, mode);
	return 0;
}

static int qat_alg_aead_rekey(struct crypto_aead *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);

	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
	memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
	memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req));
	memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req));

	return qat_alg_aead_init_sessions(tfm, key, keylen,
					  ICP_QAT_HW_CIPHER_CBC_MODE);
}

static int qat_alg_aead_newkey(struct crypto_aead *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct qat_crypto_instance *inst = NULL;
	int node = get_current_node();
	struct device *dev;
	int ret;

	inst = qat_crypto_get_instance_node(node);
	if (!inst)
		return -EINVAL;
	dev = &GET_DEV(inst->accel_dev);
	ctx->inst = inst;
	ctx->enc_cd = dma_alloc_coherent(dev, sizeof(*ctx->enc_cd),
					 &ctx->enc_cd_paddr,
					 GFP_ATOMIC);
	if (!ctx->enc_cd) {
		ret = -ENOMEM;
		goto out_free_inst;
	}
	ctx->dec_cd = dma_alloc_coherent(dev, sizeof(*ctx->dec_cd),
					 &ctx->dec_cd_paddr,
					 GFP_ATOMIC);
	if (!ctx->dec_cd) {
		ret = -ENOMEM;
		goto out_free_enc;
	}

	ret = qat_alg_aead_init_sessions(tfm, key, keylen,
					 ICP_QAT_HW_CIPHER_CBC_MODE);
	if (ret)
		goto out_free_all;

	return 0;

out_free_all:
	memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd));
	dma_free_coherent(dev, sizeof(struct qat_alg_cd),
			  ctx->dec_cd, ctx->dec_cd_paddr);
	ctx->dec_cd = NULL;
out_free_enc:
	memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd));
	dma_free_coherent(dev, sizeof(struct qat_alg_cd),
			  ctx->enc_cd, ctx->enc_cd_paddr);
	ctx->enc_cd = NULL;
out_free_inst:
	ctx->inst = NULL;
	qat_crypto_put_instance(inst);
	return ret;
}

static int qat_alg_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);

	if (ctx->enc_cd)
		return qat_alg_aead_rekey(tfm, key, keylen);
	else
		return qat_alg_aead_newkey(tfm, key, keylen);
}
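
/*
 * Helpers translating scatterlists into the flat buffer lists consumed
 * by the firmware. Each SG entry is DMA mapped individually and the
 * buffer list itself is mapped as one block.
 */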
static void qat_alg_free_bufl(struct qat_crypto_instance *inst,
			      struct qat_crypto_request *qat_req)
{
	struct device *dev = &GET_DEV(inst->accel_dev);
	struct qat_alg_buf_list *bl = qat_req->buf.bl;
	struct qat_alg_buf_list *blout = qat_req->buf.blout;
	dma_addr_t blp = qat_req->buf.blp;
	dma_addr_t blpout = qat_req->buf.bloutp;
	size_t sz = qat_req->buf.sz;
	size_t sz_out = qat_req->buf.sz_out;
	int i;

	for (i = 0; i < bl->num_bufs; i++)
		dma_unmap_single(dev, bl->bufers[i].addr,
				 bl->bufers[i].len, DMA_BIDIRECTIONAL);

	dma_unmap_single(dev, blp, sz, DMA_TO_DEVICE);
	kfree(bl);
	if (blp != blpout) {
		/* If out of place operation dma unmap only data */
		int bufless = blout->num_bufs - blout->num_mapped_bufs;

		for (i = bufless; i < blout->num_bufs; i++) {
			dma_unmap_single(dev, blout->bufers[i].addr,
					 blout->bufers[i].len,
					 DMA_BIDIRECTIONAL);
		}
		dma_unmap_single(dev, blpout, sz_out, DMA_TO_DEVICE);
		kfree(blout);
	}
}

static int qat_alg_sgl_to_bufl(struct qat_crypto_instance *inst,
			       struct scatterlist *sgl,
			       struct scatterlist *sglout,
			       struct qat_crypto_request *qat_req)
{
	struct device *dev = &GET_DEV(inst->accel_dev);
	int i, sg_nctr = 0;
	int n = sg_nents(sgl);
	struct qat_alg_buf_list *bufl;
	struct qat_alg_buf_list *buflout = NULL;
	dma_addr_t blp;
	dma_addr_t bloutp = 0;
	struct scatterlist *sg;
	size_t sz_out, sz = struct_size(bufl, bufers, n + 1);

	if (unlikely(!n))
		return -EINVAL;

	bufl = kzalloc_node(sz, GFP_ATOMIC,
			    dev_to_node(&GET_DEV(inst->accel_dev)));
	if (unlikely(!bufl))
		return -ENOMEM;

	blp = dma_map_single(dev, bufl, sz, DMA_TO_DEVICE);
	if (unlikely(dma_mapping_error(dev, blp)))
		goto err_in;

	for_each_sg(sgl, sg, n, i) {
		int y = sg_nctr;

		if (!sg->length)
			continue;

		bufl->bufers[y].addr = dma_map_single(dev, sg_virt(sg),
						      sg->length,
						      DMA_BIDIRECTIONAL);
		bufl->bufers[y].len = sg->length;
		if (unlikely(dma_mapping_error(dev, bufl->bufers[y].addr)))
			goto err_in;
		sg_nctr++;
	}
	bufl->num_bufs = sg_nctr;
	qat_req->buf.bl = bufl;
	qat_req->buf.blp = blp;
	qat_req->buf.sz = sz;
	/* Handle out of place operation */
	if (sgl != sglout) {
		struct qat_alg_buf *bufers;

		n = sg_nents(sglout);
		sz_out = struct_size(buflout, bufers, n + 1);
		sg_nctr = 0;
		buflout = kzalloc_node(sz_out, GFP_ATOMIC,
				       dev_to_node(&GET_DEV(inst->accel_dev)));
		if (unlikely(!buflout))
			goto err_in;

		bloutp = dma_map_single(dev, buflout, sz_out, DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(dev, bloutp)))
			goto err_out;

		bufers = buflout->bufers;
		for_each_sg(sglout, sg, n, i) {
			int y = sg_nctr;

			if (!sg->length)
				continue;

			bufers[y].addr = dma_map_single(dev, sg_virt(sg),
							sg->length,
							DMA_BIDIRECTIONAL);
			if (unlikely(dma_mapping_error(dev, bufers[y].addr)))
				goto err_out;
			bufers[y].len = sg->length;
			sg_nctr++;
		}
		buflout->num_bufs = sg_nctr;
		buflout->num_mapped_bufs = sg_nctr;
		qat_req->buf.blout = buflout;
		qat_req->buf.bloutp = bloutp;
		qat_req->buf.sz_out = sz_out;
	} else {
		/* Otherwise set the src and dst to the same address */
		qat_req->buf.bloutp = qat_req->buf.blp;
		qat_req->buf.sz_out = 0;
	}
	return 0;

err_out:
	n = sg_nents(sglout);
	for (i = 0; i < n; i++)
		if (!dma_mapping_error(dev, buflout->bufers[i].addr))
			dma_unmap_single(dev, buflout->bufers[i].addr,
					 buflout->bufers[i].len,
					 DMA_BIDIRECTIONAL);
	if (!dma_mapping_error(dev, bloutp))
		dma_unmap_single(dev, bloutp, sz_out, DMA_TO_DEVICE);
	kfree(buflout);

err_in:
	n = sg_nents(sgl);
	for (i = 0; i < n; i++)
		if (!dma_mapping_error(dev, bufl->bufers[i].addr))
			dma_unmap_single(dev, bufl->bufers[i].addr,
					 bufl->bufers[i].len,
					 DMA_BIDIRECTIONAL);

	if (!dma_mapping_error(dev, blp))
		dma_unmap_single(dev, blp, sz, DMA_TO_DEVICE);
	kfree(bufl);

	dev_err(dev, "Failed to map buf for dma\n");
	return -ENOMEM;
}
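
/*
 * Completion handlers run from the response ring via qat_alg_callback():
 * they unmap the request buffers, translate the firmware status into an
 * errno and complete the crypto API request.
 */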
static void qat_aead_alg_callback(struct icp_qat_fw_la_resp *qat_resp,
				  struct qat_crypto_request *qat_req)
{
	struct qat_alg_aead_ctx *ctx = qat_req->aead_ctx;
	struct qat_crypto_instance *inst = ctx->inst;
	struct aead_request *areq = qat_req->aead_req;
	u8 stat_filed = qat_resp->comn_resp.comn_status;
	int res = 0, qat_res = ICP_QAT_FW_COMN_RESP_CRYPTO_STAT_GET(stat_filed);

	qat_alg_free_bufl(inst, qat_req);
	if (unlikely(qat_res != ICP_QAT_FW_COMN_STATUS_FLAG_OK))
		res = -EBADMSG;
	areq->base.complete(&areq->base, res);
}
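
/*
 * CTR mode IV update: advance the 128 bit counter by the number of
 * blocks processed, propagating the carry from the low into the high
 * 64 bit half on overflow.
 */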
static void qat_alg_update_iv_ctr_mode(struct qat_crypto_request *qat_req)
{
	struct skcipher_request *sreq = qat_req->skcipher_req;
	u64 iv_lo_prev;
	u64 iv_lo;
	u64 iv_hi;

	memcpy(qat_req->iv, sreq->iv, AES_BLOCK_SIZE);

	iv_lo = be64_to_cpu(qat_req->iv_lo);
	iv_hi = be64_to_cpu(qat_req->iv_hi);

	iv_lo_prev = iv_lo;
	iv_lo += DIV_ROUND_UP(sreq->cryptlen, AES_BLOCK_SIZE);
	if (iv_lo < iv_lo_prev)
		iv_hi++;

	qat_req->iv_lo = cpu_to_be64(iv_lo);
	qat_req->iv_hi = cpu_to_be64(iv_hi);
}

static void qat_alg_update_iv_cbc_mode(struct qat_crypto_request *qat_req)
{
	struct skcipher_request *sreq = qat_req->skcipher_req;
	int offset = sreq->cryptlen - AES_BLOCK_SIZE;
	struct scatterlist *sgl;

	if (qat_req->encryption)
		sgl = sreq->dst;
	else
		sgl = sreq->src;

	scatterwalk_map_and_copy(qat_req->iv, sgl, offset, AES_BLOCK_SIZE, 0);
}

static void qat_alg_update_iv(struct qat_crypto_request *qat_req)
{
	struct qat_alg_skcipher_ctx *ctx = qat_req->skcipher_ctx;
	struct device *dev = &GET_DEV(ctx->inst->accel_dev);

	switch (ctx->mode) {
	case ICP_QAT_HW_CIPHER_CTR_MODE:
		qat_alg_update_iv_ctr_mode(qat_req);
		break;
	case ICP_QAT_HW_CIPHER_CBC_MODE:
		qat_alg_update_iv_cbc_mode(qat_req);
		break;
	case ICP_QAT_HW_CIPHER_XTS_MODE:
		break;
	default:
		dev_warn(dev, "Unsupported IV update for cipher mode %d\n",
			 ctx->mode);
	}
}

static void qat_skcipher_alg_callback(struct icp_qat_fw_la_resp *qat_resp,
				      struct qat_crypto_request *qat_req)
{
	struct qat_alg_skcipher_ctx *ctx = qat_req->skcipher_ctx;
	struct qat_crypto_instance *inst = ctx->inst;
	struct skcipher_request *sreq = qat_req->skcipher_req;
	u8 stat_filed = qat_resp->comn_resp.comn_status;
	int res = 0, qat_res = ICP_QAT_FW_COMN_RESP_CRYPTO_STAT_GET(stat_filed);

	qat_alg_free_bufl(inst, qat_req);
	if (unlikely(qat_res != ICP_QAT_FW_COMN_STATUS_FLAG_OK))
		res = -EINVAL;

	if (qat_req->encryption)
		qat_alg_update_iv(qat_req);

	memcpy(sreq->iv, qat_req->iv, AES_BLOCK_SIZE);

	sreq->base.complete(&sreq->base, res);
}

void qat_alg_callback(void *resp)
{
	struct icp_qat_fw_la_resp *qat_resp = resp;
	struct qat_crypto_request *qat_req =
		(void *)(__force long)qat_resp->opaque_data;

	qat_req->cb(qat_resp, qat_req);
}
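
/*
 * AEAD decrypt: the ciphertext (cryptlen minus the digest) must be block
 * aligned for CBC. The session request template is copied, patched with
 * the per-request buffer addresses and IV, then submitted with a bounded
 * retry when the ring is full (-EAGAIN).
 */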
static int qat_alg_aead_dec(struct aead_request *areq)
{
	struct crypto_aead *aead_tfm = crypto_aead_reqtfm(areq);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead_tfm);
	struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm);
	struct qat_crypto_request *qat_req = aead_request_ctx(areq);
	struct icp_qat_fw_la_cipher_req_params *cipher_param;
	struct icp_qat_fw_la_auth_req_params *auth_param;
	struct icp_qat_fw_la_bulk_req *msg;
	int digst_size = crypto_aead_authsize(aead_tfm);
	int ret = 0, ctr = 0;
	u32 cipher_len;

	cipher_len = areq->cryptlen - digst_size;
	if (cipher_len % AES_BLOCK_SIZE != 0)
		return -EINVAL;

	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req);
	if (unlikely(ret))
		return ret;

	msg = &qat_req->req;
	*msg = ctx->dec_fw_req;
	qat_req->aead_ctx = ctx;
	qat_req->aead_req = areq;
	qat_req->cb = qat_aead_alg_callback;
	qat_req->req.comn_mid.opaque_data = (u64)(__force long)qat_req;
	qat_req->req.comn_mid.src_data_addr = qat_req->buf.blp;
	qat_req->req.comn_mid.dest_data_addr = qat_req->buf.bloutp;
	cipher_param = (void *)&qat_req->req.serv_specif_rqpars;
	cipher_param->cipher_length = cipher_len;
	cipher_param->cipher_offset = areq->assoclen;
	memcpy(cipher_param->u.cipher_IV_array, areq->iv, AES_BLOCK_SIZE);
	auth_param = (void *)((u8 *)cipher_param + sizeof(*cipher_param));
	auth_param->auth_off = 0;
	auth_param->auth_len = areq->assoclen + cipher_param->cipher_length;
	do {
		ret = adf_send_message(ctx->inst->sym_tx, (u32 *)msg);
	} while (ret == -EAGAIN && ctr++ < 10);

	if (ret == -EAGAIN) {
		qat_alg_free_bufl(ctx->inst, qat_req);
		return -EBUSY;
	}
	return -EINPROGRESS;
}

static int qat_alg_aead_enc(struct aead_request *areq)
{
	struct crypto_aead *aead_tfm = crypto_aead_reqtfm(areq);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead_tfm);
	struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm);
	struct qat_crypto_request *qat_req = aead_request_ctx(areq);
	struct icp_qat_fw_la_cipher_req_params *cipher_param;
	struct icp_qat_fw_la_auth_req_params *auth_param;
	struct icp_qat_fw_la_bulk_req *msg;
	u8 *iv = areq->iv;
	int ret = 0, ctr = 0;

	if (areq->cryptlen % AES_BLOCK_SIZE != 0)
		return -EINVAL;

	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req);
	if (unlikely(ret))
		return ret;

	msg = &qat_req->req;
	*msg = ctx->enc_fw_req;
	qat_req->aead_ctx = ctx;
	qat_req->aead_req = areq;
	qat_req->cb = qat_aead_alg_callback;
	qat_req->req.comn_mid.opaque_data = (u64)(__force long)qat_req;
	qat_req->req.comn_mid.src_data_addr = qat_req->buf.blp;
	qat_req->req.comn_mid.dest_data_addr = qat_req->buf.bloutp;
	cipher_param = (void *)&qat_req->req.serv_specif_rqpars;
	auth_param = (void *)((u8 *)cipher_param + sizeof(*cipher_param));

	memcpy(cipher_param->u.cipher_IV_array, iv, AES_BLOCK_SIZE);
	cipher_param->cipher_length = areq->cryptlen;
	cipher_param->cipher_offset = areq->assoclen;

	auth_param->auth_off = 0;
	auth_param->auth_len = areq->assoclen + areq->cryptlen;

	do {
		ret = adf_send_message(ctx->inst->sym_tx, (u32 *)msg);
	} while (ret == -EAGAIN && ctr++ < 10);

	if (ret == -EAGAIN) {
		qat_alg_free_bufl(ctx->inst, qat_req);
		return -EBUSY;
	}
	return -EINPROGRESS;
}

static int qat_alg_skcipher_rekey(struct qat_alg_skcipher_ctx *ctx,
				  const u8 *key, unsigned int keylen,
				  int mode)
{
	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
	memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
	memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req));
	memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req));

	return qat_alg_skcipher_init_sessions(ctx, key, keylen, mode);
}

static int qat_alg_skcipher_newkey(struct qat_alg_skcipher_ctx *ctx,
				   const u8 *key, unsigned int keylen,
				   int mode)
{
	struct qat_crypto_instance *inst = NULL;
	struct device *dev;
	int node = get_current_node();
	int ret;

	inst = qat_crypto_get_instance_node(node);
	if (!inst)
		return -EINVAL;
	dev = &GET_DEV(inst->accel_dev);
	ctx->inst = inst;
	ctx->enc_cd = dma_alloc_coherent(dev, sizeof(*ctx->enc_cd),
					 &ctx->enc_cd_paddr,
					 GFP_ATOMIC);
	if (!ctx->enc_cd) {
		ret = -ENOMEM;
		goto out_free_instance;
	}
	ctx->dec_cd = dma_alloc_coherent(dev, sizeof(*ctx->dec_cd),
					 &ctx->dec_cd_paddr,
					 GFP_ATOMIC);
	if (!ctx->dec_cd) {
		ret = -ENOMEM;
		goto out_free_enc;
	}

	ret = qat_alg_skcipher_init_sessions(ctx, key, keylen, mode);
	if (ret)
		goto out_free_all;

	return 0;

out_free_all:
	memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
	dma_free_coherent(dev, sizeof(*ctx->dec_cd),
			  ctx->dec_cd, ctx->dec_cd_paddr);
	ctx->dec_cd = NULL;
out_free_enc:
	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
	dma_free_coherent(dev, sizeof(*ctx->enc_cd),
			  ctx->enc_cd, ctx->enc_cd_paddr);
	ctx->enc_cd = NULL;
out_free_instance:
	ctx->inst = NULL;
	qat_crypto_put_instance(inst);
	return ret;
}

static int qat_alg_skcipher_setkey(struct crypto_skcipher *tfm,
				   const u8 *key, unsigned int keylen,
				   int mode)
{
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->mode = mode;

	if (ctx->enc_cd)
		return qat_alg_skcipher_rekey(ctx, key, keylen, mode);
	else
		return qat_alg_skcipher_newkey(ctx, key, keylen, mode);
}

static int qat_alg_skcipher_cbc_setkey(struct crypto_skcipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return qat_alg_skcipher_setkey(tfm, key, keylen,
				       ICP_QAT_HW_CIPHER_CBC_MODE);
}

static int qat_alg_skcipher_ctr_setkey(struct crypto_skcipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return qat_alg_skcipher_setkey(tfm, key, keylen,
				       ICP_QAT_HW_CIPHER_CTR_MODE);
}
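
/*
 * XTS with AES-192 halves is not handled by the accelerator, so such
 * keys are programmed into the software fallback tfm instead.
 */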
static int qat_alg_skcipher_xts_setkey(struct crypto_skcipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, key, keylen);
	if (ret)
		return ret;

	if (keylen >> 1 == AES_KEYSIZE_192) {
		ret = crypto_skcipher_setkey(ctx->ftfm, key, keylen);
		if (ret)
			return ret;

		ctx->fallback = true;

		return 0;
	}

	ctx->fallback = false;

	return qat_alg_skcipher_setkey(tfm, key, keylen,
				       ICP_QAT_HW_CIPHER_XTS_MODE);
}

static int qat_alg_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(stfm);
	struct qat_alg_skcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct qat_crypto_request *qat_req = skcipher_request_ctx(req);
	struct icp_qat_fw_la_cipher_req_params *cipher_param;
	struct icp_qat_fw_la_bulk_req *msg;
	int ret = 0, ctr = 0;

	if (req->cryptlen == 0)
		return 0;

	ret = qat_alg_sgl_to_bufl(ctx->inst, req->src, req->dst, qat_req);
	if (unlikely(ret))
		return ret;

	msg = &qat_req->req;
	*msg = ctx->enc_fw_req;
	qat_req->skcipher_ctx = ctx;
	qat_req->skcipher_req = req;
	qat_req->cb = qat_skcipher_alg_callback;
	qat_req->req.comn_mid.opaque_data = (u64)(__force long)qat_req;
	qat_req->req.comn_mid.src_data_addr = qat_req->buf.blp;
	qat_req->req.comn_mid.dest_data_addr = qat_req->buf.bloutp;
	qat_req->encryption = true;
	cipher_param = (void *)&qat_req->req.serv_specif_rqpars;
	cipher_param->cipher_length = req->cryptlen;
	cipher_param->cipher_offset = 0;
	memcpy(cipher_param->u.cipher_IV_array, req->iv, AES_BLOCK_SIZE);

	do {
		ret = adf_send_message(ctx->inst->sym_tx, (u32 *)msg);
	} while (ret == -EAGAIN && ctr++ < 10);

	if (ret == -EAGAIN) {
		qat_alg_free_bufl(ctx->inst, qat_req);
		return -EBUSY;
	}
	return -EINPROGRESS;
}

static int qat_alg_skcipher_blk_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE != 0)
		return -EINVAL;

	return qat_alg_skcipher_encrypt(req);
}

static int qat_alg_skcipher_xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(stfm);
	struct skcipher_request *nreq = skcipher_request_ctx(req);

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	if (ctx->fallback) {
		memcpy(nreq, req, sizeof(*req));
		skcipher_request_set_tfm(nreq, ctx->ftfm);
		return crypto_skcipher_encrypt(nreq);
	}

	return qat_alg_skcipher_encrypt(req);
}

static int qat_alg_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(stfm);
	struct qat_alg_skcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct qat_crypto_request *qat_req = skcipher_request_ctx(req);
	struct icp_qat_fw_la_cipher_req_params *cipher_param;
	struct icp_qat_fw_la_bulk_req *msg;
	int ret = 0, ctr = 0;

	if (req->cryptlen == 0)
		return 0;

	ret = qat_alg_sgl_to_bufl(ctx->inst, req->src, req->dst, qat_req);
	if (unlikely(ret))
		return ret;

	msg = &qat_req->req;
	*msg = ctx->dec_fw_req;
	qat_req->skcipher_ctx = ctx;
	qat_req->skcipher_req = req;
	qat_req->cb = qat_skcipher_alg_callback;
	qat_req->req.comn_mid.opaque_data = (u64)(__force long)qat_req;
	qat_req->req.comn_mid.src_data_addr = qat_req->buf.blp;
	qat_req->req.comn_mid.dest_data_addr = qat_req->buf.bloutp;
	qat_req->encryption = false;
	cipher_param = (void *)&qat_req->req.serv_specif_rqpars;
	cipher_param->cipher_length = req->cryptlen;
	cipher_param->cipher_offset = 0;
	memcpy(cipher_param->u.cipher_IV_array, req->iv, AES_BLOCK_SIZE);

	qat_alg_update_iv(qat_req);

	do {
		ret = adf_send_message(ctx->inst->sym_tx, (u32 *)msg);
	} while (ret == -EAGAIN && ctr++ < 10);

	if (ret == -EAGAIN) {
		qat_alg_free_bufl(ctx->inst, qat_req);
		return -EBUSY;
	}
	return -EINPROGRESS;
}

static int qat_alg_skcipher_blk_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE != 0)
		return -EINVAL;

	return qat_alg_skcipher_decrypt(req);
}

static int qat_alg_skcipher_xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *stfm = crypto_skcipher_reqtfm(req);
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(stfm);
	struct skcipher_request *nreq = skcipher_request_ctx(req);

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	if (ctx->fallback) {
		memcpy(nreq, req, sizeof(*req));
		skcipher_request_set_tfm(nreq, ctx->ftfm);
		return crypto_skcipher_decrypt(nreq);
	}

	return qat_alg_skcipher_decrypt(req);
}

static int qat_alg_aead_init(struct crypto_aead *tfm,
			     enum icp_qat_hw_auth_algo hash,
			     const char *hash_name)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->hash_tfm = crypto_alloc_shash(hash_name, 0, 0);
	if (IS_ERR(ctx->hash_tfm))
		return PTR_ERR(ctx->hash_tfm);
	ctx->qat_hash_alg = hash;
	crypto_aead_set_reqsize(tfm, sizeof(struct qat_crypto_request));
	return 0;
}

static int qat_alg_aead_sha1_init(struct crypto_aead *tfm)
{
	return qat_alg_aead_init(tfm, ICP_QAT_HW_AUTH_ALGO_SHA1, "sha1");
}

static int qat_alg_aead_sha256_init(struct crypto_aead *tfm)
{
	return qat_alg_aead_init(tfm, ICP_QAT_HW_AUTH_ALGO_SHA256, "sha256");
}

static int qat_alg_aead_sha512_init(struct crypto_aead *tfm)
{
	return qat_alg_aead_init(tfm, ICP_QAT_HW_AUTH_ALGO_SHA512, "sha512");
}

static void qat_alg_aead_exit(struct crypto_aead *tfm)
{
	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct qat_crypto_instance *inst = ctx->inst;
	struct device *dev;

	crypto_free_shash(ctx->hash_tfm);

	if (!inst)
		return;

	dev = &GET_DEV(inst->accel_dev);
	if (ctx->enc_cd) {
		memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd));
		dma_free_coherent(dev, sizeof(struct qat_alg_cd),
				  ctx->enc_cd, ctx->enc_cd_paddr);
	}
	if (ctx->dec_cd) {
		memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd));
		dma_free_coherent(dev, sizeof(struct qat_alg_cd),
				  ctx->dec_cd, ctx->dec_cd_paddr);
	}
	qat_crypto_put_instance(inst);
}

static int qat_alg_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct qat_crypto_request));
	return 0;
}

static int qat_alg_skcipher_init_xts_tfm(struct crypto_skcipher *tfm)
{
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	int reqsize;

	ctx->ftfm = crypto_alloc_skcipher("xts(aes)", 0,
					  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->ftfm))
		return PTR_ERR(ctx->ftfm);

	reqsize = max(sizeof(struct qat_crypto_request),
		      sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(ctx->ftfm));
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void qat_alg_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct qat_crypto_instance *inst = ctx->inst;
	struct device *dev;

	if (!inst)
		return;

	dev = &GET_DEV(inst->accel_dev);
	if (ctx->enc_cd) {
		memset(ctx->enc_cd, 0,
		       sizeof(struct icp_qat_hw_cipher_algo_blk));
		dma_free_coherent(dev,
				  sizeof(struct icp_qat_hw_cipher_algo_blk),
				  ctx->enc_cd, ctx->enc_cd_paddr);
	}
	if (ctx->dec_cd) {
		memset(ctx->dec_cd, 0,
		       sizeof(struct icp_qat_hw_cipher_algo_blk));
		dma_free_coherent(dev,
				  sizeof(struct icp_qat_hw_cipher_algo_blk),
				  ctx->dec_cd, ctx->dec_cd_paddr);
	}
	qat_crypto_put_instance(inst);
}

static void qat_alg_skcipher_exit_xts_tfm(struct crypto_skcipher *tfm)
{
	struct qat_alg_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (ctx->ftfm)
		crypto_free_skcipher(ctx->ftfm);

	qat_alg_skcipher_exit_tfm(tfm);
}
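
/* Algorithm templates registered with the crypto API */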
static struct aead_alg qat_aeads[] = { {
	.base = {
		.cra_name = "authenc(hmac(sha1),cbc(aes))",
		.cra_driver_name = "qat_aes_cbc_hmac_sha1",
		.cra_priority = 4001,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct qat_alg_aead_ctx),
		.cra_module = THIS_MODULE,
	},
	.init = qat_alg_aead_sha1_init,
	.exit = qat_alg_aead_exit,
	.setkey = qat_alg_aead_setkey,
	.decrypt = qat_alg_aead_dec,
	.encrypt = qat_alg_aead_enc,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA1_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha256),cbc(aes))",
		.cra_driver_name = "qat_aes_cbc_hmac_sha256",
		.cra_priority = 4001,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct qat_alg_aead_ctx),
		.cra_module = THIS_MODULE,
	},
	.init = qat_alg_aead_sha256_init,
	.exit = qat_alg_aead_exit,
	.setkey = qat_alg_aead_setkey,
	.decrypt = qat_alg_aead_dec,
	.encrypt = qat_alg_aead_enc,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA256_DIGEST_SIZE,
}, {
	.base = {
		.cra_name = "authenc(hmac(sha512),cbc(aes))",
		.cra_driver_name = "qat_aes_cbc_hmac_sha512",
		.cra_priority = 4001,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct qat_alg_aead_ctx),
		.cra_module = THIS_MODULE,
	},
	.init = qat_alg_aead_sha512_init,
	.exit = qat_alg_aead_exit,
	.setkey = qat_alg_aead_setkey,
	.decrypt = qat_alg_aead_dec,
	.encrypt = qat_alg_aead_enc,
	.ivsize = AES_BLOCK_SIZE,
	.maxauthsize = SHA512_DIGEST_SIZE,
} };

static struct skcipher_alg qat_skciphers[] = { {
	.base.cra_name = "cbc(aes)",
	.base.cra_driver_name = "qat_aes_cbc",
	.base.cra_priority = 4001,
	.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct qat_alg_skcipher_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = qat_alg_skcipher_init_tfm,
	.exit = qat_alg_skcipher_exit_tfm,
	.setkey = qat_alg_skcipher_cbc_setkey,
	.decrypt = qat_alg_skcipher_blk_decrypt,
	.encrypt = qat_alg_skcipher_blk_encrypt,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
}, {
	.base.cra_name = "ctr(aes)",
	.base.cra_driver_name = "qat_aes_ctr",
	.base.cra_priority = 4001,
	.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct qat_alg_skcipher_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = qat_alg_skcipher_init_tfm,
	.exit = qat_alg_skcipher_exit_tfm,
	.setkey = qat_alg_skcipher_ctr_setkey,
	.decrypt = qat_alg_skcipher_decrypt,
	.encrypt = qat_alg_skcipher_encrypt,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
}, {
	.base.cra_name = "xts(aes)",
	.base.cra_driver_name = "qat_aes_xts",
	.base.cra_priority = 4001,
	.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |
			  CRYPTO_ALG_ALLOCATES_MEMORY,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct qat_alg_skcipher_ctx),
	.base.cra_alignmask = 0,
	.base.cra_module = THIS_MODULE,

	.init = qat_alg_skcipher_init_xts_tfm,
	.exit = qat_alg_skcipher_exit_xts_tfm,
	.setkey = qat_alg_skcipher_xts_setkey,
	.decrypt = qat_alg_skcipher_xts_decrypt,
	.encrypt = qat_alg_skcipher_xts_encrypt,
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
} };
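
/*
 * Registration is refcounted across accelerators: only the first device
 * registers the algorithms and only the last one unregisters them.
 */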
int qat_algs_register(void)
{
	int ret = 0;

	mutex_lock(&algs_lock);
	if (++active_devs != 1)
		goto unlock;

	ret = crypto_register_skciphers(qat_skciphers,
					ARRAY_SIZE(qat_skciphers));
	if (ret)
		goto unlock;

	ret = crypto_register_aeads(qat_aeads, ARRAY_SIZE(qat_aeads));
	if (ret)
		goto unreg_algs;

unlock:
	mutex_unlock(&algs_lock);
	return ret;

unreg_algs:
	crypto_unregister_skciphers(qat_skciphers, ARRAY_SIZE(qat_skciphers));
	goto unlock;
}

void qat_algs_unregister(void)
{
	mutex_lock(&algs_lock);
	if (--active_devs != 0)
		goto unlock;

	crypto_unregister_aeads(qat_aeads, ARRAY_SIZE(qat_aeads));
	crypto_unregister_skciphers(qat_skciphers, ARRAY_SIZE(qat_skciphers));

unlock:
	mutex_unlock(&algs_lock);
}