/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

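/*
 * Per-tfm context for the rfc3686 template: the wrapped "ctr" child
 * transform plus the nonce carried in the trailing bytes of the key.
 */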
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

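/*
 * Handle the final, possibly partial block: generate one block of keystream
 * from the current counter block and XOR only the remaining nbytes into dst.
 */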
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

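/*
 * Encrypt full blocks when source and destination buffers are distinct:
 * the keystream block is generated directly into dst and then XORed with
 * src. Returns the number of leftover bytes (less than one block).
 */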
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

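/*
 * Encrypt full blocks in place: the keystream is generated into an aligned
 * stack buffer and XORed back over the data at src. Returns the leftover
 * byte count.
 */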
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

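/*
 * Top-level encrypt/decrypt handler (identical for CTR mode): walk the
 * request, process full blocks with the helpers above, and finish any
 * trailing partial block with crypto_ctr_crypt_final().
 */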
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

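/*
 * Instantiate "ctr(cipher)" as a simple skcipher template wrapped around a
 * single underlying block cipher.
 */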
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto out_free_inst;
	goto out_put_alg;

out_free_inst:
	inst->free(inst);
out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.create = crypto_ctr_create,
	.module = THIS_MODULE,
};

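/*
 * RFC 3686 keys carry the 4-byte nonce appended to the raw cipher key;
 * split it off here and hand the remaining key bytes to the child tfm.
 */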
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

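/*
 * Build the full 16-byte counter block (nonce || per-request IV || 1) and
 * forward the request to the child "ctr" transform.
 */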
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

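/*
 * Allocate the child transform and size the request context so the
 * sub-request (plus alignment padding) fits behind our own request data.
 */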
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

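/*
 * Instantiate "rfc3686(ctr-alg)": grab the named skcipher, verify it looks
 * like a CTR stream cipher with a 16-byte counter block, and register a
 * wrapper that supplies the nonce/IV handling above.
 */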
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
			CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.create = crypto_rfc3686_create,
	.module = THIS_MODULE,
};

static int __init crypto_ctr_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_ctr_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_rfc3686_tmpl);
	if (err)
		goto out_drop_ctr;

out:
	return err;

out_drop_ctr:
	crypto_unregister_template(&crypto_ctr_tmpl);
	goto out;
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");