1 // SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API.
 *
 * Copyright (C) 2017-2018 Corentin Labbe <clabbe@baylibre.com>
 */
9 #include <linux/crypto.h>
10 #include <linux/cryptouser.h>
11 #include <linux/sched.h>
12 #include <net/netlink.h>
13 #include <crypto/internal/skcipher.h>
14 #include <crypto/internal/rng.h>
15 #include <crypto/akcipher.h>
16 #include <crypto/kpp.h>
17 #include <crypto/internal/cryptouser.h>
/* True iff the fixed-size char array @x holds a NUL-terminated string. */
#define null_terminated(x) (strnlen(x, sizeof(x)) < sizeof(x))
/* Serializes crypto userspace configuration requests. */
static DEFINE_MUTEX(crypto_cfg_mutex);

/* Netlink socket for the crypto user API; defined elsewhere in crypto_user. */
extern struct sock *crypto_nlsk;
27 struct crypto_dump_info {
28 struct sk_buff *in_skb;
29 struct sk_buff *out_skb;
34 static int crypto_report_aead(struct sk_buff *skb, struct crypto_alg *alg)
36 struct crypto_stat_aead raead;
39 memset(&raead, 0, sizeof(raead));
41 strscpy(raead.type, "aead", sizeof(raead.type));
43 v64 = atomic64_read(&alg->encrypt_cnt);
44 raead.stat_encrypt_cnt = v64;
45 v64 = atomic64_read(&alg->encrypt_tlen);
46 raead.stat_encrypt_tlen = v64;
47 v64 = atomic64_read(&alg->decrypt_cnt);
48 raead.stat_decrypt_cnt = v64;
49 v64 = atomic64_read(&alg->decrypt_tlen);
50 raead.stat_decrypt_tlen = v64;
51 v64 = atomic64_read(&alg->aead_err_cnt);
52 raead.stat_aead_err_cnt = v64;
54 return nla_put(skb, CRYPTOCFGA_STAT_AEAD, sizeof(raead), &raead);
57 static int crypto_report_cipher(struct sk_buff *skb, struct crypto_alg *alg)
59 struct crypto_stat_cipher rcipher;
62 memset(&rcipher, 0, sizeof(rcipher));
64 strscpy(rcipher.type, "cipher", sizeof(rcipher.type));
66 v64 = atomic64_read(&alg->encrypt_cnt);
67 rcipher.stat_encrypt_cnt = v64;
68 v64 = atomic64_read(&alg->encrypt_tlen);
69 rcipher.stat_encrypt_tlen = v64;
70 v64 = atomic64_read(&alg->decrypt_cnt);
71 rcipher.stat_decrypt_cnt = v64;
72 v64 = atomic64_read(&alg->decrypt_tlen);
73 rcipher.stat_decrypt_tlen = v64;
74 v64 = atomic64_read(&alg->cipher_err_cnt);
75 rcipher.stat_cipher_err_cnt = v64;
77 return nla_put(skb, CRYPTOCFGA_STAT_CIPHER, sizeof(rcipher), &rcipher);
80 static int crypto_report_comp(struct sk_buff *skb, struct crypto_alg *alg)
82 struct crypto_stat_compress rcomp;
85 memset(&rcomp, 0, sizeof(rcomp));
87 strscpy(rcomp.type, "compression", sizeof(rcomp.type));
88 v64 = atomic64_read(&alg->compress_cnt);
89 rcomp.stat_compress_cnt = v64;
90 v64 = atomic64_read(&alg->compress_tlen);
91 rcomp.stat_compress_tlen = v64;
92 v64 = atomic64_read(&alg->decompress_cnt);
93 rcomp.stat_decompress_cnt = v64;
94 v64 = atomic64_read(&alg->decompress_tlen);
95 rcomp.stat_decompress_tlen = v64;
96 v64 = atomic64_read(&alg->compress_err_cnt);
97 rcomp.stat_compress_err_cnt = v64;
99 return nla_put(skb, CRYPTOCFGA_STAT_COMPRESS, sizeof(rcomp), &rcomp);
102 static int crypto_report_acomp(struct sk_buff *skb, struct crypto_alg *alg)
104 struct crypto_stat_compress racomp;
107 memset(&racomp, 0, sizeof(racomp));
109 strscpy(racomp.type, "acomp", sizeof(racomp.type));
110 v64 = atomic64_read(&alg->compress_cnt);
111 racomp.stat_compress_cnt = v64;
112 v64 = atomic64_read(&alg->compress_tlen);
113 racomp.stat_compress_tlen = v64;
114 v64 = atomic64_read(&alg->decompress_cnt);
115 racomp.stat_decompress_cnt = v64;
116 v64 = atomic64_read(&alg->decompress_tlen);
117 racomp.stat_decompress_tlen = v64;
118 v64 = atomic64_read(&alg->compress_err_cnt);
119 racomp.stat_compress_err_cnt = v64;
121 return nla_put(skb, CRYPTOCFGA_STAT_ACOMP, sizeof(racomp), &racomp);
124 static int crypto_report_akcipher(struct sk_buff *skb, struct crypto_alg *alg)
126 struct crypto_stat_akcipher rakcipher;
129 memset(&rakcipher, 0, sizeof(rakcipher));
131 strscpy(rakcipher.type, "akcipher", sizeof(rakcipher.type));
132 v64 = atomic64_read(&alg->encrypt_cnt);
133 rakcipher.stat_encrypt_cnt = v64;
134 v64 = atomic64_read(&alg->encrypt_tlen);
135 rakcipher.stat_encrypt_tlen = v64;
136 v64 = atomic64_read(&alg->decrypt_cnt);
137 rakcipher.stat_decrypt_cnt = v64;
138 v64 = atomic64_read(&alg->decrypt_tlen);
139 rakcipher.stat_decrypt_tlen = v64;
140 v64 = atomic64_read(&alg->sign_cnt);
141 rakcipher.stat_sign_cnt = v64;
142 v64 = atomic64_read(&alg->verify_cnt);
143 rakcipher.stat_verify_cnt = v64;
144 v64 = atomic64_read(&alg->akcipher_err_cnt);
145 rakcipher.stat_akcipher_err_cnt = v64;
147 return nla_put(skb, CRYPTOCFGA_STAT_AKCIPHER,
148 sizeof(rakcipher), &rakcipher);
151 static int crypto_report_kpp(struct sk_buff *skb, struct crypto_alg *alg)
153 struct crypto_stat_kpp rkpp;
156 memset(&rkpp, 0, sizeof(rkpp));
158 strscpy(rkpp.type, "kpp", sizeof(rkpp.type));
160 v = atomic64_read(&alg->setsecret_cnt);
161 rkpp.stat_setsecret_cnt = v;
162 v = atomic64_read(&alg->generate_public_key_cnt);
163 rkpp.stat_generate_public_key_cnt = v;
164 v = atomic64_read(&alg->compute_shared_secret_cnt);
165 rkpp.stat_compute_shared_secret_cnt = v;
166 v = atomic64_read(&alg->kpp_err_cnt);
167 rkpp.stat_kpp_err_cnt = v;
169 return nla_put(skb, CRYPTOCFGA_STAT_KPP, sizeof(rkpp), &rkpp);
172 static int crypto_report_ahash(struct sk_buff *skb, struct crypto_alg *alg)
174 struct crypto_stat_hash rhash;
177 memset(&rhash, 0, sizeof(rhash));
179 strscpy(rhash.type, "ahash", sizeof(rhash.type));
181 v64 = atomic64_read(&alg->hash_cnt);
182 rhash.stat_hash_cnt = v64;
183 v64 = atomic64_read(&alg->hash_tlen);
184 rhash.stat_hash_tlen = v64;
185 v64 = atomic64_read(&alg->hash_err_cnt);
186 rhash.stat_hash_err_cnt = v64;
188 return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
191 static int crypto_report_shash(struct sk_buff *skb, struct crypto_alg *alg)
193 struct crypto_stat_hash rhash;
196 memset(&rhash, 0, sizeof(rhash));
198 strscpy(rhash.type, "shash", sizeof(rhash.type));
200 v64 = atomic64_read(&alg->hash_cnt);
201 rhash.stat_hash_cnt = v64;
202 v64 = atomic64_read(&alg->hash_tlen);
203 rhash.stat_hash_tlen = v64;
204 v64 = atomic64_read(&alg->hash_err_cnt);
205 rhash.stat_hash_err_cnt = v64;
207 return nla_put(skb, CRYPTOCFGA_STAT_HASH, sizeof(rhash), &rhash);
210 static int crypto_report_rng(struct sk_buff *skb, struct crypto_alg *alg)
212 struct crypto_stat_rng rrng;
215 memset(&rrng, 0, sizeof(rrng));
217 strscpy(rrng.type, "rng", sizeof(rrng.type));
219 v64 = atomic64_read(&alg->generate_cnt);
220 rrng.stat_generate_cnt = v64;
221 v64 = atomic64_read(&alg->generate_tlen);
222 rrng.stat_generate_tlen = v64;
223 v64 = atomic64_read(&alg->seed_cnt);
224 rrng.stat_seed_cnt = v64;
225 v64 = atomic64_read(&alg->rng_err_cnt);
226 rrng.stat_rng_err_cnt = v64;
228 return nla_put(skb, CRYPTOCFGA_STAT_RNG, sizeof(rrng), &rrng);
231 static int crypto_reportstat_one(struct crypto_alg *alg,
232 struct crypto_user_alg *ualg,
235 memset(ualg, 0, sizeof(*ualg));
237 strscpy(ualg->cru_name, alg->cra_name, sizeof(ualg->cru_name));
238 strscpy(ualg->cru_driver_name, alg->cra_driver_name,
239 sizeof(ualg->cru_driver_name));
240 strscpy(ualg->cru_module_name, module_name(alg->cra_module),
241 sizeof(ualg->cru_module_name));
245 ualg->cru_flags = alg->cra_flags;
246 ualg->cru_refcnt = refcount_read(&alg->cra_refcnt);
248 if (nla_put_u32(skb, CRYPTOCFGA_PRIORITY_VAL, alg->cra_priority))
249 goto nla_put_failure;
250 if (alg->cra_flags & CRYPTO_ALG_LARVAL) {
251 struct crypto_stat_larval rl;
253 memset(&rl, 0, sizeof(rl));
254 strscpy(rl.type, "larval", sizeof(rl.type));
255 if (nla_put(skb, CRYPTOCFGA_STAT_LARVAL, sizeof(rl), &rl))
256 goto nla_put_failure;
260 switch (alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL)) {
261 case CRYPTO_ALG_TYPE_AEAD:
262 if (crypto_report_aead(skb, alg))
263 goto nla_put_failure;
265 case CRYPTO_ALG_TYPE_SKCIPHER:
266 if (crypto_report_cipher(skb, alg))
267 goto nla_put_failure;
269 case CRYPTO_ALG_TYPE_BLKCIPHER:
270 if (crypto_report_cipher(skb, alg))
271 goto nla_put_failure;
273 case CRYPTO_ALG_TYPE_CIPHER:
274 if (crypto_report_cipher(skb, alg))
275 goto nla_put_failure;
277 case CRYPTO_ALG_TYPE_COMPRESS:
278 if (crypto_report_comp(skb, alg))
279 goto nla_put_failure;
281 case CRYPTO_ALG_TYPE_ACOMPRESS:
282 if (crypto_report_acomp(skb, alg))
283 goto nla_put_failure;
285 case CRYPTO_ALG_TYPE_SCOMPRESS:
286 if (crypto_report_acomp(skb, alg))
287 goto nla_put_failure;
289 case CRYPTO_ALG_TYPE_AKCIPHER:
290 if (crypto_report_akcipher(skb, alg))
291 goto nla_put_failure;
293 case CRYPTO_ALG_TYPE_KPP:
294 if (crypto_report_kpp(skb, alg))
295 goto nla_put_failure;
297 case CRYPTO_ALG_TYPE_AHASH:
298 if (crypto_report_ahash(skb, alg))
299 goto nla_put_failure;
301 case CRYPTO_ALG_TYPE_HASH:
302 if (crypto_report_shash(skb, alg))
303 goto nla_put_failure;
305 case CRYPTO_ALG_TYPE_RNG:
306 if (crypto_report_rng(skb, alg))
307 goto nla_put_failure;
310 pr_err("ERROR: Unhandled alg %d in %s\n",
311 alg->cra_flags & (CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_LARVAL),
322 static int crypto_reportstat_alg(struct crypto_alg *alg,
323 struct crypto_dump_info *info)
325 struct sk_buff *in_skb = info->in_skb;
326 struct sk_buff *skb = info->out_skb;
327 struct nlmsghdr *nlh;
328 struct crypto_user_alg *ualg;
331 nlh = nlmsg_put(skb, NETLINK_CB(in_skb).portid, info->nlmsg_seq,
332 CRYPTO_MSG_GETSTAT, sizeof(*ualg), info->nlmsg_flags);
338 ualg = nlmsg_data(nlh);
340 err = crypto_reportstat_one(alg, ualg, skb);
342 nlmsg_cancel(skb, nlh);
352 int crypto_reportstat(struct sk_buff *in_skb, struct nlmsghdr *in_nlh,
353 struct nlattr **attrs)
355 struct crypto_user_alg *p = nlmsg_data(in_nlh);
356 struct crypto_alg *alg;
358 struct crypto_dump_info info;
361 if (!null_terminated(p->cru_name) || !null_terminated(p->cru_driver_name))
364 alg = crypto_alg_match(p, 0);
369 skb = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_ATOMIC);
373 info.in_skb = in_skb;
375 info.nlmsg_seq = in_nlh->nlmsg_seq;
376 info.nlmsg_flags = 0;
378 err = crypto_reportstat_alg(alg, &info);
386 return nlmsg_unicast(crypto_nlsk, skb, NETLINK_CB(in_skb).portid);
389 int crypto_dump_reportstat(struct sk_buff *skb, struct netlink_callback *cb)
391 struct crypto_alg *alg;
392 struct crypto_dump_info info;
400 info.in_skb = cb->skb;
402 info.nlmsg_seq = cb->nlh->nlmsg_seq;
403 info.nlmsg_flags = NLM_F_MULTI;
405 list_for_each_entry(alg, &crypto_alg_list, cra_list) {
406 err = crypto_reportstat_alg(alg, &info);
/* Dump-done callback: nothing to clean up. Always returns 0. */
int crypto_dump_reportstat_done(struct netlink_callback *cb)
{
	return 0;
}
422 MODULE_LICENSE("GPL");