1 // SPDX-License-Identifier: GPL-2.0
3 * Crypto user configuration API.
5 * Copyright (C) 2017-2018 Corentin Labbe <clabbe@baylibre.com>
9 #include <linux/crypto.h>
10 #include <linux/cryptouser.h>
11 #include <linux/sched.h>
12 #include <net/netlink.h>
13 #include <crypto/internal/skcipher.h>
14 #include <crypto/internal/rng.h>
15 #include <crypto/akcipher.h>
16 #include <crypto/kpp.h>
17 #include <crypto/internal/cryptouser.h>
/* True iff the fixed-size char array @x contains a NUL within its bounds,
 * i.e. it is safe to treat as a C string. Used to validate names coming
 * in from userspace before matching against them. */
#define null_terminated(x) (strnlen(x, sizeof(x)) < sizeof(x))

/* Serializes crypto_user configuration operations (taken by code outside
 * this chunk; declared here file-wide). */
static DEFINE_MUTEX(crypto_cfg_mutex);

/* Netlink socket defined elsewhere; used below to unicast replies back to
 * the requesting portid. */
extern struct sock *crypto_nlsk;
27 struct crypto_dump_info
{
28 struct sk_buff
*in_skb
;
29 struct sk_buff
*out_skb
;
34 static int crypto_report_aead(struct sk_buff
*skb
, struct crypto_alg
*alg
)
36 struct crypto_stat_aead raead
;
38 memset(&raead
, 0, sizeof(raead
));
40 strscpy(raead
.type
, "aead", sizeof(raead
.type
));
42 raead
.stat_encrypt_cnt
= atomic64_read(&alg
->stats
.aead
.encrypt_cnt
);
43 raead
.stat_encrypt_tlen
= atomic64_read(&alg
->stats
.aead
.encrypt_tlen
);
44 raead
.stat_decrypt_cnt
= atomic64_read(&alg
->stats
.aead
.decrypt_cnt
);
45 raead
.stat_decrypt_tlen
= atomic64_read(&alg
->stats
.aead
.decrypt_tlen
);
46 raead
.stat_err_cnt
= atomic64_read(&alg
->stats
.aead
.err_cnt
);
48 return nla_put(skb
, CRYPTOCFGA_STAT_AEAD
, sizeof(raead
), &raead
);
51 static int crypto_report_cipher(struct sk_buff
*skb
, struct crypto_alg
*alg
)
53 struct crypto_stat_cipher rcipher
;
55 memset(&rcipher
, 0, sizeof(rcipher
));
57 strscpy(rcipher
.type
, "cipher", sizeof(rcipher
.type
));
59 rcipher
.stat_encrypt_cnt
= atomic64_read(&alg
->stats
.cipher
.encrypt_cnt
);
60 rcipher
.stat_encrypt_tlen
= atomic64_read(&alg
->stats
.cipher
.encrypt_tlen
);
61 rcipher
.stat_decrypt_cnt
= atomic64_read(&alg
->stats
.cipher
.decrypt_cnt
);
62 rcipher
.stat_decrypt_tlen
= atomic64_read(&alg
->stats
.cipher
.decrypt_tlen
);
63 rcipher
.stat_err_cnt
= atomic64_read(&alg
->stats
.cipher
.err_cnt
);
65 return nla_put(skb
, CRYPTOCFGA_STAT_CIPHER
, sizeof(rcipher
), &rcipher
);
68 static int crypto_report_comp(struct sk_buff
*skb
, struct crypto_alg
*alg
)
70 struct crypto_stat_compress rcomp
;
72 memset(&rcomp
, 0, sizeof(rcomp
));
74 strscpy(rcomp
.type
, "compression", sizeof(rcomp
.type
));
75 rcomp
.stat_compress_cnt
= atomic64_read(&alg
->stats
.compress
.compress_cnt
);
76 rcomp
.stat_compress_tlen
= atomic64_read(&alg
->stats
.compress
.compress_tlen
);
77 rcomp
.stat_decompress_cnt
= atomic64_read(&alg
->stats
.compress
.decompress_cnt
);
78 rcomp
.stat_decompress_tlen
= atomic64_read(&alg
->stats
.compress
.decompress_tlen
);
79 rcomp
.stat_err_cnt
= atomic64_read(&alg
->stats
.compress
.err_cnt
);
81 return nla_put(skb
, CRYPTOCFGA_STAT_COMPRESS
, sizeof(rcomp
), &rcomp
);
84 static int crypto_report_acomp(struct sk_buff
*skb
, struct crypto_alg
*alg
)
86 struct crypto_stat_compress racomp
;
88 memset(&racomp
, 0, sizeof(racomp
));
90 strscpy(racomp
.type
, "acomp", sizeof(racomp
.type
));
91 racomp
.stat_compress_cnt
= atomic64_read(&alg
->stats
.compress
.compress_cnt
);
92 racomp
.stat_compress_tlen
= atomic64_read(&alg
->stats
.compress
.compress_tlen
);
93 racomp
.stat_decompress_cnt
= atomic64_read(&alg
->stats
.compress
.decompress_cnt
);
94 racomp
.stat_decompress_tlen
= atomic64_read(&alg
->stats
.compress
.decompress_tlen
);
95 racomp
.stat_err_cnt
= atomic64_read(&alg
->stats
.compress
.err_cnt
);
97 return nla_put(skb
, CRYPTOCFGA_STAT_ACOMP
, sizeof(racomp
), &racomp
);
100 static int crypto_report_akcipher(struct sk_buff
*skb
, struct crypto_alg
*alg
)
102 struct crypto_stat_akcipher rakcipher
;
104 memset(&rakcipher
, 0, sizeof(rakcipher
));
106 strscpy(rakcipher
.type
, "akcipher", sizeof(rakcipher
.type
));
107 rakcipher
.stat_encrypt_cnt
= atomic64_read(&alg
->stats
.akcipher
.encrypt_cnt
);
108 rakcipher
.stat_encrypt_tlen
= atomic64_read(&alg
->stats
.akcipher
.encrypt_tlen
);
109 rakcipher
.stat_decrypt_cnt
= atomic64_read(&alg
->stats
.akcipher
.decrypt_cnt
);
110 rakcipher
.stat_decrypt_tlen
= atomic64_read(&alg
->stats
.akcipher
.decrypt_tlen
);
111 rakcipher
.stat_sign_cnt
= atomic64_read(&alg
->stats
.akcipher
.sign_cnt
);
112 rakcipher
.stat_verify_cnt
= atomic64_read(&alg
->stats
.akcipher
.verify_cnt
);
113 rakcipher
.stat_err_cnt
= atomic64_read(&alg
->stats
.akcipher
.err_cnt
);
115 return nla_put(skb
, CRYPTOCFGA_STAT_AKCIPHER
,
116 sizeof(rakcipher
), &rakcipher
);
119 static int crypto_report_kpp(struct sk_buff
*skb
, struct crypto_alg
*alg
)
121 struct crypto_stat_kpp rkpp
;
123 memset(&rkpp
, 0, sizeof(rkpp
));
125 strscpy(rkpp
.type
, "kpp", sizeof(rkpp
.type
));
127 rkpp
.stat_setsecret_cnt
= atomic64_read(&alg
->stats
.kpp
.setsecret_cnt
);
128 rkpp
.stat_generate_public_key_cnt
= atomic64_read(&alg
->stats
.kpp
.generate_public_key_cnt
);
129 rkpp
.stat_compute_shared_secret_cnt
= atomic64_read(&alg
->stats
.kpp
.compute_shared_secret_cnt
);
130 rkpp
.stat_err_cnt
= atomic64_read(&alg
->stats
.kpp
.err_cnt
);
132 return nla_put(skb
, CRYPTOCFGA_STAT_KPP
, sizeof(rkpp
), &rkpp
);
135 static int crypto_report_ahash(struct sk_buff
*skb
, struct crypto_alg
*alg
)
137 struct crypto_stat_hash rhash
;
139 memset(&rhash
, 0, sizeof(rhash
));
141 strscpy(rhash
.type
, "ahash", sizeof(rhash
.type
));
143 rhash
.stat_hash_cnt
= atomic64_read(&alg
->stats
.hash
.hash_cnt
);
144 rhash
.stat_hash_tlen
= atomic64_read(&alg
->stats
.hash
.hash_tlen
);
145 rhash
.stat_err_cnt
= atomic64_read(&alg
->stats
.hash
.err_cnt
);
147 return nla_put(skb
, CRYPTOCFGA_STAT_HASH
, sizeof(rhash
), &rhash
);
150 static int crypto_report_shash(struct sk_buff
*skb
, struct crypto_alg
*alg
)
152 struct crypto_stat_hash rhash
;
154 memset(&rhash
, 0, sizeof(rhash
));
156 strscpy(rhash
.type
, "shash", sizeof(rhash
.type
));
158 rhash
.stat_hash_cnt
= atomic64_read(&alg
->stats
.hash
.hash_cnt
);
159 rhash
.stat_hash_tlen
= atomic64_read(&alg
->stats
.hash
.hash_tlen
);
160 rhash
.stat_err_cnt
= atomic64_read(&alg
->stats
.hash
.err_cnt
);
162 return nla_put(skb
, CRYPTOCFGA_STAT_HASH
, sizeof(rhash
), &rhash
);
165 static int crypto_report_rng(struct sk_buff
*skb
, struct crypto_alg
*alg
)
167 struct crypto_stat_rng rrng
;
169 memset(&rrng
, 0, sizeof(rrng
));
171 strscpy(rrng
.type
, "rng", sizeof(rrng
.type
));
173 rrng
.stat_generate_cnt
= atomic64_read(&alg
->stats
.rng
.generate_cnt
);
174 rrng
.stat_generate_tlen
= atomic64_read(&alg
->stats
.rng
.generate_tlen
);
175 rrng
.stat_seed_cnt
= atomic64_read(&alg
->stats
.rng
.seed_cnt
);
176 rrng
.stat_err_cnt
= atomic64_read(&alg
->stats
.rng
.err_cnt
);
178 return nla_put(skb
, CRYPTOCFGA_STAT_RNG
, sizeof(rrng
), &rrng
);
181 static int crypto_reportstat_one(struct crypto_alg
*alg
,
182 struct crypto_user_alg
*ualg
,
185 memset(ualg
, 0, sizeof(*ualg
));
187 strscpy(ualg
->cru_name
, alg
->cra_name
, sizeof(ualg
->cru_name
));
188 strscpy(ualg
->cru_driver_name
, alg
->cra_driver_name
,
189 sizeof(ualg
->cru_driver_name
));
190 strscpy(ualg
->cru_module_name
, module_name(alg
->cra_module
),
191 sizeof(ualg
->cru_module_name
));
195 ualg
->cru_flags
= alg
->cra_flags
;
196 ualg
->cru_refcnt
= refcount_read(&alg
->cra_refcnt
);
198 if (nla_put_u32(skb
, CRYPTOCFGA_PRIORITY_VAL
, alg
->cra_priority
))
199 goto nla_put_failure
;
200 if (alg
->cra_flags
& CRYPTO_ALG_LARVAL
) {
201 struct crypto_stat_larval rl
;
203 memset(&rl
, 0, sizeof(rl
));
204 strscpy(rl
.type
, "larval", sizeof(rl
.type
));
205 if (nla_put(skb
, CRYPTOCFGA_STAT_LARVAL
, sizeof(rl
), &rl
))
206 goto nla_put_failure
;
210 switch (alg
->cra_flags
& (CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_LARVAL
)) {
211 case CRYPTO_ALG_TYPE_AEAD
:
212 if (crypto_report_aead(skb
, alg
))
213 goto nla_put_failure
;
215 case CRYPTO_ALG_TYPE_SKCIPHER
:
216 if (crypto_report_cipher(skb
, alg
))
217 goto nla_put_failure
;
219 case CRYPTO_ALG_TYPE_BLKCIPHER
:
220 if (crypto_report_cipher(skb
, alg
))
221 goto nla_put_failure
;
223 case CRYPTO_ALG_TYPE_CIPHER
:
224 if (crypto_report_cipher(skb
, alg
))
225 goto nla_put_failure
;
227 case CRYPTO_ALG_TYPE_COMPRESS
:
228 if (crypto_report_comp(skb
, alg
))
229 goto nla_put_failure
;
231 case CRYPTO_ALG_TYPE_ACOMPRESS
:
232 if (crypto_report_acomp(skb
, alg
))
233 goto nla_put_failure
;
235 case CRYPTO_ALG_TYPE_SCOMPRESS
:
236 if (crypto_report_acomp(skb
, alg
))
237 goto nla_put_failure
;
239 case CRYPTO_ALG_TYPE_AKCIPHER
:
240 if (crypto_report_akcipher(skb
, alg
))
241 goto nla_put_failure
;
243 case CRYPTO_ALG_TYPE_KPP
:
244 if (crypto_report_kpp(skb
, alg
))
245 goto nla_put_failure
;
247 case CRYPTO_ALG_TYPE_AHASH
:
248 if (crypto_report_ahash(skb
, alg
))
249 goto nla_put_failure
;
251 case CRYPTO_ALG_TYPE_HASH
:
252 if (crypto_report_shash(skb
, alg
))
253 goto nla_put_failure
;
255 case CRYPTO_ALG_TYPE_RNG
:
256 if (crypto_report_rng(skb
, alg
))
257 goto nla_put_failure
;
260 pr_err("ERROR: Unhandled alg %d in %s\n",
261 alg
->cra_flags
& (CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_LARVAL
),
272 static int crypto_reportstat_alg(struct crypto_alg
*alg
,
273 struct crypto_dump_info
*info
)
275 struct sk_buff
*in_skb
= info
->in_skb
;
276 struct sk_buff
*skb
= info
->out_skb
;
277 struct nlmsghdr
*nlh
;
278 struct crypto_user_alg
*ualg
;
281 nlh
= nlmsg_put(skb
, NETLINK_CB(in_skb
).portid
, info
->nlmsg_seq
,
282 CRYPTO_MSG_GETSTAT
, sizeof(*ualg
), info
->nlmsg_flags
);
288 ualg
= nlmsg_data(nlh
);
290 err
= crypto_reportstat_one(alg
, ualg
, skb
);
292 nlmsg_cancel(skb
, nlh
);
302 int crypto_reportstat(struct sk_buff
*in_skb
, struct nlmsghdr
*in_nlh
,
303 struct nlattr
**attrs
)
305 struct crypto_user_alg
*p
= nlmsg_data(in_nlh
);
306 struct crypto_alg
*alg
;
308 struct crypto_dump_info info
;
311 if (!null_terminated(p
->cru_name
) || !null_terminated(p
->cru_driver_name
))
314 alg
= crypto_alg_match(p
, 0);
319 skb
= nlmsg_new(NLMSG_DEFAULT_SIZE
, GFP_ATOMIC
);
323 info
.in_skb
= in_skb
;
325 info
.nlmsg_seq
= in_nlh
->nlmsg_seq
;
326 info
.nlmsg_flags
= 0;
328 err
= crypto_reportstat_alg(alg
, &info
);
336 return nlmsg_unicast(crypto_nlsk
, skb
, NETLINK_CB(in_skb
).portid
);
339 MODULE_LICENSE("GPL");