// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto user configuration API.
 *
 * Copyright (C) 2017-2018 Corentin Labbe <clabbe@baylibre.com>
 */
9 #include <linux/crypto.h>
10 #include <linux/cryptouser.h>
11 #include <linux/sched.h>
12 #include <net/netlink.h>
14 #include <crypto/internal/skcipher.h>
15 #include <crypto/internal/rng.h>
16 #include <crypto/akcipher.h>
17 #include <crypto/kpp.h>
18 #include <crypto/internal/cryptouser.h>
22 #define null_terminated(x) (strnlen(x, sizeof(x)) < sizeof(x))
24 struct crypto_dump_info
{
25 struct sk_buff
*in_skb
;
26 struct sk_buff
*out_skb
;
31 static int crypto_report_aead(struct sk_buff
*skb
, struct crypto_alg
*alg
)
33 struct crypto_stat_aead raead
;
35 memset(&raead
, 0, sizeof(raead
));
37 strscpy(raead
.type
, "aead", sizeof(raead
.type
));
39 raead
.stat_encrypt_cnt
= atomic64_read(&alg
->stats
.aead
.encrypt_cnt
);
40 raead
.stat_encrypt_tlen
= atomic64_read(&alg
->stats
.aead
.encrypt_tlen
);
41 raead
.stat_decrypt_cnt
= atomic64_read(&alg
->stats
.aead
.decrypt_cnt
);
42 raead
.stat_decrypt_tlen
= atomic64_read(&alg
->stats
.aead
.decrypt_tlen
);
43 raead
.stat_err_cnt
= atomic64_read(&alg
->stats
.aead
.err_cnt
);
45 return nla_put(skb
, CRYPTOCFGA_STAT_AEAD
, sizeof(raead
), &raead
);
48 static int crypto_report_cipher(struct sk_buff
*skb
, struct crypto_alg
*alg
)
50 struct crypto_stat_cipher rcipher
;
52 memset(&rcipher
, 0, sizeof(rcipher
));
54 strscpy(rcipher
.type
, "cipher", sizeof(rcipher
.type
));
56 rcipher
.stat_encrypt_cnt
= atomic64_read(&alg
->stats
.cipher
.encrypt_cnt
);
57 rcipher
.stat_encrypt_tlen
= atomic64_read(&alg
->stats
.cipher
.encrypt_tlen
);
58 rcipher
.stat_decrypt_cnt
= atomic64_read(&alg
->stats
.cipher
.decrypt_cnt
);
59 rcipher
.stat_decrypt_tlen
= atomic64_read(&alg
->stats
.cipher
.decrypt_tlen
);
60 rcipher
.stat_err_cnt
= atomic64_read(&alg
->stats
.cipher
.err_cnt
);
62 return nla_put(skb
, CRYPTOCFGA_STAT_CIPHER
, sizeof(rcipher
), &rcipher
);
65 static int crypto_report_comp(struct sk_buff
*skb
, struct crypto_alg
*alg
)
67 struct crypto_stat_compress rcomp
;
69 memset(&rcomp
, 0, sizeof(rcomp
));
71 strscpy(rcomp
.type
, "compression", sizeof(rcomp
.type
));
72 rcomp
.stat_compress_cnt
= atomic64_read(&alg
->stats
.compress
.compress_cnt
);
73 rcomp
.stat_compress_tlen
= atomic64_read(&alg
->stats
.compress
.compress_tlen
);
74 rcomp
.stat_decompress_cnt
= atomic64_read(&alg
->stats
.compress
.decompress_cnt
);
75 rcomp
.stat_decompress_tlen
= atomic64_read(&alg
->stats
.compress
.decompress_tlen
);
76 rcomp
.stat_err_cnt
= atomic64_read(&alg
->stats
.compress
.err_cnt
);
78 return nla_put(skb
, CRYPTOCFGA_STAT_COMPRESS
, sizeof(rcomp
), &rcomp
);
81 static int crypto_report_acomp(struct sk_buff
*skb
, struct crypto_alg
*alg
)
83 struct crypto_stat_compress racomp
;
85 memset(&racomp
, 0, sizeof(racomp
));
87 strscpy(racomp
.type
, "acomp", sizeof(racomp
.type
));
88 racomp
.stat_compress_cnt
= atomic64_read(&alg
->stats
.compress
.compress_cnt
);
89 racomp
.stat_compress_tlen
= atomic64_read(&alg
->stats
.compress
.compress_tlen
);
90 racomp
.stat_decompress_cnt
= atomic64_read(&alg
->stats
.compress
.decompress_cnt
);
91 racomp
.stat_decompress_tlen
= atomic64_read(&alg
->stats
.compress
.decompress_tlen
);
92 racomp
.stat_err_cnt
= atomic64_read(&alg
->stats
.compress
.err_cnt
);
94 return nla_put(skb
, CRYPTOCFGA_STAT_ACOMP
, sizeof(racomp
), &racomp
);
97 static int crypto_report_akcipher(struct sk_buff
*skb
, struct crypto_alg
*alg
)
99 struct crypto_stat_akcipher rakcipher
;
101 memset(&rakcipher
, 0, sizeof(rakcipher
));
103 strscpy(rakcipher
.type
, "akcipher", sizeof(rakcipher
.type
));
104 rakcipher
.stat_encrypt_cnt
= atomic64_read(&alg
->stats
.akcipher
.encrypt_cnt
);
105 rakcipher
.stat_encrypt_tlen
= atomic64_read(&alg
->stats
.akcipher
.encrypt_tlen
);
106 rakcipher
.stat_decrypt_cnt
= atomic64_read(&alg
->stats
.akcipher
.decrypt_cnt
);
107 rakcipher
.stat_decrypt_tlen
= atomic64_read(&alg
->stats
.akcipher
.decrypt_tlen
);
108 rakcipher
.stat_sign_cnt
= atomic64_read(&alg
->stats
.akcipher
.sign_cnt
);
109 rakcipher
.stat_verify_cnt
= atomic64_read(&alg
->stats
.akcipher
.verify_cnt
);
110 rakcipher
.stat_err_cnt
= atomic64_read(&alg
->stats
.akcipher
.err_cnt
);
112 return nla_put(skb
, CRYPTOCFGA_STAT_AKCIPHER
,
113 sizeof(rakcipher
), &rakcipher
);
116 static int crypto_report_kpp(struct sk_buff
*skb
, struct crypto_alg
*alg
)
118 struct crypto_stat_kpp rkpp
;
120 memset(&rkpp
, 0, sizeof(rkpp
));
122 strscpy(rkpp
.type
, "kpp", sizeof(rkpp
.type
));
124 rkpp
.stat_setsecret_cnt
= atomic64_read(&alg
->stats
.kpp
.setsecret_cnt
);
125 rkpp
.stat_generate_public_key_cnt
= atomic64_read(&alg
->stats
.kpp
.generate_public_key_cnt
);
126 rkpp
.stat_compute_shared_secret_cnt
= atomic64_read(&alg
->stats
.kpp
.compute_shared_secret_cnt
);
127 rkpp
.stat_err_cnt
= atomic64_read(&alg
->stats
.kpp
.err_cnt
);
129 return nla_put(skb
, CRYPTOCFGA_STAT_KPP
, sizeof(rkpp
), &rkpp
);
132 static int crypto_report_ahash(struct sk_buff
*skb
, struct crypto_alg
*alg
)
134 struct crypto_stat_hash rhash
;
136 memset(&rhash
, 0, sizeof(rhash
));
138 strscpy(rhash
.type
, "ahash", sizeof(rhash
.type
));
140 rhash
.stat_hash_cnt
= atomic64_read(&alg
->stats
.hash
.hash_cnt
);
141 rhash
.stat_hash_tlen
= atomic64_read(&alg
->stats
.hash
.hash_tlen
);
142 rhash
.stat_err_cnt
= atomic64_read(&alg
->stats
.hash
.err_cnt
);
144 return nla_put(skb
, CRYPTOCFGA_STAT_HASH
, sizeof(rhash
), &rhash
);
147 static int crypto_report_shash(struct sk_buff
*skb
, struct crypto_alg
*alg
)
149 struct crypto_stat_hash rhash
;
151 memset(&rhash
, 0, sizeof(rhash
));
153 strscpy(rhash
.type
, "shash", sizeof(rhash
.type
));
155 rhash
.stat_hash_cnt
= atomic64_read(&alg
->stats
.hash
.hash_cnt
);
156 rhash
.stat_hash_tlen
= atomic64_read(&alg
->stats
.hash
.hash_tlen
);
157 rhash
.stat_err_cnt
= atomic64_read(&alg
->stats
.hash
.err_cnt
);
159 return nla_put(skb
, CRYPTOCFGA_STAT_HASH
, sizeof(rhash
), &rhash
);
162 static int crypto_report_rng(struct sk_buff
*skb
, struct crypto_alg
*alg
)
164 struct crypto_stat_rng rrng
;
166 memset(&rrng
, 0, sizeof(rrng
));
168 strscpy(rrng
.type
, "rng", sizeof(rrng
.type
));
170 rrng
.stat_generate_cnt
= atomic64_read(&alg
->stats
.rng
.generate_cnt
);
171 rrng
.stat_generate_tlen
= atomic64_read(&alg
->stats
.rng
.generate_tlen
);
172 rrng
.stat_seed_cnt
= atomic64_read(&alg
->stats
.rng
.seed_cnt
);
173 rrng
.stat_err_cnt
= atomic64_read(&alg
->stats
.rng
.err_cnt
);
175 return nla_put(skb
, CRYPTOCFGA_STAT_RNG
, sizeof(rrng
), &rrng
);
178 static int crypto_reportstat_one(struct crypto_alg
*alg
,
179 struct crypto_user_alg
*ualg
,
182 memset(ualg
, 0, sizeof(*ualg
));
184 strscpy(ualg
->cru_name
, alg
->cra_name
, sizeof(ualg
->cru_name
));
185 strscpy(ualg
->cru_driver_name
, alg
->cra_driver_name
,
186 sizeof(ualg
->cru_driver_name
));
187 strscpy(ualg
->cru_module_name
, module_name(alg
->cra_module
),
188 sizeof(ualg
->cru_module_name
));
192 ualg
->cru_flags
= alg
->cra_flags
;
193 ualg
->cru_refcnt
= refcount_read(&alg
->cra_refcnt
);
195 if (nla_put_u32(skb
, CRYPTOCFGA_PRIORITY_VAL
, alg
->cra_priority
))
196 goto nla_put_failure
;
197 if (alg
->cra_flags
& CRYPTO_ALG_LARVAL
) {
198 struct crypto_stat_larval rl
;
200 memset(&rl
, 0, sizeof(rl
));
201 strscpy(rl
.type
, "larval", sizeof(rl
.type
));
202 if (nla_put(skb
, CRYPTOCFGA_STAT_LARVAL
, sizeof(rl
), &rl
))
203 goto nla_put_failure
;
207 switch (alg
->cra_flags
& (CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_LARVAL
)) {
208 case CRYPTO_ALG_TYPE_AEAD
:
209 if (crypto_report_aead(skb
, alg
))
210 goto nla_put_failure
;
212 case CRYPTO_ALG_TYPE_SKCIPHER
:
213 if (crypto_report_cipher(skb
, alg
))
214 goto nla_put_failure
;
216 case CRYPTO_ALG_TYPE_BLKCIPHER
:
217 if (crypto_report_cipher(skb
, alg
))
218 goto nla_put_failure
;
220 case CRYPTO_ALG_TYPE_CIPHER
:
221 if (crypto_report_cipher(skb
, alg
))
222 goto nla_put_failure
;
224 case CRYPTO_ALG_TYPE_COMPRESS
:
225 if (crypto_report_comp(skb
, alg
))
226 goto nla_put_failure
;
228 case CRYPTO_ALG_TYPE_ACOMPRESS
:
229 if (crypto_report_acomp(skb
, alg
))
230 goto nla_put_failure
;
232 case CRYPTO_ALG_TYPE_SCOMPRESS
:
233 if (crypto_report_acomp(skb
, alg
))
234 goto nla_put_failure
;
236 case CRYPTO_ALG_TYPE_AKCIPHER
:
237 if (crypto_report_akcipher(skb
, alg
))
238 goto nla_put_failure
;
240 case CRYPTO_ALG_TYPE_KPP
:
241 if (crypto_report_kpp(skb
, alg
))
242 goto nla_put_failure
;
244 case CRYPTO_ALG_TYPE_AHASH
:
245 if (crypto_report_ahash(skb
, alg
))
246 goto nla_put_failure
;
248 case CRYPTO_ALG_TYPE_HASH
:
249 if (crypto_report_shash(skb
, alg
))
250 goto nla_put_failure
;
252 case CRYPTO_ALG_TYPE_RNG
:
253 if (crypto_report_rng(skb
, alg
))
254 goto nla_put_failure
;
257 pr_err("ERROR: Unhandled alg %d in %s\n",
258 alg
->cra_flags
& (CRYPTO_ALG_TYPE_MASK
| CRYPTO_ALG_LARVAL
),
269 static int crypto_reportstat_alg(struct crypto_alg
*alg
,
270 struct crypto_dump_info
*info
)
272 struct sk_buff
*in_skb
= info
->in_skb
;
273 struct sk_buff
*skb
= info
->out_skb
;
274 struct nlmsghdr
*nlh
;
275 struct crypto_user_alg
*ualg
;
278 nlh
= nlmsg_put(skb
, NETLINK_CB(in_skb
).portid
, info
->nlmsg_seq
,
279 CRYPTO_MSG_GETSTAT
, sizeof(*ualg
), info
->nlmsg_flags
);
285 ualg
= nlmsg_data(nlh
);
287 err
= crypto_reportstat_one(alg
, ualg
, skb
);
289 nlmsg_cancel(skb
, nlh
);
299 int crypto_reportstat(struct sk_buff
*in_skb
, struct nlmsghdr
*in_nlh
,
300 struct nlattr
**attrs
)
302 struct net
*net
= sock_net(in_skb
->sk
);
303 struct crypto_user_alg
*p
= nlmsg_data(in_nlh
);
304 struct crypto_alg
*alg
;
306 struct crypto_dump_info info
;
309 if (!null_terminated(p
->cru_name
) || !null_terminated(p
->cru_driver_name
))
312 alg
= crypto_alg_match(p
, 0);
317 skb
= nlmsg_new(NLMSG_DEFAULT_SIZE
, GFP_ATOMIC
);
321 info
.in_skb
= in_skb
;
323 info
.nlmsg_seq
= in_nlh
->nlmsg_seq
;
324 info
.nlmsg_flags
= 0;
326 err
= crypto_reportstat_alg(alg
, &info
);
336 return nlmsg_unicast(net
->crypto_nlsk
, skb
, NETLINK_CB(in_skb
).portid
);
339 MODULE_LICENSE("GPL");