// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linear symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers.
 *
 * Copyright (c) 2023 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/export.h>
#include <linux/kernel.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "skcipher.h"
static inline struct crypto_lskcipher *__crypto_lskcipher_cast(
        struct crypto_tfm *tfm)
{
        return container_of(tfm, struct crypto_lskcipher, base);
}
static inline struct lskcipher_alg *__crypto_lskcipher_alg(
        struct crypto_alg *alg)
{
        return container_of(alg, struct lskcipher_alg, co.base);
}
static int lskcipher_setkey_unaligned(struct crypto_lskcipher *tfm,
                                      const u8 *key, unsigned int keylen)
{
        unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
        struct lskcipher_alg *cipher = crypto_lskcipher_alg(tfm);
        u8 *buffer, *alignbuffer;
        unsigned long absize;
        int ret;

        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = cipher->setkey(tfm, alignbuffer, keylen);
        kfree_sensitive(buffer);
        return ret;
}
int crypto_lskcipher_setkey(struct crypto_lskcipher *tfm, const u8 *key,
                            unsigned int keylen)
{
        unsigned long alignmask = crypto_lskcipher_alignmask(tfm);
        struct lskcipher_alg *cipher = crypto_lskcipher_alg(tfm);

        if (keylen < cipher->co.min_keysize || keylen > cipher->co.max_keysize)
                return -EINVAL;

        if ((unsigned long)key & alignmask)
                return lskcipher_setkey_unaligned(tfm, key, keylen);

        return cipher->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_setkey);
static int crypto_lskcipher_crypt_unaligned(
        struct crypto_lskcipher *tfm, const u8 *src, u8 *dst, unsigned len,
        u8 *iv, int (*crypt)(struct crypto_lskcipher *tfm, const u8 *src,
                             u8 *dst, unsigned len, u8 *iv, u32 flags))
{
        unsigned statesize = crypto_lskcipher_statesize(tfm);
        unsigned ivsize = crypto_lskcipher_ivsize(tfm);
        unsigned bs = crypto_lskcipher_blocksize(tfm);
        unsigned cs = crypto_lskcipher_chunksize(tfm);
        int err;
        u8 *tiv;
        u8 *p;

        BUILD_BUG_ON(MAX_CIPHER_BLOCKSIZE > PAGE_SIZE ||
                     MAX_CIPHER_ALIGNMASK >= PAGE_SIZE);

        tiv = kmalloc(PAGE_SIZE, GFP_ATOMIC);
        if (!tiv)
                return -ENOMEM;

        memcpy(tiv, iv, ivsize + statesize);

        p = kmalloc(PAGE_SIZE, GFP_ATOMIC);
        err = -ENOMEM;
        if (!p)
                goto out;

        while (len >= bs) {
                unsigned chunk = min((unsigned)PAGE_SIZE, len);

                if (chunk > cs)
                        chunk &= ~(cs - 1);

                /* Bounce each chunk through the aligned buffer. */
                memcpy(p, src, chunk);
                err = crypt(tfm, p, p, chunk, tiv, CRYPTO_LSKCIPHER_FLAG_FINAL);
                if (err)
                        goto out;

                memcpy(dst, p, chunk);

                dst += chunk;
                src += chunk;
                len -= chunk;
        }

        err = len ? -EINVAL : 0;

out:
        memcpy(iv, tiv, ivsize + statesize);
        kfree_sensitive(p);
        kfree_sensitive(tiv);
        return err;
}
static int crypto_lskcipher_crypt(struct crypto_lskcipher *tfm, const u8 *src,
                                  u8 *dst, unsigned len, u8 *iv,
                                  int (*crypt)(struct crypto_lskcipher *tfm,
                                               const u8 *src, u8 *dst,
                                               unsigned len, u8 *iv,
                                               u32 flags))
{
        unsigned long alignmask = crypto_lskcipher_alignmask(tfm);

        if (((unsigned long)src | (unsigned long)dst | (unsigned long)iv) &
            alignmask)
                return crypto_lskcipher_crypt_unaligned(tfm, src, dst, len, iv,
                                                        crypt);

        return crypt(tfm, src, dst, len, iv, CRYPTO_LSKCIPHER_FLAG_FINAL);
}
int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
                             u8 *dst, unsigned len, u8 *iv)
{
        struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);

        return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->encrypt);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt);
int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
                             u8 *dst, unsigned len, u8 *iv)
{
        struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);

        return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->decrypt);
}
EXPORT_SYMBOL_GPL(crypto_lskcipher_decrypt);
static int crypto_lskcipher_crypt_sg(struct skcipher_request *req,
                                     int (*crypt)(struct crypto_lskcipher *tfm,
                                                  const u8 *src, u8 *dst,
                                                  unsigned len, u8 *ivs,
                                                  u32 flags))
{
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
        u8 *ivs = skcipher_request_ctx(req);
        struct crypto_lskcipher *tfm = *ctx;
        struct skcipher_walk walk;
        unsigned ivsize;
        u32 flags;
        int err;

        ivsize = crypto_lskcipher_ivsize(tfm);
        ivs = PTR_ALIGN(ivs, crypto_skcipher_alignmask(skcipher) + 1);
        memcpy(ivs, req->iv, ivsize);

        flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

        if (req->base.flags & CRYPTO_SKCIPHER_REQ_CONT)
                flags |= CRYPTO_LSKCIPHER_FLAG_CONT;

        if (!(req->base.flags & CRYPTO_SKCIPHER_REQ_NOTFINAL))
                flags |= CRYPTO_LSKCIPHER_FLAG_FINAL;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes) {
                /* Clear FINAL until the last segment of the walk. */
                err = crypt(tfm, walk.src.virt.addr, walk.dst.virt.addr,
                            walk.nbytes, ivs,
                            flags & ~(walk.nbytes == walk.total ?
                            0 : CRYPTO_LSKCIPHER_FLAG_FINAL));
                err = skcipher_walk_done(&walk, err);
                flags |= CRYPTO_LSKCIPHER_FLAG_CONT;
        }

        memcpy(req->iv, ivs, ivsize);

        return err;
}
int crypto_lskcipher_encrypt_sg(struct skcipher_request *req)
{
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
        struct lskcipher_alg *alg = crypto_lskcipher_alg(*ctx);

        return crypto_lskcipher_crypt_sg(req, alg->encrypt);
}
int crypto_lskcipher_decrypt_sg(struct skcipher_request *req)
{
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
        struct crypto_lskcipher **ctx = crypto_skcipher_ctx(skcipher);
        struct lskcipher_alg *alg = crypto_lskcipher_alg(*ctx);

        return crypto_lskcipher_crypt_sg(req, alg->decrypt);
}
static void crypto_lskcipher_exit_tfm(struct crypto_tfm *tfm)
{
        struct crypto_lskcipher *skcipher = __crypto_lskcipher_cast(tfm);
        struct lskcipher_alg *alg = crypto_lskcipher_alg(skcipher);

        alg->exit(skcipher);
}
static int crypto_lskcipher_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_lskcipher *skcipher = __crypto_lskcipher_cast(tfm);
        struct lskcipher_alg *alg = crypto_lskcipher_alg(skcipher);

        if (alg->exit)
                skcipher->base.exit = crypto_lskcipher_exit_tfm;

        if (alg->init)
                return alg->init(skcipher);

        return 0;
}
static void crypto_lskcipher_free_instance(struct crypto_instance *inst)
{
        struct lskcipher_instance *skcipher =
                container_of(inst, struct lskcipher_instance, s.base);

        skcipher->free(skcipher);
}
static void __maybe_unused crypto_lskcipher_show(
        struct seq_file *m, struct crypto_alg *alg)
{
        struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);

        seq_printf(m, "type         : lskcipher\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "min keysize  : %u\n", skcipher->co.min_keysize);
        seq_printf(m, "max keysize  : %u\n", skcipher->co.max_keysize);
        seq_printf(m, "ivsize       : %u\n", skcipher->co.ivsize);
        seq_printf(m, "chunksize    : %u\n", skcipher->co.chunksize);
        seq_printf(m, "statesize    : %u\n", skcipher->co.statesize);
}
static int __maybe_unused crypto_lskcipher_report(
        struct sk_buff *skb, struct crypto_alg *alg)
{
        struct lskcipher_alg *skcipher = __crypto_lskcipher_alg(alg);
        struct crypto_report_blkcipher rblkcipher;

        memset(&rblkcipher, 0, sizeof(rblkcipher));

        strscpy(rblkcipher.type, "lskcipher", sizeof(rblkcipher.type));
        strscpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv));

        rblkcipher.blocksize = alg->cra_blocksize;
        rblkcipher.min_keysize = skcipher->co.min_keysize;
        rblkcipher.max_keysize = skcipher->co.max_keysize;
        rblkcipher.ivsize = skcipher->co.ivsize;

        return nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER,
                       sizeof(rblkcipher), &rblkcipher);
}
static const struct crypto_type crypto_lskcipher_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_lskcipher_init_tfm,
        .free = crypto_lskcipher_free_instance,
#ifdef CONFIG_PROC_FS
        .show = crypto_lskcipher_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
        .report = crypto_lskcipher_report,
#endif
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_LSKCIPHER,
        .tfmsize = offsetof(struct crypto_lskcipher, base),
};
static void crypto_lskcipher_exit_tfm_sg(struct crypto_tfm *tfm)
{
        struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);

        crypto_free_lskcipher(*ctx);
}
int crypto_init_lskcipher_ops_sg(struct crypto_tfm *tfm)
{
        struct crypto_lskcipher **ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *calg = tfm->__crt_alg;
        struct crypto_lskcipher *skcipher;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        skcipher = crypto_create_tfm(calg, &crypto_lskcipher_type);
        if (IS_ERR(skcipher)) {
                crypto_mod_put(calg);
                return PTR_ERR(skcipher);
        }

        *ctx = skcipher;
        tfm->exit = crypto_lskcipher_exit_tfm_sg;

        return 0;
}
int crypto_grab_lskcipher(struct crypto_lskcipher_spawn *spawn,
                          struct crypto_instance *inst,
                          const char *name, u32 type, u32 mask)
{
        spawn->base.frontend = &crypto_lskcipher_type;
        return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_lskcipher);
struct crypto_lskcipher *crypto_alloc_lskcipher(const char *alg_name,
                                                u32 type, u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_lskcipher_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_lskcipher);
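
/*
 * Example usage (an illustrative sketch, not taken from an in-tree user):
 * encrypt a linear buffer in place with a cipher such as "cbc(aes)".  The
 * @key, @buf and @len variables are assumed to be supplied by the caller,
 * and error handling is abbreviated.
 *
 *	struct crypto_lskcipher *tfm;
 *	u8 iv[16] = {};
 *	int err;
 *
 *	tfm = crypto_alloc_lskcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_lskcipher_setkey(tfm, key, 16);
 *	if (!err)
 *		err = crypto_lskcipher_encrypt(tfm, buf, buf, len, iv);
 *
 *	crypto_free_lskcipher(tfm);
 *	return err;
 */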
static int lskcipher_prepare_alg(struct lskcipher_alg *alg)
{
        struct crypto_alg *base = &alg->co.base;
        int err;

        err = skcipher_prepare_alg_common(&alg->co);
        if (err)
                return err;

        /* The chunk size must be a power of two. */
        if (alg->co.chunksize & (alg->co.chunksize - 1))
                return -EINVAL;

        base->cra_type = &crypto_lskcipher_type;
        base->cra_flags |= CRYPTO_ALG_TYPE_LSKCIPHER;

        return 0;
}
int crypto_register_lskcipher(struct lskcipher_alg *alg)
{
        struct crypto_alg *base = &alg->co.base;
        int err;

        err = lskcipher_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_lskcipher);
void crypto_unregister_lskcipher(struct lskcipher_alg *alg)
{
        crypto_unregister_alg(&alg->co.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_lskcipher);
int crypto_register_lskciphers(struct lskcipher_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_lskcipher(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_lskcipher(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_lskciphers);
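
/*
 * Example (a hedged sketch, not an in-tree driver): a driver typically
 * registers a static array of algorithms from its module init hook.  All
 * names below ("my_drv_algs", the ecb(foo) entry and its handlers) are
 * hypothetical.
 *
 *	static struct lskcipher_alg my_drv_algs[] = {{
 *		.co.base.cra_name	 = "ecb(foo)",
 *		.co.base.cra_driver_name = "ecb-foo-mydrv",
 *		.co.base.cra_priority	 = 300,
 *		.co.base.cra_blocksize	 = 16,
 *		.co.min_keysize		 = 16,
 *		.co.max_keysize		 = 32,
 *		.setkey			 = my_drv_setkey,
 *		.encrypt		 = my_drv_encrypt,
 *		.decrypt		 = my_drv_decrypt,
 *	}};
 *
 *	return crypto_register_lskciphers(my_drv_algs,
 *					  ARRAY_SIZE(my_drv_algs));
 */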
void crypto_unregister_lskciphers(struct lskcipher_alg *algs, int count)
{
        int i;

        for (i = count - 1; i >= 0; --i)
                crypto_unregister_lskcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_lskciphers);
int lskcipher_register_instance(struct crypto_template *tmpl,
                                struct lskcipher_instance *inst)
{
        int err;

        if (WARN_ON(!inst->free))
                return -EINVAL;

        err = lskcipher_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, lskcipher_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(lskcipher_register_instance);
static int lskcipher_setkey_simple(struct crypto_lskcipher *tfm, const u8 *key,
                                   unsigned int keylen)
{
        struct crypto_lskcipher *cipher = lskcipher_cipher_simple(tfm);

        crypto_lskcipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
        crypto_lskcipher_set_flags(cipher, crypto_lskcipher_get_flags(tfm) &
                                   CRYPTO_TFM_REQ_MASK);
        return crypto_lskcipher_setkey(cipher, key, keylen);
}
static int lskcipher_init_tfm_simple(struct crypto_lskcipher *tfm)
{
        struct lskcipher_instance *inst = lskcipher_alg_instance(tfm);
        struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);
        struct crypto_lskcipher_spawn *spawn;
        struct crypto_lskcipher *cipher;

        spawn = lskcipher_instance_ctx(inst);
        cipher = crypto_spawn_lskcipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        *ctx = cipher;
        return 0;
}
static void lskcipher_exit_tfm_simple(struct crypto_lskcipher *tfm)
{
        struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

        crypto_free_lskcipher(*ctx);
}
static void lskcipher_free_instance_simple(struct lskcipher_instance *inst)
{
        crypto_drop_lskcipher(lskcipher_instance_ctx(inst));
        kfree(inst);
}
/**
 * lskcipher_alloc_instance_simple - allocate instance of simple block cipher
 *
 * Allocate an lskcipher_instance for a simple block cipher mode of operation,
 * e.g. cbc or ecb.  The instance context will have just a single crypto_spawn,
 * that for the underlying cipher.  The {min,max}_keysize, ivsize, blocksize,
 * alignmask, and priority are set from the underlying cipher but can be
 * overridden if needed.  The tfm context defaults to
 * struct crypto_lskcipher *, and default ->setkey(), ->init(), and
 * ->exit() methods are installed.
 *
 * @tmpl: the template being instantiated
 * @tb: the template parameters
 *
 * Return: a pointer to the new instance, or an ERR_PTR().  The caller still
 *	   needs to register the instance.
 */
struct lskcipher_instance *lskcipher_alloc_instance_simple(
        struct crypto_template *tmpl, struct rtattr **tb)
{
        u32 mask;
        struct lskcipher_instance *inst;
        struct crypto_lskcipher_spawn *spawn;
        char ecb_name[CRYPTO_MAX_ALG_NAME];
        struct lskcipher_alg *cipher_alg;
        const char *cipher_name;
        int err;

        err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_LSKCIPHER, &mask);
        if (err)
                return ERR_PTR(err);

        cipher_name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(cipher_name))
                return ERR_CAST(cipher_name);

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return ERR_PTR(-ENOMEM);

        spawn = lskcipher_instance_ctx(inst);
        err = crypto_grab_lskcipher(spawn,
                                    lskcipher_crypto_instance(inst),
                                    cipher_name, 0, mask);

        ecb_name[0] = 0;
        if (err == -ENOENT && !!memcmp(tmpl->name, "ecb", 4)) {
                err = -ENAMETOOLONG;
                if (snprintf(ecb_name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
                             cipher_name) >= CRYPTO_MAX_ALG_NAME)
                        goto err_free_inst;

                err = crypto_grab_lskcipher(spawn,
                                            lskcipher_crypto_instance(inst),
                                            ecb_name, 0, mask);
        }

        if (err)
                goto err_free_inst;

        cipher_alg = crypto_lskcipher_spawn_alg(spawn);

        err = crypto_inst_setname(lskcipher_crypto_instance(inst), tmpl->name,
                                  &cipher_alg->co.base);
        if (err)
                goto err_free_inst;

        if (ecb_name[0]) {
                int len;

                err = -EINVAL;
                len = strscpy(ecb_name, &cipher_alg->co.base.cra_name[4],
                              sizeof(ecb_name));
                if (len < 2)
                        goto err_free_inst;

                if (ecb_name[len - 1] != ')')
                        goto err_free_inst;

                ecb_name[len - 1] = 0;

                err = -ENAMETOOLONG;
                if (snprintf(inst->alg.co.base.cra_name, CRYPTO_MAX_ALG_NAME,
                             "%s(%s)", tmpl->name, ecb_name) >=
                    CRYPTO_MAX_ALG_NAME)
                        goto err_free_inst;

                if (strcmp(ecb_name, cipher_name) &&
                    snprintf(inst->alg.co.base.cra_driver_name,
                             CRYPTO_MAX_ALG_NAME,
                             "%s(%s)", tmpl->name, cipher_name) >=
                    CRYPTO_MAX_ALG_NAME)
                        goto err_free_inst;
        } else {
                /* Don't allow nesting. */
                err = -ELOOP;
                if ((cipher_alg->co.base.cra_flags & CRYPTO_ALG_INSTANCE))
                        goto err_free_inst;
        }

        err = -EINVAL;
        if (cipher_alg->co.ivsize)
                goto err_free_inst;

        inst->free = lskcipher_free_instance_simple;

        /* Default algorithm properties, can be overridden */
        inst->alg.co.base.cra_blocksize = cipher_alg->co.base.cra_blocksize;
        inst->alg.co.base.cra_alignmask = cipher_alg->co.base.cra_alignmask;
        inst->alg.co.base.cra_priority = cipher_alg->co.base.cra_priority;
        inst->alg.co.min_keysize = cipher_alg->co.min_keysize;
        inst->alg.co.max_keysize = cipher_alg->co.max_keysize;
        inst->alg.co.ivsize = cipher_alg->co.base.cra_blocksize;
        inst->alg.co.statesize = cipher_alg->co.statesize;

        /* Use struct crypto_lskcipher * by default, can be overridden */
        inst->alg.co.base.cra_ctxsize = sizeof(struct crypto_lskcipher *);
        inst->alg.setkey = lskcipher_setkey_simple;
        inst->alg.init = lskcipher_init_tfm_simple;
        inst->alg.exit = lskcipher_exit_tfm_simple;

        return inst;

err_free_inst:
        lskcipher_free_instance_simple(inst);
        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(lskcipher_alloc_instance_simple);
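
/*
 * Example (a sketch modelled on how a block cipher mode template such as
 * cbc can use the helper above; the "xyz" name and its handlers are
 * hypothetical):
 *
 *	static int crypto_xyz_create(struct crypto_template *tmpl,
 *				     struct rtattr **tb)
 *	{
 *		struct lskcipher_instance *inst;
 *		int err;
 *
 *		inst = lskcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = crypto_xyz_encrypt;
 *		inst->alg.decrypt = crypto_xyz_decrypt;
 *
 *		err = lskcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */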