// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/*
 * supported cipher list
 */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null",		CIPHER_NULL },
	{ "cbc(des3_ede)",	CIPHER_3DES_CBC },
	{ "ecb(des3_ede)",	CIPHER_3DES_ECB },
	{ "cbc(aes)",		CIPHER_AES_CBC },
	{ "ecb(aes)",		CIPHER_AES_ECB },
	{ "cfb(aes)",		CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))",	CIPHER_AES_CTR },
	{ "xts(aes)",		CIPHER_AES_XTS },
	{ "cts(cbc(aes))",	CIPHER_AES_CBC_CTS },
	{ NULL,			CIPHER_INVALID }
};

static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

static void free_src_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->src);
}

static void free_dst_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->dst);
}

static void nitrox_skcipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;

	free_src_sglist(skreq);
	free_dst_sglist(skreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%0x\n", err);
		err = -EINVAL;
	}

	skcipher_request_complete(skreq, err);
}

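/*
 * CBC completion: propagate the last ciphertext block back into
 * skreq->iv so chained requests see the correct next IV.  On encrypt
 * it is read from the end of dst; on decrypt it comes from src, or
 * from the iv_out copy that nitrox_cbc_decrypt() saved before an
 * in-place operation overwrote it.
 */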
static void nitrox_cbc_cipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	unsigned int start = skreq->cryptlen - ivsize;

	if (err) {
		nitrox_skcipher_callback(arg, err);
		return;
	}

	if (nkreq->creq.ctrl.s.arg == ENCRYPT) {
		scatterwalk_map_and_copy(skreq->iv, skreq->dst, start, ivsize,
					 0);
	} else {
		if (skreq->src != skreq->dst) {
			scatterwalk_map_and_copy(skreq->iv, skreq->src, start,
						 ivsize, 0);
		} else {
			memcpy(skreq->iv, nkreq->iv_out, ivsize);
			kfree(nkreq->iv_out);
		}
	}

	nitrox_skcipher_callback(arg, err);
}

static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}

	nctx->callback = nitrox_skcipher_callback;
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

static int nitrox_cbc_init(struct crypto_skcipher *tfm)
{
	int err;
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	err = nitrox_skcipher_init(tfm);
	if (err)
		return err;

	nctx->callback = nitrox_cbc_cipher_callback;
	return 0;
}

static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags *flags;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags = &fctx->flags;
	flags->f = 0;
	flags->w0.cipher_type = cipher_type;
	flags->w0.aes_keylen = aes_keylen;
	flags->w0.iv_source = IV_FROM_DPTR;
	flags->f = cpu_to_be64(*(u64 *)&flags->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->src) + 1;
	int ret;

	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,
			       skreq->cryptlen);

	return 0;
}

static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->dst) + 3;
	int ret;

	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,
			       skreq->cryptlen);

	return 0;
}

static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;
	int ret;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		     GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;
	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(skreq, ivsize);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(skreq, ivsize);
	if (ret) {
		free_src_sglist(skreq);
		return ret;
	}

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nctx->callback,
					 skreq);
}

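/*
 * In-place CBC decrypt overwrites src with plaintext, so stash the
 * last ciphertext block (the next IV) in nkreq->iv_out first; the
 * completion callback copies it back into skreq->iv and frees it.
 */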
static int nitrox_cbc_decrypt(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	gfp_t flags = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
			GFP_KERNEL : GFP_ATOMIC;
	unsigned int start = skreq->cryptlen - ivsize;

	if (skreq->src != skreq->dst)
		return nitrox_skcipher_crypt(skreq, false);

	nkreq->iv_out = kmalloc(ivsize, flags);
	if (!nkreq->iv_out)
		return -ENOMEM;

	scatterwalk_map_and_copy(nkreq->iv_out, skreq->src, start, ivsize, 0);
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(cipher, key) ?:
	       nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	/* copy KEY1 */
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

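/*
 * Algorithm registrations.  The "n5_" driver names distinguish these
 * NITROX hardware implementations from the generic software ciphers.
 */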
static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_cbc_decrypt,
	.init = nitrox_cbc_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_cbc_decrypt,
	.init = nitrox_cbc_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
} };

int nitrox_register_skciphers(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_unregister_skciphers(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}