1 // SPDX-License-Identifier: GPL-2.0
2 #include <linux/kernel.h>
3 #include <linux/printk.h>
4 #include <linux/crypto.h>
5 #include <linux/rtnetlink.h>
7 #include <crypto/aead.h>
8 #include <crypto/authenc.h>
9 #include <crypto/des.h>
10 #include <crypto/sha.h>
11 #include <crypto/internal/aead.h>
12 #include <crypto/scatterwalk.h>
13 #include <crypto/gcm.h>
15 #include "nitrox_dev.h"
16 #include "nitrox_common.h"
17 #include "nitrox_req.h"
19 #define GCM_AES_SALT_SIZE 4
23 #ifdef __BIG_ENDIAN_BITFIELD
34 static int nitrox_aes_gcm_setkey(struct crypto_aead
*aead
, const u8
*key
,
38 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
39 struct flexi_crypto_context
*fctx
;
40 union fc_ctx_flags flags
;
42 aes_keylen
= flexi_aes_keylen(keylen
);
46 /* fill crypto context */
48 flags
.f
= be64_to_cpu(fctx
->flags
.f
);
49 flags
.w0
.aes_keylen
= aes_keylen
;
50 fctx
->flags
.f
= cpu_to_be64(flags
.f
);
52 /* copy enc key to context */
53 memset(&fctx
->crypto
, 0, sizeof(fctx
->crypto
));
54 memcpy(fctx
->crypto
.u
.key
, key
, keylen
);
59 static int nitrox_aead_setauthsize(struct crypto_aead
*aead
,
60 unsigned int authsize
)
62 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
63 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
64 union fc_ctx_flags flags
;
66 flags
.f
= be64_to_cpu(fctx
->flags
.f
);
67 flags
.w0
.mac_len
= authsize
;
68 fctx
->flags
.f
= cpu_to_be64(flags
.f
);
70 aead
->authsize
= authsize
;
75 static int nitrox_aes_gcm_setauthsize(struct crypto_aead
*aead
,
76 unsigned int authsize
)
91 return nitrox_aead_setauthsize(aead
, authsize
);
94 static int alloc_src_sglist(struct nitrox_kcrypt_request
*nkreq
,
95 struct scatterlist
*src
, char *iv
, int ivsize
,
98 int nents
= sg_nents_for_len(src
, buflen
);
106 /* Allocate buffer to hold IV and input scatterlist array */
107 ret
= alloc_src_req_buf(nkreq
, nents
, ivsize
);
111 nitrox_creq_copy_iv(nkreq
->src
, iv
, ivsize
);
112 nitrox_creq_set_src_sg(nkreq
, nents
, ivsize
, src
, buflen
);
/*
 * alloc_dst_sglist - build the output (destination) SG list for a request.
 * @nkreq: per-request state holding the buffers being built
 * @dst: caller's destination scatterlist
 * @ivsize: IV length in bytes (offsets the data entries)
 * @buflen: total output length covered by @dst
 *
 * Mirrors alloc_src_sglist() for the output side: the buffer additionally
 * carries the ORH and COMPLETION words the hardware writes back.
 * Returns 0 or a negative errno.
 */
static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV, ORH, COMPLETION entries */
	nents += 3;

	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}
142 static void free_src_sglist(struct nitrox_kcrypt_request
*nkreq
)
147 static void free_dst_sglist(struct nitrox_kcrypt_request
*nkreq
)
152 static int nitrox_set_creq(struct nitrox_aead_rctx
*rctx
)
154 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
158 creq
->flags
= rctx
->flags
;
159 creq
->gfp
= (rctx
->flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ? GFP_KERNEL
:
162 creq
->ctrl
.value
= 0;
163 creq
->opcode
= FLEXI_CRYPTO_ENCRYPT_HMAC
;
164 creq
->ctrl
.s
.arg
= rctx
->ctrl_arg
;
166 creq
->gph
.param0
= cpu_to_be16(rctx
->cryptlen
);
167 creq
->gph
.param1
= cpu_to_be16(rctx
->cryptlen
+ rctx
->assoclen
);
168 creq
->gph
.param2
= cpu_to_be16(rctx
->ivsize
+ rctx
->assoclen
);
169 param3
.iv_offset
= 0;
170 param3
.auth_offset
= rctx
->ivsize
;
171 creq
->gph
.param3
= cpu_to_be16(param3
.param
);
173 creq
->ctx_handle
= rctx
->ctx_handle
;
174 creq
->ctrl
.s
.ctxl
= sizeof(struct flexi_crypto_context
);
176 ret
= alloc_src_sglist(&rctx
->nkreq
, rctx
->src
, rctx
->iv
, rctx
->ivsize
,
181 ret
= alloc_dst_sglist(&rctx
->nkreq
, rctx
->dst
, rctx
->ivsize
,
184 free_src_sglist(&rctx
->nkreq
);
191 static void nitrox_aead_callback(void *arg
, int err
)
193 struct aead_request
*areq
= arg
;
194 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
196 free_src_sglist(&rctx
->nkreq
);
197 free_dst_sglist(&rctx
->nkreq
);
199 pr_err_ratelimited("request failed status 0x%0x\n", err
);
203 areq
->base
.complete(&areq
->base
, err
);
/*
 * nitrox_aes_gcm_assoclen_supported - can the engine handle this AAD length?
 * @assoclen: associated-data length in bytes
 *
 * NOTE(review): the body was dropped by extraction; the 512-byte ceiling is
 * reconstructed from the upstream driver — confirm against hardware limits.
 */
static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)
{
	return assoclen <= 512;
}
214 static int nitrox_aes_gcm_enc(struct aead_request
*areq
)
216 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
217 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
218 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
219 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
220 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
223 if (!nitrox_aes_gcm_assoclen_supported(areq
->assoclen
))
226 memcpy(fctx
->crypto
.iv
, areq
->iv
, GCM_AES_SALT_SIZE
);
228 rctx
->cryptlen
= areq
->cryptlen
;
229 rctx
->assoclen
= areq
->assoclen
;
230 rctx
->srclen
= areq
->assoclen
+ areq
->cryptlen
;
231 rctx
->dstlen
= rctx
->srclen
+ aead
->authsize
;
232 rctx
->iv
= &areq
->iv
[GCM_AES_SALT_SIZE
];
233 rctx
->ivsize
= GCM_AES_IV_SIZE
- GCM_AES_SALT_SIZE
;
234 rctx
->flags
= areq
->base
.flags
;
235 rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
236 rctx
->src
= areq
->src
;
237 rctx
->dst
= areq
->dst
;
238 rctx
->ctrl_arg
= ENCRYPT
;
239 ret
= nitrox_set_creq(rctx
);
243 /* send the crypto request */
244 return nitrox_process_se_request(nctx
->ndev
, creq
, nitrox_aead_callback
,
248 static int nitrox_aes_gcm_dec(struct aead_request
*areq
)
250 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
251 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
252 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
253 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
254 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
257 if (!nitrox_aes_gcm_assoclen_supported(areq
->assoclen
))
260 memcpy(fctx
->crypto
.iv
, areq
->iv
, GCM_AES_SALT_SIZE
);
262 rctx
->cryptlen
= areq
->cryptlen
- aead
->authsize
;
263 rctx
->assoclen
= areq
->assoclen
;
264 rctx
->srclen
= areq
->cryptlen
+ areq
->assoclen
;
265 rctx
->dstlen
= rctx
->srclen
- aead
->authsize
;
266 rctx
->iv
= &areq
->iv
[GCM_AES_SALT_SIZE
];
267 rctx
->ivsize
= GCM_AES_IV_SIZE
- GCM_AES_SALT_SIZE
;
268 rctx
->flags
= areq
->base
.flags
;
269 rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
270 rctx
->src
= areq
->src
;
271 rctx
->dst
= areq
->dst
;
272 rctx
->ctrl_arg
= DECRYPT
;
273 ret
= nitrox_set_creq(rctx
);
277 /* send the crypto request */
278 return nitrox_process_se_request(nctx
->ndev
, creq
, nitrox_aead_callback
,
282 static int nitrox_aead_init(struct crypto_aead
*aead
)
284 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
285 struct crypto_ctx_hdr
*chdr
;
287 /* get the first device */
288 nctx
->ndev
= nitrox_get_first_device();
292 /* allocate nitrox crypto context */
293 chdr
= crypto_alloc_context(nctx
->ndev
);
295 nitrox_put_device(nctx
->ndev
);
299 nctx
->u
.ctx_handle
= (uintptr_t)((u8
*)chdr
->vaddr
+
300 sizeof(struct ctx_hdr
));
301 nctx
->u
.fctx
->flags
.f
= 0;
306 static int nitrox_gcm_common_init(struct crypto_aead
*aead
)
309 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
310 union fc_ctx_flags
*flags
;
312 ret
= nitrox_aead_init(aead
);
316 flags
= &nctx
->u
.fctx
->flags
;
317 flags
->w0
.cipher_type
= CIPHER_AES_GCM
;
318 flags
->w0
.hash_type
= AUTH_NULL
;
319 flags
->w0
.iv_source
= IV_FROM_DPTR
;
320 /* ask microcode to calculate ipad/opad */
321 flags
->w0
.auth_input_type
= 1;
322 flags
->f
= be64_to_cpu(flags
->f
);
327 static int nitrox_aes_gcm_init(struct crypto_aead
*aead
)
331 ret
= nitrox_gcm_common_init(aead
);
335 crypto_aead_set_reqsize(aead
,
336 sizeof(struct aead_request
) +
337 sizeof(struct nitrox_aead_rctx
));
342 static void nitrox_aead_exit(struct crypto_aead
*aead
)
344 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
346 /* free the nitrox crypto context */
347 if (nctx
->u
.ctx_handle
) {
348 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
350 memzero_explicit(&fctx
->crypto
, sizeof(struct crypto_keys
));
351 memzero_explicit(&fctx
->auth
, sizeof(struct auth_keys
));
352 crypto_free_context((void *)nctx
->chdr
);
354 nitrox_put_device(nctx
->ndev
);
356 nctx
->u
.ctx_handle
= 0;
360 static int nitrox_rfc4106_setkey(struct crypto_aead
*aead
, const u8
*key
,
363 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
364 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
367 if (keylen
< GCM_AES_SALT_SIZE
)
370 keylen
-= GCM_AES_SALT_SIZE
;
371 ret
= nitrox_aes_gcm_setkey(aead
, key
, keylen
);
375 memcpy(fctx
->crypto
.iv
, key
+ keylen
, GCM_AES_SALT_SIZE
);
379 static int nitrox_rfc4106_setauthsize(struct crypto_aead
*aead
,
380 unsigned int authsize
)
391 return nitrox_aead_setauthsize(aead
, authsize
);
394 static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request
*areq
)
396 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
397 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
398 unsigned int assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
399 struct scatterlist
*sg
;
401 if (areq
->assoclen
!= 16 && areq
->assoclen
!= 20)
404 scatterwalk_map_and_copy(rctx
->assoc
, areq
->src
, 0, assoclen
, 0);
405 sg_init_table(rctx
->src
, 3);
406 sg_set_buf(rctx
->src
, rctx
->assoc
, assoclen
);
407 sg
= scatterwalk_ffwd(rctx
->src
+ 1, areq
->src
, areq
->assoclen
);
408 if (sg
!= rctx
->src
+ 1)
409 sg_chain(rctx
->src
, 2, sg
);
411 if (areq
->src
!= areq
->dst
) {
412 sg_init_table(rctx
->dst
, 3);
413 sg_set_buf(rctx
->dst
, rctx
->assoc
, assoclen
);
414 sg
= scatterwalk_ffwd(rctx
->dst
+ 1, areq
->dst
, areq
->assoclen
);
415 if (sg
!= rctx
->dst
+ 1)
416 sg_chain(rctx
->dst
, 2, sg
);
419 aead_rctx
->src
= rctx
->src
;
420 aead_rctx
->dst
= (areq
->src
== areq
->dst
) ? rctx
->src
: rctx
->dst
;
425 static void nitrox_rfc4106_callback(void *arg
, int err
)
427 struct aead_request
*areq
= arg
;
428 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
429 struct nitrox_kcrypt_request
*nkreq
= &rctx
->base
.nkreq
;
431 free_src_sglist(nkreq
);
432 free_dst_sglist(nkreq
);
434 pr_err_ratelimited("request failed status 0x%0x\n", err
);
438 areq
->base
.complete(&areq
->base
, err
);
441 static int nitrox_rfc4106_enc(struct aead_request
*areq
)
443 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
444 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
445 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
446 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
447 struct se_crypto_request
*creq
= &aead_rctx
->nkreq
.creq
;
450 aead_rctx
->cryptlen
= areq
->cryptlen
;
451 aead_rctx
->assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
452 aead_rctx
->srclen
= aead_rctx
->assoclen
+ aead_rctx
->cryptlen
;
453 aead_rctx
->dstlen
= aead_rctx
->srclen
+ aead
->authsize
;
454 aead_rctx
->iv
= areq
->iv
;
455 aead_rctx
->ivsize
= GCM_RFC4106_IV_SIZE
;
456 aead_rctx
->flags
= areq
->base
.flags
;
457 aead_rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
458 aead_rctx
->ctrl_arg
= ENCRYPT
;
460 ret
= nitrox_rfc4106_set_aead_rctx_sglist(areq
);
464 ret
= nitrox_set_creq(aead_rctx
);
468 /* send the crypto request */
469 return nitrox_process_se_request(nctx
->ndev
, creq
,
470 nitrox_rfc4106_callback
, areq
);
473 static int nitrox_rfc4106_dec(struct aead_request
*areq
)
475 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
476 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
477 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
478 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
479 struct se_crypto_request
*creq
= &aead_rctx
->nkreq
.creq
;
482 aead_rctx
->cryptlen
= areq
->cryptlen
- aead
->authsize
;
483 aead_rctx
->assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
485 areq
->cryptlen
- GCM_RFC4106_IV_SIZE
+ areq
->assoclen
;
486 aead_rctx
->dstlen
= aead_rctx
->srclen
- aead
->authsize
;
487 aead_rctx
->iv
= areq
->iv
;
488 aead_rctx
->ivsize
= GCM_RFC4106_IV_SIZE
;
489 aead_rctx
->flags
= areq
->base
.flags
;
490 aead_rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
491 aead_rctx
->ctrl_arg
= DECRYPT
;
493 ret
= nitrox_rfc4106_set_aead_rctx_sglist(areq
);
497 ret
= nitrox_set_creq(aead_rctx
);
501 /* send the crypto request */
502 return nitrox_process_se_request(nctx
->ndev
, creq
,
503 nitrox_rfc4106_callback
, areq
);
506 static int nitrox_rfc4106_init(struct crypto_aead
*aead
)
510 ret
= nitrox_gcm_common_init(aead
);
514 crypto_aead_set_reqsize(aead
, sizeof(struct aead_request
) +
515 sizeof(struct nitrox_rfc4106_rctx
));
520 static struct aead_alg nitrox_aeads
[] = { {
522 .cra_name
= "gcm(aes)",
523 .cra_driver_name
= "n5_aes_gcm",
524 .cra_priority
= PRIO
,
525 .cra_flags
= CRYPTO_ALG_ASYNC
,
527 .cra_ctxsize
= sizeof(struct nitrox_crypto_ctx
),
529 .cra_module
= THIS_MODULE
,
531 .setkey
= nitrox_aes_gcm_setkey
,
532 .setauthsize
= nitrox_aes_gcm_setauthsize
,
533 .encrypt
= nitrox_aes_gcm_enc
,
534 .decrypt
= nitrox_aes_gcm_dec
,
535 .init
= nitrox_aes_gcm_init
,
536 .exit
= nitrox_aead_exit
,
537 .ivsize
= GCM_AES_IV_SIZE
,
538 .maxauthsize
= AES_BLOCK_SIZE
,
541 .cra_name
= "rfc4106(gcm(aes))",
542 .cra_driver_name
= "n5_rfc4106",
543 .cra_priority
= PRIO
,
544 .cra_flags
= CRYPTO_ALG_ASYNC
,
546 .cra_ctxsize
= sizeof(struct nitrox_crypto_ctx
),
548 .cra_module
= THIS_MODULE
,
550 .setkey
= nitrox_rfc4106_setkey
,
551 .setauthsize
= nitrox_rfc4106_setauthsize
,
552 .encrypt
= nitrox_rfc4106_enc
,
553 .decrypt
= nitrox_rfc4106_dec
,
554 .init
= nitrox_rfc4106_init
,
555 .exit
= nitrox_aead_exit
,
556 .ivsize
= GCM_RFC4106_IV_SIZE
,
557 .maxauthsize
= AES_BLOCK_SIZE
,
560 int nitrox_register_aeads(void)
562 return crypto_register_aeads(nitrox_aeads
, ARRAY_SIZE(nitrox_aeads
));
565 void nitrox_unregister_aeads(void)
567 crypto_unregister_aeads(nitrox_aeads
, ARRAY_SIZE(nitrox_aeads
));