1 // SPDX-License-Identifier: GPL-2.0
2 #include <linux/kernel.h>
3 #include <linux/printk.h>
4 #include <linux/crypto.h>
5 #include <linux/rtnetlink.h>
7 #include <crypto/aead.h>
8 #include <crypto/authenc.h>
9 #include <crypto/des.h>
10 #include <crypto/internal/aead.h>
11 #include <crypto/scatterwalk.h>
12 #include <crypto/gcm.h>
14 #include "nitrox_dev.h"
15 #include "nitrox_common.h"
16 #include "nitrox_req.h"
18 #define GCM_AES_SALT_SIZE 4
22 #ifdef __BIG_ENDIAN_BITFIELD
33 static int nitrox_aes_gcm_setkey(struct crypto_aead
*aead
, const u8
*key
,
37 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
38 struct flexi_crypto_context
*fctx
;
39 union fc_ctx_flags flags
;
41 aes_keylen
= flexi_aes_keylen(keylen
);
45 /* fill crypto context */
47 flags
.fu
= be64_to_cpu(fctx
->flags
.f
);
48 flags
.w0
.aes_keylen
= aes_keylen
;
49 fctx
->flags
.f
= cpu_to_be64(flags
.fu
);
51 /* copy enc key to context */
52 memset(&fctx
->crypto
, 0, sizeof(fctx
->crypto
));
53 memcpy(fctx
->crypto
.u
.key
, key
, keylen
);
58 static int nitrox_aead_setauthsize(struct crypto_aead
*aead
,
59 unsigned int authsize
)
61 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
62 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
63 union fc_ctx_flags flags
;
65 flags
.fu
= be64_to_cpu(fctx
->flags
.f
);
66 flags
.w0
.mac_len
= authsize
;
67 fctx
->flags
.f
= cpu_to_be64(flags
.fu
);
69 aead
->authsize
= authsize
;
74 static int nitrox_aes_gcm_setauthsize(struct crypto_aead
*aead
,
75 unsigned int authsize
)
90 return nitrox_aead_setauthsize(aead
, authsize
);
93 static int alloc_src_sglist(struct nitrox_kcrypt_request
*nkreq
,
94 struct scatterlist
*src
, char *iv
, int ivsize
,
97 int nents
= sg_nents_for_len(src
, buflen
);
105 /* Allocate buffer to hold IV and input scatterlist array */
106 ret
= alloc_src_req_buf(nkreq
, nents
, ivsize
);
110 nitrox_creq_copy_iv(nkreq
->src
, iv
, ivsize
);
111 nitrox_creq_set_src_sg(nkreq
, nents
, ivsize
, src
, buflen
);
/*
 * alloc_dst_sglist - build the hardware output scatterlist.
 * Adds three extra entries (IV, ORH, COMPLETION) ahead of the SG
 * entries for @dst.
 *
 * Returns 0 on success or a negative errno on failure.
 */
static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV, ORH, COMPLETION entries */
	nents += 3;
	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}
141 static void free_src_sglist(struct nitrox_kcrypt_request
*nkreq
)
146 static void free_dst_sglist(struct nitrox_kcrypt_request
*nkreq
)
151 static int nitrox_set_creq(struct nitrox_aead_rctx
*rctx
)
153 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
157 creq
->flags
= rctx
->flags
;
158 creq
->gfp
= (rctx
->flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) ? GFP_KERNEL
:
161 creq
->ctrl
.value
= 0;
162 creq
->opcode
= FLEXI_CRYPTO_ENCRYPT_HMAC
;
163 creq
->ctrl
.s
.arg
= rctx
->ctrl_arg
;
165 creq
->gph
.param0
= cpu_to_be16(rctx
->cryptlen
);
166 creq
->gph
.param1
= cpu_to_be16(rctx
->cryptlen
+ rctx
->assoclen
);
167 creq
->gph
.param2
= cpu_to_be16(rctx
->ivsize
+ rctx
->assoclen
);
168 param3
.iv_offset
= 0;
169 param3
.auth_offset
= rctx
->ivsize
;
170 creq
->gph
.param3
= cpu_to_be16(param3
.param
);
172 creq
->ctx_handle
= rctx
->ctx_handle
;
173 creq
->ctrl
.s
.ctxl
= sizeof(struct flexi_crypto_context
);
175 ret
= alloc_src_sglist(&rctx
->nkreq
, rctx
->src
, rctx
->iv
, rctx
->ivsize
,
180 ret
= alloc_dst_sglist(&rctx
->nkreq
, rctx
->dst
, rctx
->ivsize
,
183 free_src_sglist(&rctx
->nkreq
);
190 static void nitrox_aead_callback(void *arg
, int err
)
192 struct aead_request
*areq
= arg
;
193 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
195 free_src_sglist(&rctx
->nkreq
);
196 free_dst_sglist(&rctx
->nkreq
);
198 pr_err_ratelimited("request failed status 0x%0x\n", err
);
202 areq
->base
.complete(&areq
->base
, err
);
/*
 * The SE microcode only supports associated data up to 512 bytes for
 * AES-GCM; reject longer AAD before building the request.
 */
static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)
{
	if (assoclen <= 512)
		return true;

	return false;
}
213 static int nitrox_aes_gcm_enc(struct aead_request
*areq
)
215 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
216 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
217 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
218 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
219 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
222 if (!nitrox_aes_gcm_assoclen_supported(areq
->assoclen
))
225 memcpy(fctx
->crypto
.iv
, areq
->iv
, GCM_AES_SALT_SIZE
);
227 rctx
->cryptlen
= areq
->cryptlen
;
228 rctx
->assoclen
= areq
->assoclen
;
229 rctx
->srclen
= areq
->assoclen
+ areq
->cryptlen
;
230 rctx
->dstlen
= rctx
->srclen
+ aead
->authsize
;
231 rctx
->iv
= &areq
->iv
[GCM_AES_SALT_SIZE
];
232 rctx
->ivsize
= GCM_AES_IV_SIZE
- GCM_AES_SALT_SIZE
;
233 rctx
->flags
= areq
->base
.flags
;
234 rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
235 rctx
->src
= areq
->src
;
236 rctx
->dst
= areq
->dst
;
237 rctx
->ctrl_arg
= ENCRYPT
;
238 ret
= nitrox_set_creq(rctx
);
242 /* send the crypto request */
243 return nitrox_process_se_request(nctx
->ndev
, creq
, nitrox_aead_callback
,
247 static int nitrox_aes_gcm_dec(struct aead_request
*areq
)
249 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
250 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
251 struct nitrox_aead_rctx
*rctx
= aead_request_ctx(areq
);
252 struct se_crypto_request
*creq
= &rctx
->nkreq
.creq
;
253 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
256 if (!nitrox_aes_gcm_assoclen_supported(areq
->assoclen
))
259 memcpy(fctx
->crypto
.iv
, areq
->iv
, GCM_AES_SALT_SIZE
);
261 rctx
->cryptlen
= areq
->cryptlen
- aead
->authsize
;
262 rctx
->assoclen
= areq
->assoclen
;
263 rctx
->srclen
= areq
->cryptlen
+ areq
->assoclen
;
264 rctx
->dstlen
= rctx
->srclen
- aead
->authsize
;
265 rctx
->iv
= &areq
->iv
[GCM_AES_SALT_SIZE
];
266 rctx
->ivsize
= GCM_AES_IV_SIZE
- GCM_AES_SALT_SIZE
;
267 rctx
->flags
= areq
->base
.flags
;
268 rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
269 rctx
->src
= areq
->src
;
270 rctx
->dst
= areq
->dst
;
271 rctx
->ctrl_arg
= DECRYPT
;
272 ret
= nitrox_set_creq(rctx
);
276 /* send the crypto request */
277 return nitrox_process_se_request(nctx
->ndev
, creq
, nitrox_aead_callback
,
281 static int nitrox_aead_init(struct crypto_aead
*aead
)
283 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
284 struct crypto_ctx_hdr
*chdr
;
286 /* get the first device */
287 nctx
->ndev
= nitrox_get_first_device();
291 /* allocate nitrox crypto context */
292 chdr
= crypto_alloc_context(nctx
->ndev
);
294 nitrox_put_device(nctx
->ndev
);
298 nctx
->u
.ctx_handle
= (uintptr_t)((u8
*)chdr
->vaddr
+
299 sizeof(struct ctx_hdr
));
300 nctx
->u
.fctx
->flags
.f
= 0;
305 static int nitrox_gcm_common_init(struct crypto_aead
*aead
)
308 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
309 union fc_ctx_flags
*flags
;
311 ret
= nitrox_aead_init(aead
);
315 flags
= &nctx
->u
.fctx
->flags
;
316 flags
->w0
.cipher_type
= CIPHER_AES_GCM
;
317 flags
->w0
.hash_type
= AUTH_NULL
;
318 flags
->w0
.iv_source
= IV_FROM_DPTR
;
319 /* ask microcode to calculate ipad/opad */
320 flags
->w0
.auth_input_type
= 1;
321 flags
->f
= cpu_to_be64(flags
->fu
);
326 static int nitrox_aes_gcm_init(struct crypto_aead
*aead
)
330 ret
= nitrox_gcm_common_init(aead
);
334 crypto_aead_set_reqsize(aead
,
335 sizeof(struct aead_request
) +
336 sizeof(struct nitrox_aead_rctx
));
341 static void nitrox_aead_exit(struct crypto_aead
*aead
)
343 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
345 /* free the nitrox crypto context */
346 if (nctx
->u
.ctx_handle
) {
347 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
349 memzero_explicit(&fctx
->crypto
, sizeof(struct crypto_keys
));
350 memzero_explicit(&fctx
->auth
, sizeof(struct auth_keys
));
351 crypto_free_context((void *)nctx
->chdr
);
353 nitrox_put_device(nctx
->ndev
);
355 nctx
->u
.ctx_handle
= 0;
359 static int nitrox_rfc4106_setkey(struct crypto_aead
*aead
, const u8
*key
,
362 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
363 struct flexi_crypto_context
*fctx
= nctx
->u
.fctx
;
366 if (keylen
< GCM_AES_SALT_SIZE
)
369 keylen
-= GCM_AES_SALT_SIZE
;
370 ret
= nitrox_aes_gcm_setkey(aead
, key
, keylen
);
374 memcpy(fctx
->crypto
.iv
, key
+ keylen
, GCM_AES_SALT_SIZE
);
378 static int nitrox_rfc4106_setauthsize(struct crypto_aead
*aead
,
379 unsigned int authsize
)
390 return nitrox_aead_setauthsize(aead
, authsize
);
393 static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request
*areq
)
395 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
396 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
397 unsigned int assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
398 struct scatterlist
*sg
;
400 if (areq
->assoclen
!= 16 && areq
->assoclen
!= 20)
403 scatterwalk_map_and_copy(rctx
->assoc
, areq
->src
, 0, assoclen
, 0);
404 sg_init_table(rctx
->src
, 3);
405 sg_set_buf(rctx
->src
, rctx
->assoc
, assoclen
);
406 sg
= scatterwalk_ffwd(rctx
->src
+ 1, areq
->src
, areq
->assoclen
);
407 if (sg
!= rctx
->src
+ 1)
408 sg_chain(rctx
->src
, 2, sg
);
410 if (areq
->src
!= areq
->dst
) {
411 sg_init_table(rctx
->dst
, 3);
412 sg_set_buf(rctx
->dst
, rctx
->assoc
, assoclen
);
413 sg
= scatterwalk_ffwd(rctx
->dst
+ 1, areq
->dst
, areq
->assoclen
);
414 if (sg
!= rctx
->dst
+ 1)
415 sg_chain(rctx
->dst
, 2, sg
);
418 aead_rctx
->src
= rctx
->src
;
419 aead_rctx
->dst
= (areq
->src
== areq
->dst
) ? rctx
->src
: rctx
->dst
;
424 static void nitrox_rfc4106_callback(void *arg
, int err
)
426 struct aead_request
*areq
= arg
;
427 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
428 struct nitrox_kcrypt_request
*nkreq
= &rctx
->base
.nkreq
;
430 free_src_sglist(nkreq
);
431 free_dst_sglist(nkreq
);
433 pr_err_ratelimited("request failed status 0x%0x\n", err
);
437 areq
->base
.complete(&areq
->base
, err
);
440 static int nitrox_rfc4106_enc(struct aead_request
*areq
)
442 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
443 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
444 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
445 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
446 struct se_crypto_request
*creq
= &aead_rctx
->nkreq
.creq
;
449 aead_rctx
->cryptlen
= areq
->cryptlen
;
450 aead_rctx
->assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
451 aead_rctx
->srclen
= aead_rctx
->assoclen
+ aead_rctx
->cryptlen
;
452 aead_rctx
->dstlen
= aead_rctx
->srclen
+ aead
->authsize
;
453 aead_rctx
->iv
= areq
->iv
;
454 aead_rctx
->ivsize
= GCM_RFC4106_IV_SIZE
;
455 aead_rctx
->flags
= areq
->base
.flags
;
456 aead_rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
457 aead_rctx
->ctrl_arg
= ENCRYPT
;
459 ret
= nitrox_rfc4106_set_aead_rctx_sglist(areq
);
463 ret
= nitrox_set_creq(aead_rctx
);
467 /* send the crypto request */
468 return nitrox_process_se_request(nctx
->ndev
, creq
,
469 nitrox_rfc4106_callback
, areq
);
472 static int nitrox_rfc4106_dec(struct aead_request
*areq
)
474 struct crypto_aead
*aead
= crypto_aead_reqtfm(areq
);
475 struct nitrox_crypto_ctx
*nctx
= crypto_aead_ctx(aead
);
476 struct nitrox_rfc4106_rctx
*rctx
= aead_request_ctx(areq
);
477 struct nitrox_aead_rctx
*aead_rctx
= &rctx
->base
;
478 struct se_crypto_request
*creq
= &aead_rctx
->nkreq
.creq
;
481 aead_rctx
->cryptlen
= areq
->cryptlen
- aead
->authsize
;
482 aead_rctx
->assoclen
= areq
->assoclen
- GCM_RFC4106_IV_SIZE
;
484 areq
->cryptlen
- GCM_RFC4106_IV_SIZE
+ areq
->assoclen
;
485 aead_rctx
->dstlen
= aead_rctx
->srclen
- aead
->authsize
;
486 aead_rctx
->iv
= areq
->iv
;
487 aead_rctx
->ivsize
= GCM_RFC4106_IV_SIZE
;
488 aead_rctx
->flags
= areq
->base
.flags
;
489 aead_rctx
->ctx_handle
= nctx
->u
.ctx_handle
;
490 aead_rctx
->ctrl_arg
= DECRYPT
;
492 ret
= nitrox_rfc4106_set_aead_rctx_sglist(areq
);
496 ret
= nitrox_set_creq(aead_rctx
);
500 /* send the crypto request */
501 return nitrox_process_se_request(nctx
->ndev
, creq
,
502 nitrox_rfc4106_callback
, areq
);
505 static int nitrox_rfc4106_init(struct crypto_aead
*aead
)
509 ret
= nitrox_gcm_common_init(aead
);
513 crypto_aead_set_reqsize(aead
, sizeof(struct aead_request
) +
514 sizeof(struct nitrox_rfc4106_rctx
));
519 static struct aead_alg nitrox_aeads
[] = { {
521 .cra_name
= "gcm(aes)",
522 .cra_driver_name
= "n5_aes_gcm",
523 .cra_priority
= PRIO
,
524 .cra_flags
= CRYPTO_ALG_ASYNC
| CRYPTO_ALG_ALLOCATES_MEMORY
,
526 .cra_ctxsize
= sizeof(struct nitrox_crypto_ctx
),
528 .cra_module
= THIS_MODULE
,
530 .setkey
= nitrox_aes_gcm_setkey
,
531 .setauthsize
= nitrox_aes_gcm_setauthsize
,
532 .encrypt
= nitrox_aes_gcm_enc
,
533 .decrypt
= nitrox_aes_gcm_dec
,
534 .init
= nitrox_aes_gcm_init
,
535 .exit
= nitrox_aead_exit
,
536 .ivsize
= GCM_AES_IV_SIZE
,
537 .maxauthsize
= AES_BLOCK_SIZE
,
540 .cra_name
= "rfc4106(gcm(aes))",
541 .cra_driver_name
= "n5_rfc4106",
542 .cra_priority
= PRIO
,
543 .cra_flags
= CRYPTO_ALG_ASYNC
| CRYPTO_ALG_ALLOCATES_MEMORY
,
545 .cra_ctxsize
= sizeof(struct nitrox_crypto_ctx
),
547 .cra_module
= THIS_MODULE
,
549 .setkey
= nitrox_rfc4106_setkey
,
550 .setauthsize
= nitrox_rfc4106_setauthsize
,
551 .encrypt
= nitrox_rfc4106_enc
,
552 .decrypt
= nitrox_rfc4106_dec
,
553 .init
= nitrox_rfc4106_init
,
554 .exit
= nitrox_aead_exit
,
555 .ivsize
= GCM_RFC4106_IV_SIZE
,
556 .maxauthsize
= AES_BLOCK_SIZE
,
559 int nitrox_register_aeads(void)
561 return crypto_register_aeads(nitrox_aeads
, ARRAY_SIZE(nitrox_aeads
));
564 void nitrox_unregister_aeads(void)
566 crypto_unregister_aeads(nitrox_aeads
, ARRAY_SIZE(nitrox_aeads
));