// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha1.h>
22 #include <crypto/sha2.h>
23 #include <crypto/sm3.h>
24 #include <crypto/sm4.h>
25 #include <crypto/xts.h>
26 #include <crypto/skcipher.h>
27 #include <crypto/internal/aead.h>
28 #include <crypto/internal/skcipher.h>
/* Direction of a cipher request as seen by the EIP engine. */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT = 0,
	SAFEXCEL_DECRYPT,
};
/* Cipher algorithms supported by the safexcel hardware. */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
45 struct safexcel_cipher_ctx
{
46 struct safexcel_context base
;
47 struct safexcel_crypto_priv
*priv
;
50 enum safexcel_cipher_alg alg
;
51 u8 aead
; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
52 u8 xcm
; /* 0=authenc, 1=GCM, 2 reserved for CCM */
60 unsigned int key_len
, xts
;
62 /* All the below is AEAD specific */
66 struct crypto_cipher
*hkaes
;
67 struct crypto_aead
*fback
;
70 struct safexcel_cipher_req
{
71 enum safexcel_cipher_direction direction
;
72 /* Number of result descriptors associated to the request */
78 static int safexcel_skcipher_iv(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
79 struct safexcel_command_desc
*cdesc
)
81 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
) {
82 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
84 cdesc
->control_data
.token
[0] = ctx
->nonce
;
86 memcpy(&cdesc
->control_data
.token
[1], iv
, 8);
87 /* 32 bit counter, start at 0 or 1 (big endian!) */
88 cdesc
->control_data
.token
[3] =
89 (__force u32
)cpu_to_be32(ctx
->ctrinit
);
92 if (ctx
->alg
== SAFEXCEL_CHACHA20
) {
93 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
94 /* 96 bit nonce part */
95 memcpy(&cdesc
->control_data
.token
[0], &iv
[4], 12);
97 cdesc
->control_data
.token
[3] = *(u32
*)iv
;
101 cdesc
->control_data
.options
|= ctx
->ivmask
;
102 memcpy(cdesc
->control_data
.token
, iv
, ctx
->blocksz
);
103 return ctx
->blocksz
/ sizeof(u32
);
106 static void safexcel_skcipher_token(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
107 struct safexcel_command_desc
*cdesc
,
108 struct safexcel_token
*atoken
,
111 struct safexcel_token
*token
;
114 ivlen
= safexcel_skcipher_iv(ctx
, iv
, cdesc
);
116 /* No space in cdesc, instruction moves to atoken */
117 cdesc
->additional_cdata_size
= 1;
120 /* Everything fits in cdesc */
121 token
= (struct safexcel_token
*)(cdesc
->control_data
.token
+ 2);
122 /* Need to pad with NOP */
123 eip197_noop_token(&token
[1]);
126 token
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
127 token
->packet_length
= length
;
128 token
->stat
= EIP197_TOKEN_STAT_LAST_PACKET
|
129 EIP197_TOKEN_STAT_LAST_HASH
;
130 token
->instructions
= EIP197_TOKEN_INS_LAST
|
131 EIP197_TOKEN_INS_TYPE_CRYPTO
|
132 EIP197_TOKEN_INS_TYPE_OUTPUT
;
135 static void safexcel_aead_iv(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
136 struct safexcel_command_desc
*cdesc
)
138 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
||
139 ctx
->aead
& EIP197_AEAD_TYPE_IPSEC_ESP
) { /* _ESP and _ESP_GMAC */
141 cdesc
->control_data
.token
[0] = ctx
->nonce
;
143 memcpy(&cdesc
->control_data
.token
[1], iv
, 8);
144 /* 32 bit counter, start at 0 or 1 (big endian!) */
145 cdesc
->control_data
.token
[3] =
146 (__force u32
)cpu_to_be32(ctx
->ctrinit
);
149 if (ctx
->xcm
== EIP197_XCM_MODE_GCM
|| ctx
->alg
== SAFEXCEL_CHACHA20
) {
151 memcpy(&cdesc
->control_data
.token
[0], iv
, 12);
152 /* 32 bit counter, start at 0 or 1 (big endian!) */
153 cdesc
->control_data
.token
[3] =
154 (__force u32
)cpu_to_be32(ctx
->ctrinit
);
158 memcpy(cdesc
->control_data
.token
, iv
, ctx
->blocksz
);
161 static void safexcel_aead_token(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
162 struct safexcel_command_desc
*cdesc
,
163 struct safexcel_token
*atoken
,
164 enum safexcel_cipher_direction direction
,
165 u32 cryptlen
, u32 assoclen
, u32 digestsize
)
167 struct safexcel_token
*aadref
;
168 int atoksize
= 2; /* Start with minimum size */
169 int assocadj
= assoclen
- ctx
->aadskip
, aadalign
;
171 /* Always 4 dwords of embedded IV for AEAD modes */
172 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
174 if (direction
== SAFEXCEL_DECRYPT
)
175 cryptlen
-= digestsize
;
177 if (unlikely(ctx
->xcm
== EIP197_XCM_MODE_CCM
)) {
178 /* Construct IV block B0 for the CBC-MAC */
179 u8
*final_iv
= (u8
*)cdesc
->control_data
.token
;
180 u8
*cbcmaciv
= (u8
*)&atoken
[1];
181 __le32
*aadlen
= (__le32
*)&atoken
[5];
183 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
) {
185 cdesc
->control_data
.token
[0] = ctx
->nonce
;
186 /* Fixup flags byte */
187 *(__le32
*)cbcmaciv
=
188 cpu_to_le32(ctx
->nonce
|
189 ((assocadj
> 0) << 6) |
190 ((digestsize
- 2) << 2));
192 memcpy(&cdesc
->control_data
.token
[1], iv
, 8);
193 memcpy(cbcmaciv
+ 4, iv
, 8);
194 /* Start counter at 0 */
195 cdesc
->control_data
.token
[3] = 0;
197 *(__be32
*)(cbcmaciv
+ 12) = cpu_to_be32(cryptlen
);
199 /* Variable length IV part */
200 memcpy(final_iv
, iv
, 15 - iv
[0]);
201 memcpy(cbcmaciv
, iv
, 15 - iv
[0]);
202 /* Start variable length counter at 0 */
203 memset(final_iv
+ 15 - iv
[0], 0, iv
[0] + 1);
204 memset(cbcmaciv
+ 15 - iv
[0], 0, iv
[0] - 1);
205 /* fixup flags byte */
206 cbcmaciv
[0] |= ((assocadj
> 0) << 6) |
207 ((digestsize
- 2) << 2);
208 /* insert lower 2 bytes of message length */
209 cbcmaciv
[14] = cryptlen
>> 8;
210 cbcmaciv
[15] = cryptlen
& 255;
213 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
214 atoken
->packet_length
= AES_BLOCK_SIZE
+
215 ((assocadj
> 0) << 1);
217 atoken
->instructions
= EIP197_TOKEN_INS_ORIGIN_TOKEN
|
218 EIP197_TOKEN_INS_TYPE_HASH
;
220 if (likely(assocadj
)) {
221 *aadlen
= cpu_to_le32((assocadj
>> 8) |
222 (assocadj
& 255) << 8);
230 /* Process AAD data */
232 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
233 atoken
->packet_length
= assocadj
;
235 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
238 /* For CCM only, align AAD data towards hash engine */
239 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
240 aadalign
= (assocadj
+ 2) & 15;
241 atoken
->packet_length
= assocadj
&& aadalign
?
244 if (likely(cryptlen
)) {
246 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
248 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
249 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
250 EIP197_TOKEN_INS_TYPE_HASH
;
253 safexcel_aead_iv(ctx
, iv
, cdesc
);
255 /* Process AAD data */
257 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
258 atoken
->packet_length
= assocadj
;
259 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
260 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
261 EIP197_TOKEN_INS_TYPE_HASH
;
265 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
) {
266 /* For ESP mode (and not GMAC), skip over the IV */
267 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
268 atoken
->packet_length
= EIP197_AEAD_IPSEC_IV_SIZE
;
270 atoken
->instructions
= 0;
273 } else if (unlikely(ctx
->alg
== SAFEXCEL_CHACHA20
&&
274 direction
== SAFEXCEL_DECRYPT
)) {
275 /* Poly-chacha decryption needs a dummy NOP here ... */
276 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
277 atoken
->packet_length
= 16; /* According to Op Manual */
279 atoken
->instructions
= 0;
285 /* For GCM and CCM, obtain enc(Y0) */
286 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT_REMRES
;
287 atoken
->packet_length
= 0;
289 atoken
->instructions
= AES_BLOCK_SIZE
;
292 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
293 atoken
->packet_length
= AES_BLOCK_SIZE
;
295 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
|
296 EIP197_TOKEN_INS_TYPE_CRYPTO
;
301 if (likely(cryptlen
|| ctx
->alg
== SAFEXCEL_CHACHA20
)) {
302 /* Fixup stat field for AAD direction instruction */
305 /* Process crypto data */
306 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
307 atoken
->packet_length
= cryptlen
;
309 if (unlikely(ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP_GMAC
)) {
310 /* Fixup instruction field for AAD dir instruction */
311 aadref
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
313 /* Do not send to crypt engine in case of GMAC */
314 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
315 EIP197_TOKEN_INS_TYPE_HASH
|
316 EIP197_TOKEN_INS_TYPE_OUTPUT
;
318 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
319 EIP197_TOKEN_INS_TYPE_CRYPTO
|
320 EIP197_TOKEN_INS_TYPE_HASH
|
321 EIP197_TOKEN_INS_TYPE_OUTPUT
;
325 if (unlikely(ctx
->xcm
== EIP197_XCM_MODE_CCM
&& cryptlen
)) {
327 /* For CCM only, pad crypto data to the hash engine */
330 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
331 atoken
->packet_length
= 16 - cryptlen
;
332 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
333 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
335 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
341 if (direction
== SAFEXCEL_ENCRYPT
) {
343 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
344 atoken
->packet_length
= digestsize
;
345 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
|
346 EIP197_TOKEN_STAT_LAST_PACKET
;
347 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
|
348 EIP197_TOKEN_INS_INSERT_HASH_DIGEST
;
351 atoken
->opcode
= EIP197_TOKEN_OPCODE_RETRIEVE
;
352 atoken
->packet_length
= digestsize
;
353 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
|
354 EIP197_TOKEN_STAT_LAST_PACKET
;
355 atoken
->instructions
= EIP197_TOKEN_INS_INSERT_HASH_DIGEST
;
360 atoken
->opcode
= EIP197_TOKEN_OPCODE_VERIFY
;
361 atoken
->packet_length
= digestsize
|
362 EIP197_TOKEN_HASH_RESULT_VERIFY
;
363 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
|
364 EIP197_TOKEN_STAT_LAST_PACKET
;
365 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
;
368 /* Fixup length of the token in the command descriptor */
369 cdesc
->additional_cdata_size
= atoksize
;
372 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher
*ctfm
,
373 const u8
*key
, unsigned int len
)
375 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
376 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
377 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
378 struct crypto_aes_ctx aes
;
381 ret
= aes_expandkey(&aes
, key
, len
);
385 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
386 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
387 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
388 ctx
->base
.needs_inv
= true;
394 for (i
= 0; i
< len
/ sizeof(u32
); i
++)
395 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
399 memzero_explicit(&aes
, sizeof(aes
));
403 static int safexcel_aead_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
406 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
407 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
408 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
409 struct crypto_authenc_keys keys
;
410 struct crypto_aes_ctx aes
;
411 int err
= -EINVAL
, i
;
414 if (unlikely(crypto_authenc_extractkeys(&keys
, key
, len
)))
417 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
) {
418 /* Must have at least space for the nonce here */
419 if (unlikely(keys
.enckeylen
< CTR_RFC3686_NONCE_SIZE
))
421 /* last 4 bytes of key are the nonce! */
422 ctx
->nonce
= *(u32
*)(keys
.enckey
+ keys
.enckeylen
-
423 CTR_RFC3686_NONCE_SIZE
);
424 /* exclude the nonce here */
425 keys
.enckeylen
-= CTR_RFC3686_NONCE_SIZE
;
431 err
= verify_aead_des_key(ctfm
, keys
.enckey
, keys
.enckeylen
);
436 err
= verify_aead_des3_key(ctfm
, keys
.enckey
, keys
.enckeylen
);
441 err
= aes_expandkey(&aes
, keys
.enckey
, keys
.enckeylen
);
446 if (unlikely(keys
.enckeylen
!= SM4_KEY_SIZE
))
450 dev_err(priv
->dev
, "aead: unsupported cipher algorithm\n");
454 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
455 for (i
= 0; i
< keys
.enckeylen
/ sizeof(u32
); i
++) {
456 if (le32_to_cpu(ctx
->key
[i
]) !=
457 ((u32
*)keys
.enckey
)[i
]) {
458 ctx
->base
.needs_inv
= true;
465 switch (ctx
->hash_alg
) {
466 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1
:
467 alg
= "safexcel-sha1";
469 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224
:
470 alg
= "safexcel-sha224";
472 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256
:
473 alg
= "safexcel-sha256";
475 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384
:
476 alg
= "safexcel-sha384";
478 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512
:
479 alg
= "safexcel-sha512";
481 case CONTEXT_CONTROL_CRYPTO_ALG_SM3
:
482 alg
= "safexcel-sm3";
485 dev_err(priv
->dev
, "aead: unsupported hash algorithm\n");
489 if (safexcel_hmac_setkey(&ctx
->base
, keys
.authkey
, keys
.authkeylen
,
493 /* Now copy the keys into the context */
494 for (i
= 0; i
< keys
.enckeylen
/ sizeof(u32
); i
++)
495 ctx
->key
[i
] = cpu_to_le32(((u32
*)keys
.enckey
)[i
]);
496 ctx
->key_len
= keys
.enckeylen
;
498 memzero_explicit(&keys
, sizeof(keys
));
502 memzero_explicit(&keys
, sizeof(keys
));
506 static int safexcel_context_control(struct safexcel_cipher_ctx
*ctx
,
507 struct crypto_async_request
*async
,
508 struct safexcel_cipher_req
*sreq
,
509 struct safexcel_command_desc
*cdesc
)
511 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
512 int ctrl_size
= ctx
->key_len
/ sizeof(u32
);
514 cdesc
->control_data
.control1
= ctx
->mode
;
517 /* Take in account the ipad+opad digests */
519 ctrl_size
+= ctx
->state_sz
/ sizeof(u32
);
520 cdesc
->control_data
.control0
=
521 CONTEXT_CONTROL_KEY_EN
|
522 CONTEXT_CONTROL_DIGEST_XCM
|
524 CONTEXT_CONTROL_SIZE(ctrl_size
);
525 } else if (ctx
->alg
== SAFEXCEL_CHACHA20
) {
526 /* Chacha20-Poly1305 */
527 cdesc
->control_data
.control0
=
528 CONTEXT_CONTROL_KEY_EN
|
529 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20
|
530 (sreq
->direction
== SAFEXCEL_ENCRYPT
?
531 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT
:
532 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN
) |
534 CONTEXT_CONTROL_SIZE(ctrl_size
);
537 ctrl_size
+= ctx
->state_sz
/ sizeof(u32
) * 2;
538 cdesc
->control_data
.control0
=
539 CONTEXT_CONTROL_KEY_EN
|
540 CONTEXT_CONTROL_DIGEST_HMAC
|
542 CONTEXT_CONTROL_SIZE(ctrl_size
);
545 if (sreq
->direction
== SAFEXCEL_ENCRYPT
&&
546 (ctx
->xcm
== EIP197_XCM_MODE_CCM
||
547 ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP_GMAC
))
548 cdesc
->control_data
.control0
|=
549 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT
;
550 else if (sreq
->direction
== SAFEXCEL_ENCRYPT
)
551 cdesc
->control_data
.control0
|=
552 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT
;
553 else if (ctx
->xcm
== EIP197_XCM_MODE_CCM
)
554 cdesc
->control_data
.control0
|=
555 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN
;
557 cdesc
->control_data
.control0
|=
558 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN
;
560 if (sreq
->direction
== SAFEXCEL_ENCRYPT
)
561 cdesc
->control_data
.control0
=
562 CONTEXT_CONTROL_TYPE_CRYPTO_OUT
|
563 CONTEXT_CONTROL_KEY_EN
|
564 CONTEXT_CONTROL_SIZE(ctrl_size
);
566 cdesc
->control_data
.control0
=
567 CONTEXT_CONTROL_TYPE_CRYPTO_IN
|
568 CONTEXT_CONTROL_KEY_EN
|
569 CONTEXT_CONTROL_SIZE(ctrl_size
);
572 if (ctx
->alg
== SAFEXCEL_DES
) {
573 cdesc
->control_data
.control0
|=
574 CONTEXT_CONTROL_CRYPTO_ALG_DES
;
575 } else if (ctx
->alg
== SAFEXCEL_3DES
) {
576 cdesc
->control_data
.control0
|=
577 CONTEXT_CONTROL_CRYPTO_ALG_3DES
;
578 } else if (ctx
->alg
== SAFEXCEL_AES
) {
579 switch (ctx
->key_len
>> ctx
->xts
) {
580 case AES_KEYSIZE_128
:
581 cdesc
->control_data
.control0
|=
582 CONTEXT_CONTROL_CRYPTO_ALG_AES128
;
584 case AES_KEYSIZE_192
:
585 cdesc
->control_data
.control0
|=
586 CONTEXT_CONTROL_CRYPTO_ALG_AES192
;
588 case AES_KEYSIZE_256
:
589 cdesc
->control_data
.control0
|=
590 CONTEXT_CONTROL_CRYPTO_ALG_AES256
;
593 dev_err(priv
->dev
, "aes keysize not supported: %u\n",
594 ctx
->key_len
>> ctx
->xts
);
597 } else if (ctx
->alg
== SAFEXCEL_CHACHA20
) {
598 cdesc
->control_data
.control0
|=
599 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20
;
600 } else if (ctx
->alg
== SAFEXCEL_SM4
) {
601 cdesc
->control_data
.control0
|=
602 CONTEXT_CONTROL_CRYPTO_ALG_SM4
;
608 static int safexcel_handle_req_result(struct safexcel_crypto_priv
*priv
, int ring
,
609 struct crypto_async_request
*async
,
610 struct scatterlist
*src
,
611 struct scatterlist
*dst
,
612 unsigned int cryptlen
,
613 struct safexcel_cipher_req
*sreq
,
614 bool *should_complete
, int *ret
)
616 struct skcipher_request
*areq
= skcipher_request_cast(async
);
617 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(areq
);
618 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(skcipher
);
619 struct safexcel_result_desc
*rdesc
;
624 if (unlikely(!sreq
->rdescs
))
627 while (sreq
->rdescs
--) {
628 rdesc
= safexcel_ring_next_rptr(priv
, &priv
->ring
[ring
].rdr
);
631 "cipher: result: could not retrieve the result descriptor\n");
632 *ret
= PTR_ERR(rdesc
);
637 *ret
= safexcel_rdesc_check_errors(priv
, rdesc
);
642 safexcel_complete(priv
, ring
);
645 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_BIDIRECTIONAL
);
647 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_TO_DEVICE
);
648 dma_unmap_sg(priv
->dev
, dst
, sreq
->nr_dst
, DMA_FROM_DEVICE
);
652 * Update IV in req from last crypto output word for CBC modes
654 if ((!ctx
->aead
) && (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CBC
) &&
655 (sreq
->direction
== SAFEXCEL_ENCRYPT
)) {
656 /* For encrypt take the last output word */
657 sg_pcopy_to_buffer(dst
, sreq
->nr_dst
, areq
->iv
,
658 crypto_skcipher_ivsize(skcipher
),
660 crypto_skcipher_ivsize(skcipher
)));
663 *should_complete
= true;
668 static int safexcel_send_req(struct crypto_async_request
*base
, int ring
,
669 struct safexcel_cipher_req
*sreq
,
670 struct scatterlist
*src
, struct scatterlist
*dst
,
671 unsigned int cryptlen
, unsigned int assoclen
,
672 unsigned int digestsize
, u8
*iv
, int *commands
,
675 struct skcipher_request
*areq
= skcipher_request_cast(base
);
676 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(areq
);
677 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
678 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
679 struct safexcel_command_desc
*cdesc
;
680 struct safexcel_command_desc
*first_cdesc
= NULL
;
681 struct safexcel_result_desc
*rdesc
, *first_rdesc
= NULL
;
682 struct scatterlist
*sg
;
684 unsigned int totlen_src
= cryptlen
+ assoclen
;
685 unsigned int totlen_dst
= totlen_src
;
686 struct safexcel_token
*atoken
;
687 int n_cdesc
= 0, n_rdesc
= 0;
688 int queued
, i
, ret
= 0;
691 sreq
->nr_src
= sg_nents_for_len(src
, totlen_src
);
695 * AEAD has auth tag appended to output for encrypt and
696 * removed from the output for decrypt!
698 if (sreq
->direction
== SAFEXCEL_DECRYPT
)
699 totlen_dst
-= digestsize
;
701 totlen_dst
+= digestsize
;
703 memcpy(ctx
->base
.ctxr
->data
+ ctx
->key_len
/ sizeof(u32
),
704 &ctx
->base
.ipad
, ctx
->state_sz
);
706 memcpy(ctx
->base
.ctxr
->data
+ (ctx
->key_len
+
707 ctx
->state_sz
) / sizeof(u32
), &ctx
->base
.opad
,
709 } else if ((ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CBC
) &&
710 (sreq
->direction
== SAFEXCEL_DECRYPT
)) {
712 * Save IV from last crypto input word for CBC modes in decrypt
713 * direction. Need to do this first in case of inplace operation
714 * as it will be overwritten.
716 sg_pcopy_to_buffer(src
, sreq
->nr_src
, areq
->iv
,
717 crypto_skcipher_ivsize(skcipher
),
719 crypto_skcipher_ivsize(skcipher
)));
722 sreq
->nr_dst
= sg_nents_for_len(dst
, totlen_dst
);
725 * Remember actual input length, source buffer length may be
726 * updated in case of inline operation below.
732 sreq
->nr_src
= max(sreq
->nr_src
, sreq
->nr_dst
);
733 sreq
->nr_dst
= sreq
->nr_src
;
734 if (unlikely((totlen_src
|| totlen_dst
) &&
735 (sreq
->nr_src
<= 0))) {
736 dev_err(priv
->dev
, "In-place buffer not large enough (need %d bytes)!",
737 max(totlen_src
, totlen_dst
));
740 dma_map_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_BIDIRECTIONAL
);
742 if (unlikely(totlen_src
&& (sreq
->nr_src
<= 0))) {
743 dev_err(priv
->dev
, "Source buffer not large enough (need %d bytes)!",
747 dma_map_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_TO_DEVICE
);
749 if (unlikely(totlen_dst
&& (sreq
->nr_dst
<= 0))) {
750 dev_err(priv
->dev
, "Dest buffer not large enough (need %d bytes)!",
752 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
,
756 dma_map_sg(priv
->dev
, dst
, sreq
->nr_dst
, DMA_FROM_DEVICE
);
759 memcpy(ctx
->base
.ctxr
->data
, ctx
->key
, ctx
->key_len
);
763 * The EIP97 cannot deal with zero length input packets!
764 * So stuff a dummy command descriptor indicating a 1 byte
765 * (dummy) input packet, using the context record as source.
767 first_cdesc
= safexcel_add_cdesc(priv
, ring
,
768 1, 1, ctx
->base
.ctxr_dma
,
769 1, 1, ctx
->base
.ctxr_dma
,
771 if (IS_ERR(first_cdesc
)) {
772 /* No space left in the command descriptor ring */
773 ret
= PTR_ERR(first_cdesc
);
780 /* command descriptors */
781 for_each_sg(src
, sg
, sreq
->nr_src
, i
) {
782 int len
= sg_dma_len(sg
);
784 /* Do not overflow the request */
788 cdesc
= safexcel_add_cdesc(priv
, ring
, !n_cdesc
,
790 sg_dma_address(sg
), len
, totlen
,
791 ctx
->base
.ctxr_dma
, &atoken
);
793 /* No space left in the command descriptor ring */
794 ret
= PTR_ERR(cdesc
);
807 /* Add context control words and token to first command descriptor */
808 safexcel_context_control(ctx
, base
, sreq
, first_cdesc
);
810 safexcel_aead_token(ctx
, iv
, first_cdesc
, atoken
,
811 sreq
->direction
, cryptlen
,
812 assoclen
, digestsize
);
814 safexcel_skcipher_token(ctx
, iv
, first_cdesc
, atoken
,
817 /* result descriptors */
818 for_each_sg(dst
, sg
, sreq
->nr_dst
, i
) {
819 bool last
= (i
== sreq
->nr_dst
- 1);
820 u32 len
= sg_dma_len(sg
);
822 /* only allow the part of the buffer we know we need */
823 if (len
> totlen_dst
)
829 /* skip over AAD space in buffer - not written */
831 if (assoclen
>= len
) {
835 rdesc
= safexcel_add_rdesc(priv
, ring
, first
, last
,
841 rdesc
= safexcel_add_rdesc(priv
, ring
, first
, last
,
846 /* No space left in the result descriptor ring */
847 ret
= PTR_ERR(rdesc
);
857 if (unlikely(first
)) {
859 * Special case: AEAD decrypt with only AAD data.
860 * In this case there is NO output data from the engine,
861 * but the engine still needs a result descriptor!
862 * Create a dummy one just for catching the result token.
864 rdesc
= safexcel_add_rdesc(priv
, ring
, true, true, 0, 0);
866 /* No space left in the result descriptor ring */
867 ret
= PTR_ERR(rdesc
);
874 safexcel_rdr_req_set(priv
, ring
, first_rdesc
, base
);
881 for (i
= 0; i
< n_rdesc
; i
++)
882 safexcel_ring_rollback_wptr(priv
, &priv
->ring
[ring
].rdr
);
884 for (i
= 0; i
< n_cdesc
; i
++)
885 safexcel_ring_rollback_wptr(priv
, &priv
->ring
[ring
].cdr
);
888 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_BIDIRECTIONAL
);
890 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_TO_DEVICE
);
891 dma_unmap_sg(priv
->dev
, dst
, sreq
->nr_dst
, DMA_FROM_DEVICE
);
897 static int safexcel_handle_inv_result(struct safexcel_crypto_priv
*priv
,
899 struct crypto_async_request
*base
,
900 struct safexcel_cipher_req
*sreq
,
901 bool *should_complete
, int *ret
)
903 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
904 struct safexcel_result_desc
*rdesc
;
905 int ndesc
= 0, enq_ret
;
909 if (unlikely(!sreq
->rdescs
))
912 while (sreq
->rdescs
--) {
913 rdesc
= safexcel_ring_next_rptr(priv
, &priv
->ring
[ring
].rdr
);
916 "cipher: invalidate: could not retrieve the result descriptor\n");
917 *ret
= PTR_ERR(rdesc
);
922 *ret
= safexcel_rdesc_check_errors(priv
, rdesc
);
927 safexcel_complete(priv
, ring
);
929 if (ctx
->base
.exit_inv
) {
930 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
933 *should_complete
= true;
938 ring
= safexcel_select_ring(priv
);
939 ctx
->base
.ring
= ring
;
941 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
942 enq_ret
= crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
943 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
945 if (enq_ret
!= -EINPROGRESS
)
948 queue_work(priv
->ring
[ring
].workqueue
,
949 &priv
->ring
[ring
].work_data
.work
);
951 *should_complete
= false;
956 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv
*priv
,
958 struct crypto_async_request
*async
,
959 bool *should_complete
, int *ret
)
961 struct skcipher_request
*req
= skcipher_request_cast(async
);
962 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
965 if (sreq
->needs_inv
) {
966 sreq
->needs_inv
= false;
967 err
= safexcel_handle_inv_result(priv
, ring
, async
, sreq
,
968 should_complete
, ret
);
970 err
= safexcel_handle_req_result(priv
, ring
, async
, req
->src
,
971 req
->dst
, req
->cryptlen
, sreq
,
972 should_complete
, ret
);
978 static int safexcel_aead_handle_result(struct safexcel_crypto_priv
*priv
,
980 struct crypto_async_request
*async
,
981 bool *should_complete
, int *ret
)
983 struct aead_request
*req
= aead_request_cast(async
);
984 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
985 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
988 if (sreq
->needs_inv
) {
989 sreq
->needs_inv
= false;
990 err
= safexcel_handle_inv_result(priv
, ring
, async
, sreq
,
991 should_complete
, ret
);
993 err
= safexcel_handle_req_result(priv
, ring
, async
, req
->src
,
995 req
->cryptlen
+ crypto_aead_authsize(tfm
),
996 sreq
, should_complete
, ret
);
1002 static int safexcel_cipher_send_inv(struct crypto_async_request
*base
,
1003 int ring
, int *commands
, int *results
)
1005 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
1006 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1009 ret
= safexcel_invalidate_cache(base
, priv
, ctx
->base
.ctxr_dma
, ring
);
1019 static int safexcel_skcipher_send(struct crypto_async_request
*async
, int ring
,
1020 int *commands
, int *results
)
1022 struct skcipher_request
*req
= skcipher_request_cast(async
);
1023 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1024 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
1025 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1028 BUG_ON(!(priv
->flags
& EIP197_TRC_CACHE
) && sreq
->needs_inv
);
1030 if (sreq
->needs_inv
) {
1031 ret
= safexcel_cipher_send_inv(async
, ring
, commands
, results
);
1033 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(req
);
1034 u8 input_iv
[AES_BLOCK_SIZE
];
1037 * Save input IV in case of CBC decrypt mode
1038 * Will be overwritten with output IV prior to use!
1040 memcpy(input_iv
, req
->iv
, crypto_skcipher_ivsize(skcipher
));
1042 ret
= safexcel_send_req(async
, ring
, sreq
, req
->src
,
1043 req
->dst
, req
->cryptlen
, 0, 0, input_iv
,
1047 sreq
->rdescs
= *results
;
1051 static int safexcel_aead_send(struct crypto_async_request
*async
, int ring
,
1052 int *commands
, int *results
)
1054 struct aead_request
*req
= aead_request_cast(async
);
1055 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
1056 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1057 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
1058 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1061 BUG_ON(!(priv
->flags
& EIP197_TRC_CACHE
) && sreq
->needs_inv
);
1063 if (sreq
->needs_inv
)
1064 ret
= safexcel_cipher_send_inv(async
, ring
, commands
, results
);
1066 ret
= safexcel_send_req(async
, ring
, sreq
, req
->src
, req
->dst
,
1067 req
->cryptlen
, req
->assoclen
,
1068 crypto_aead_authsize(tfm
), req
->iv
,
1070 sreq
->rdescs
= *results
;
1074 static int safexcel_cipher_exit_inv(struct crypto_tfm
*tfm
,
1075 struct crypto_async_request
*base
,
1076 struct safexcel_cipher_req
*sreq
,
1077 struct safexcel_inv_result
*result
)
1079 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1080 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1081 int ring
= ctx
->base
.ring
;
1083 init_completion(&result
->completion
);
1085 ctx
= crypto_tfm_ctx(base
->tfm
);
1086 ctx
->base
.exit_inv
= true;
1087 sreq
->needs_inv
= true;
1089 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
1090 crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
1091 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
1093 queue_work(priv
->ring
[ring
].workqueue
,
1094 &priv
->ring
[ring
].work_data
.work
);
1096 wait_for_completion(&result
->completion
);
1098 if (result
->error
) {
1100 "cipher: sync: invalidate: completion error %d\n",
1102 return result
->error
;
1108 static int safexcel_skcipher_exit_inv(struct crypto_tfm
*tfm
)
1110 EIP197_REQUEST_ON_STACK(req
, skcipher
, EIP197_SKCIPHER_REQ_SIZE
);
1111 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
1112 struct safexcel_inv_result result
= {};
1114 memset(req
, 0, sizeof(struct skcipher_request
));
1116 skcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1117 safexcel_inv_complete
, &result
);
1118 skcipher_request_set_tfm(req
, __crypto_skcipher_cast(tfm
));
1120 return safexcel_cipher_exit_inv(tfm
, &req
->base
, sreq
, &result
);
1123 static int safexcel_aead_exit_inv(struct crypto_tfm
*tfm
)
1125 EIP197_REQUEST_ON_STACK(req
, aead
, EIP197_AEAD_REQ_SIZE
);
1126 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
1127 struct safexcel_inv_result result
= {};
1129 memset(req
, 0, sizeof(struct aead_request
));
1131 aead_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1132 safexcel_inv_complete
, &result
);
1133 aead_request_set_tfm(req
, __crypto_aead_cast(tfm
));
1135 return safexcel_cipher_exit_inv(tfm
, &req
->base
, sreq
, &result
);
1138 static int safexcel_queue_req(struct crypto_async_request
*base
,
1139 struct safexcel_cipher_req
*sreq
,
1140 enum safexcel_cipher_direction dir
)
1142 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
1143 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1146 sreq
->needs_inv
= false;
1147 sreq
->direction
= dir
;
1149 if (ctx
->base
.ctxr
) {
1150 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.needs_inv
) {
1151 sreq
->needs_inv
= true;
1152 ctx
->base
.needs_inv
= false;
1155 ctx
->base
.ring
= safexcel_select_ring(priv
);
1156 ctx
->base
.ctxr
= dma_pool_zalloc(priv
->context_pool
,
1157 EIP197_GFP_FLAGS(*base
),
1158 &ctx
->base
.ctxr_dma
);
1159 if (!ctx
->base
.ctxr
)
1163 ring
= ctx
->base
.ring
;
1165 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
1166 ret
= crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
1167 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
1169 queue_work(priv
->ring
[ring
].workqueue
,
1170 &priv
->ring
[ring
].work_data
.work
);
1175 static int safexcel_encrypt(struct skcipher_request
*req
)
1177 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
1181 static int safexcel_decrypt(struct skcipher_request
*req
)
1183 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
1187 static int safexcel_skcipher_cra_init(struct crypto_tfm
*tfm
)
1189 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1190 struct safexcel_alg_template
*tmpl
=
1191 container_of(tfm
->__crt_alg
, struct safexcel_alg_template
,
1194 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm
),
1195 sizeof(struct safexcel_cipher_req
));
1197 ctx
->base
.priv
= tmpl
->priv
;
1199 ctx
->base
.send
= safexcel_skcipher_send
;
1200 ctx
->base
.handle_result
= safexcel_skcipher_handle_result
;
1201 ctx
->ivmask
= EIP197_OPTION_4_TOKEN_IV_CMD
;
1206 static int safexcel_cipher_cra_exit(struct crypto_tfm
*tfm
)
1208 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1210 memzero_explicit(ctx
->key
, sizeof(ctx
->key
));
1212 /* context not allocated, skip invalidation */
1213 if (!ctx
->base
.ctxr
)
1216 memzero_explicit(ctx
->base
.ctxr
->data
, sizeof(ctx
->base
.ctxr
->data
));
1220 static void safexcel_skcipher_cra_exit(struct crypto_tfm
*tfm
)
1222 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1223 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1226 if (safexcel_cipher_cra_exit(tfm
))
1229 if (priv
->flags
& EIP197_TRC_CACHE
) {
1230 ret
= safexcel_skcipher_exit_inv(tfm
);
1232 dev_warn(priv
->dev
, "skcipher: invalidation error %d\n",
1235 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
1236 ctx
->base
.ctxr_dma
);
1240 static void safexcel_aead_cra_exit(struct crypto_tfm
*tfm
)
1242 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1243 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1246 if (safexcel_cipher_cra_exit(tfm
))
1249 if (priv
->flags
& EIP197_TRC_CACHE
) {
1250 ret
= safexcel_aead_exit_inv(tfm
);
1252 dev_warn(priv
->dev
, "aead: invalidation error %d\n",
1255 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
1256 ctx
->base
.ctxr_dma
);
1260 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm
*tfm
)
1262 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1264 safexcel_skcipher_cra_init(tfm
);
1265 ctx
->alg
= SAFEXCEL_AES
;
1266 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
1268 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1272 struct safexcel_alg_template safexcel_alg_ecb_aes
= {
1273 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1274 .algo_mask
= SAFEXCEL_ALG_AES
,
1276 .setkey
= safexcel_skcipher_aes_setkey
,
1277 .encrypt
= safexcel_encrypt
,
1278 .decrypt
= safexcel_decrypt
,
1279 .min_keysize
= AES_MIN_KEY_SIZE
,
1280 .max_keysize
= AES_MAX_KEY_SIZE
,
1282 .cra_name
= "ecb(aes)",
1283 .cra_driver_name
= "safexcel-ecb-aes",
1284 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1285 .cra_flags
= CRYPTO_ALG_ASYNC
|
1286 CRYPTO_ALG_ALLOCATES_MEMORY
|
1287 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1288 .cra_blocksize
= AES_BLOCK_SIZE
,
1289 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1291 .cra_init
= safexcel_skcipher_aes_ecb_cra_init
,
1292 .cra_exit
= safexcel_skcipher_cra_exit
,
1293 .cra_module
= THIS_MODULE
,
1298 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm
*tfm
)
1300 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1302 safexcel_skcipher_cra_init(tfm
);
1303 ctx
->alg
= SAFEXCEL_AES
;
1304 ctx
->blocksz
= AES_BLOCK_SIZE
;
1305 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
1309 struct safexcel_alg_template safexcel_alg_cbc_aes
= {
1310 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1311 .algo_mask
= SAFEXCEL_ALG_AES
,
1313 .setkey
= safexcel_skcipher_aes_setkey
,
1314 .encrypt
= safexcel_encrypt
,
1315 .decrypt
= safexcel_decrypt
,
1316 .min_keysize
= AES_MIN_KEY_SIZE
,
1317 .max_keysize
= AES_MAX_KEY_SIZE
,
1318 .ivsize
= AES_BLOCK_SIZE
,
1320 .cra_name
= "cbc(aes)",
1321 .cra_driver_name
= "safexcel-cbc-aes",
1322 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1323 .cra_flags
= CRYPTO_ALG_ASYNC
|
1324 CRYPTO_ALG_ALLOCATES_MEMORY
|
1325 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1326 .cra_blocksize
= AES_BLOCK_SIZE
,
1327 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1329 .cra_init
= safexcel_skcipher_aes_cbc_cra_init
,
1330 .cra_exit
= safexcel_skcipher_cra_exit
,
1331 .cra_module
= THIS_MODULE
,
1336 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm
*tfm
)
1338 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1340 safexcel_skcipher_cra_init(tfm
);
1341 ctx
->alg
= SAFEXCEL_AES
;
1342 ctx
->blocksz
= AES_BLOCK_SIZE
;
1343 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CFB
;
1347 struct safexcel_alg_template safexcel_alg_cfb_aes
= {
1348 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1349 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_AES_XFB
,
1351 .setkey
= safexcel_skcipher_aes_setkey
,
1352 .encrypt
= safexcel_encrypt
,
1353 .decrypt
= safexcel_decrypt
,
1354 .min_keysize
= AES_MIN_KEY_SIZE
,
1355 .max_keysize
= AES_MAX_KEY_SIZE
,
1356 .ivsize
= AES_BLOCK_SIZE
,
1358 .cra_name
= "cfb(aes)",
1359 .cra_driver_name
= "safexcel-cfb-aes",
1360 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1361 .cra_flags
= CRYPTO_ALG_ASYNC
|
1362 CRYPTO_ALG_ALLOCATES_MEMORY
|
1363 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1365 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1367 .cra_init
= safexcel_skcipher_aes_cfb_cra_init
,
1368 .cra_exit
= safexcel_skcipher_cra_exit
,
1369 .cra_module
= THIS_MODULE
,
1374 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm
*tfm
)
1376 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1378 safexcel_skcipher_cra_init(tfm
);
1379 ctx
->alg
= SAFEXCEL_AES
;
1380 ctx
->blocksz
= AES_BLOCK_SIZE
;
1381 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_OFB
;
1385 struct safexcel_alg_template safexcel_alg_ofb_aes
= {
1386 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1387 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_AES_XFB
,
1389 .setkey
= safexcel_skcipher_aes_setkey
,
1390 .encrypt
= safexcel_encrypt
,
1391 .decrypt
= safexcel_decrypt
,
1392 .min_keysize
= AES_MIN_KEY_SIZE
,
1393 .max_keysize
= AES_MAX_KEY_SIZE
,
1394 .ivsize
= AES_BLOCK_SIZE
,
1396 .cra_name
= "ofb(aes)",
1397 .cra_driver_name
= "safexcel-ofb-aes",
1398 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1399 .cra_flags
= CRYPTO_ALG_ASYNC
|
1400 CRYPTO_ALG_ALLOCATES_MEMORY
|
1401 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1403 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1405 .cra_init
= safexcel_skcipher_aes_ofb_cra_init
,
1406 .cra_exit
= safexcel_skcipher_cra_exit
,
1407 .cra_module
= THIS_MODULE
,
1412 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher
*ctfm
,
1413 const u8
*key
, unsigned int len
)
1415 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
1416 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1417 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1418 struct crypto_aes_ctx aes
;
1420 unsigned int keylen
;
1422 /* last 4 bytes of key are the nonce! */
1423 ctx
->nonce
= *(u32
*)(key
+ len
- CTR_RFC3686_NONCE_SIZE
);
1424 /* exclude the nonce here */
1425 keylen
= len
- CTR_RFC3686_NONCE_SIZE
;
1426 ret
= aes_expandkey(&aes
, key
, keylen
);
1430 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
1431 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++) {
1432 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
1433 ctx
->base
.needs_inv
= true;
1439 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++)
1440 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
1442 ctx
->key_len
= keylen
;
1444 memzero_explicit(&aes
, sizeof(aes
));
1448 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm
*tfm
)
1450 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1452 safexcel_skcipher_cra_init(tfm
);
1453 ctx
->alg
= SAFEXCEL_AES
;
1454 ctx
->blocksz
= AES_BLOCK_SIZE
;
1455 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
;
1459 struct safexcel_alg_template safexcel_alg_ctr_aes
= {
1460 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1461 .algo_mask
= SAFEXCEL_ALG_AES
,
1463 .setkey
= safexcel_skcipher_aesctr_setkey
,
1464 .encrypt
= safexcel_encrypt
,
1465 .decrypt
= safexcel_decrypt
,
1466 /* Add nonce size */
1467 .min_keysize
= AES_MIN_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
1468 .max_keysize
= AES_MAX_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
1469 .ivsize
= CTR_RFC3686_IV_SIZE
,
1471 .cra_name
= "rfc3686(ctr(aes))",
1472 .cra_driver_name
= "safexcel-ctr-aes",
1473 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1474 .cra_flags
= CRYPTO_ALG_ASYNC
|
1475 CRYPTO_ALG_ALLOCATES_MEMORY
|
1476 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1478 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1480 .cra_init
= safexcel_skcipher_aes_ctr_cra_init
,
1481 .cra_exit
= safexcel_skcipher_cra_exit
,
1482 .cra_module
= THIS_MODULE
,
1487 static int safexcel_des_setkey(struct crypto_skcipher
*ctfm
, const u8
*key
,
1490 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(ctfm
);
1491 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1494 ret
= verify_skcipher_des_key(ctfm
, key
);
1498 /* if context exits and key changed, need to invalidate it */
1499 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
1500 if (memcmp(ctx
->key
, key
, len
))
1501 ctx
->base
.needs_inv
= true;
1503 memcpy(ctx
->key
, key
, len
);
1509 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm
*tfm
)
1511 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1513 safexcel_skcipher_cra_init(tfm
);
1514 ctx
->alg
= SAFEXCEL_DES
;
1515 ctx
->blocksz
= DES_BLOCK_SIZE
;
1516 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1517 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
1521 struct safexcel_alg_template safexcel_alg_cbc_des
= {
1522 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1523 .algo_mask
= SAFEXCEL_ALG_DES
,
1525 .setkey
= safexcel_des_setkey
,
1526 .encrypt
= safexcel_encrypt
,
1527 .decrypt
= safexcel_decrypt
,
1528 .min_keysize
= DES_KEY_SIZE
,
1529 .max_keysize
= DES_KEY_SIZE
,
1530 .ivsize
= DES_BLOCK_SIZE
,
1532 .cra_name
= "cbc(des)",
1533 .cra_driver_name
= "safexcel-cbc-des",
1534 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1535 .cra_flags
= CRYPTO_ALG_ASYNC
|
1536 CRYPTO_ALG_ALLOCATES_MEMORY
|
1537 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1538 .cra_blocksize
= DES_BLOCK_SIZE
,
1539 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1541 .cra_init
= safexcel_skcipher_des_cbc_cra_init
,
1542 .cra_exit
= safexcel_skcipher_cra_exit
,
1543 .cra_module
= THIS_MODULE
,
1548 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm
*tfm
)
1550 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1552 safexcel_skcipher_cra_init(tfm
);
1553 ctx
->alg
= SAFEXCEL_DES
;
1554 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
1556 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1560 struct safexcel_alg_template safexcel_alg_ecb_des
= {
1561 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1562 .algo_mask
= SAFEXCEL_ALG_DES
,
1564 .setkey
= safexcel_des_setkey
,
1565 .encrypt
= safexcel_encrypt
,
1566 .decrypt
= safexcel_decrypt
,
1567 .min_keysize
= DES_KEY_SIZE
,
1568 .max_keysize
= DES_KEY_SIZE
,
1570 .cra_name
= "ecb(des)",
1571 .cra_driver_name
= "safexcel-ecb-des",
1572 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1573 .cra_flags
= CRYPTO_ALG_ASYNC
|
1574 CRYPTO_ALG_ALLOCATES_MEMORY
|
1575 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1576 .cra_blocksize
= DES_BLOCK_SIZE
,
1577 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1579 .cra_init
= safexcel_skcipher_des_ecb_cra_init
,
1580 .cra_exit
= safexcel_skcipher_cra_exit
,
1581 .cra_module
= THIS_MODULE
,
1586 static int safexcel_des3_ede_setkey(struct crypto_skcipher
*ctfm
,
1587 const u8
*key
, unsigned int len
)
1589 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(ctfm
);
1590 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
1593 err
= verify_skcipher_des3_key(ctfm
, key
);
1597 /* if context exits and key changed, need to invalidate it */
1598 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
1599 if (memcmp(ctx
->key
, key
, len
))
1600 ctx
->base
.needs_inv
= true;
1602 memcpy(ctx
->key
, key
, len
);
1608 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm
*tfm
)
1610 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1612 safexcel_skcipher_cra_init(tfm
);
1613 ctx
->alg
= SAFEXCEL_3DES
;
1614 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1615 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1616 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
1620 struct safexcel_alg_template safexcel_alg_cbc_des3_ede
= {
1621 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1622 .algo_mask
= SAFEXCEL_ALG_DES
,
1624 .setkey
= safexcel_des3_ede_setkey
,
1625 .encrypt
= safexcel_encrypt
,
1626 .decrypt
= safexcel_decrypt
,
1627 .min_keysize
= DES3_EDE_KEY_SIZE
,
1628 .max_keysize
= DES3_EDE_KEY_SIZE
,
1629 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1631 .cra_name
= "cbc(des3_ede)",
1632 .cra_driver_name
= "safexcel-cbc-des3_ede",
1633 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1634 .cra_flags
= CRYPTO_ALG_ASYNC
|
1635 CRYPTO_ALG_ALLOCATES_MEMORY
|
1636 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1637 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1638 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1640 .cra_init
= safexcel_skcipher_des3_cbc_cra_init
,
1641 .cra_exit
= safexcel_skcipher_cra_exit
,
1642 .cra_module
= THIS_MODULE
,
1647 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm
*tfm
)
1649 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1651 safexcel_skcipher_cra_init(tfm
);
1652 ctx
->alg
= SAFEXCEL_3DES
;
1653 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
1655 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1659 struct safexcel_alg_template safexcel_alg_ecb_des3_ede
= {
1660 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1661 .algo_mask
= SAFEXCEL_ALG_DES
,
1663 .setkey
= safexcel_des3_ede_setkey
,
1664 .encrypt
= safexcel_encrypt
,
1665 .decrypt
= safexcel_decrypt
,
1666 .min_keysize
= DES3_EDE_KEY_SIZE
,
1667 .max_keysize
= DES3_EDE_KEY_SIZE
,
1669 .cra_name
= "ecb(des3_ede)",
1670 .cra_driver_name
= "safexcel-ecb-des3_ede",
1671 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1672 .cra_flags
= CRYPTO_ALG_ASYNC
|
1673 CRYPTO_ALG_ALLOCATES_MEMORY
|
1674 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1675 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1676 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1678 .cra_init
= safexcel_skcipher_des3_ecb_cra_init
,
1679 .cra_exit
= safexcel_skcipher_cra_exit
,
1680 .cra_module
= THIS_MODULE
,
1685 static int safexcel_aead_encrypt(struct aead_request
*req
)
1687 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
1689 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_ENCRYPT
);
1692 static int safexcel_aead_decrypt(struct aead_request
*req
)
1694 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
1696 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_DECRYPT
);
1699 static int safexcel_aead_cra_init(struct crypto_tfm
*tfm
)
1701 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1702 struct safexcel_alg_template
*tmpl
=
1703 container_of(tfm
->__crt_alg
, struct safexcel_alg_template
,
1706 crypto_aead_set_reqsize(__crypto_aead_cast(tfm
),
1707 sizeof(struct safexcel_cipher_req
));
1709 ctx
->base
.priv
= tmpl
->priv
;
1711 ctx
->alg
= SAFEXCEL_AES
; /* default */
1712 ctx
->blocksz
= AES_BLOCK_SIZE
;
1713 ctx
->ivmask
= EIP197_OPTION_4_TOKEN_IV_CMD
;
1715 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
; /* default */
1717 ctx
->base
.send
= safexcel_aead_send
;
1718 ctx
->base
.handle_result
= safexcel_aead_handle_result
;
1722 static int safexcel_aead_sha1_cra_init(struct crypto_tfm
*tfm
)
1724 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1726 safexcel_aead_cra_init(tfm
);
1727 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA1
;
1728 ctx
->state_sz
= SHA1_DIGEST_SIZE
;
1732 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes
= {
1733 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1734 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA1
,
1736 .setkey
= safexcel_aead_setkey
,
1737 .encrypt
= safexcel_aead_encrypt
,
1738 .decrypt
= safexcel_aead_decrypt
,
1739 .ivsize
= AES_BLOCK_SIZE
,
1740 .maxauthsize
= SHA1_DIGEST_SIZE
,
1742 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
1743 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-aes",
1744 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1745 .cra_flags
= CRYPTO_ALG_ASYNC
|
1746 CRYPTO_ALG_ALLOCATES_MEMORY
|
1747 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1748 .cra_blocksize
= AES_BLOCK_SIZE
,
1749 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1751 .cra_init
= safexcel_aead_sha1_cra_init
,
1752 .cra_exit
= safexcel_aead_cra_exit
,
1753 .cra_module
= THIS_MODULE
,
1758 static int safexcel_aead_sha256_cra_init(struct crypto_tfm
*tfm
)
1760 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1762 safexcel_aead_cra_init(tfm
);
1763 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA256
;
1764 ctx
->state_sz
= SHA256_DIGEST_SIZE
;
1768 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes
= {
1769 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1770 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
1772 .setkey
= safexcel_aead_setkey
,
1773 .encrypt
= safexcel_aead_encrypt
,
1774 .decrypt
= safexcel_aead_decrypt
,
1775 .ivsize
= AES_BLOCK_SIZE
,
1776 .maxauthsize
= SHA256_DIGEST_SIZE
,
1778 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
1779 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-aes",
1780 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1781 .cra_flags
= CRYPTO_ALG_ASYNC
|
1782 CRYPTO_ALG_ALLOCATES_MEMORY
|
1783 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1784 .cra_blocksize
= AES_BLOCK_SIZE
,
1785 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1787 .cra_init
= safexcel_aead_sha256_cra_init
,
1788 .cra_exit
= safexcel_aead_cra_exit
,
1789 .cra_module
= THIS_MODULE
,
1794 static int safexcel_aead_sha224_cra_init(struct crypto_tfm
*tfm
)
1796 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1798 safexcel_aead_cra_init(tfm
);
1799 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA224
;
1800 ctx
->state_sz
= SHA256_DIGEST_SIZE
;
1804 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes
= {
1805 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1806 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
1808 .setkey
= safexcel_aead_setkey
,
1809 .encrypt
= safexcel_aead_encrypt
,
1810 .decrypt
= safexcel_aead_decrypt
,
1811 .ivsize
= AES_BLOCK_SIZE
,
1812 .maxauthsize
= SHA224_DIGEST_SIZE
,
1814 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
1815 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-aes",
1816 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1817 .cra_flags
= CRYPTO_ALG_ASYNC
|
1818 CRYPTO_ALG_ALLOCATES_MEMORY
|
1819 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1820 .cra_blocksize
= AES_BLOCK_SIZE
,
1821 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1823 .cra_init
= safexcel_aead_sha224_cra_init
,
1824 .cra_exit
= safexcel_aead_cra_exit
,
1825 .cra_module
= THIS_MODULE
,
1830 static int safexcel_aead_sha512_cra_init(struct crypto_tfm
*tfm
)
1832 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1834 safexcel_aead_cra_init(tfm
);
1835 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA512
;
1836 ctx
->state_sz
= SHA512_DIGEST_SIZE
;
1840 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes
= {
1841 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1842 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
1844 .setkey
= safexcel_aead_setkey
,
1845 .encrypt
= safexcel_aead_encrypt
,
1846 .decrypt
= safexcel_aead_decrypt
,
1847 .ivsize
= AES_BLOCK_SIZE
,
1848 .maxauthsize
= SHA512_DIGEST_SIZE
,
1850 .cra_name
= "authenc(hmac(sha512),cbc(aes))",
1851 .cra_driver_name
= "safexcel-authenc-hmac-sha512-cbc-aes",
1852 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1853 .cra_flags
= CRYPTO_ALG_ASYNC
|
1854 CRYPTO_ALG_ALLOCATES_MEMORY
|
1855 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1856 .cra_blocksize
= AES_BLOCK_SIZE
,
1857 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1859 .cra_init
= safexcel_aead_sha512_cra_init
,
1860 .cra_exit
= safexcel_aead_cra_exit
,
1861 .cra_module
= THIS_MODULE
,
1866 static int safexcel_aead_sha384_cra_init(struct crypto_tfm
*tfm
)
1868 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1870 safexcel_aead_cra_init(tfm
);
1871 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA384
;
1872 ctx
->state_sz
= SHA512_DIGEST_SIZE
;
1876 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes
= {
1877 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1878 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
1880 .setkey
= safexcel_aead_setkey
,
1881 .encrypt
= safexcel_aead_encrypt
,
1882 .decrypt
= safexcel_aead_decrypt
,
1883 .ivsize
= AES_BLOCK_SIZE
,
1884 .maxauthsize
= SHA384_DIGEST_SIZE
,
1886 .cra_name
= "authenc(hmac(sha384),cbc(aes))",
1887 .cra_driver_name
= "safexcel-authenc-hmac-sha384-cbc-aes",
1888 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1889 .cra_flags
= CRYPTO_ALG_ASYNC
|
1890 CRYPTO_ALG_ALLOCATES_MEMORY
|
1891 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1892 .cra_blocksize
= AES_BLOCK_SIZE
,
1893 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1895 .cra_init
= safexcel_aead_sha384_cra_init
,
1896 .cra_exit
= safexcel_aead_cra_exit
,
1897 .cra_module
= THIS_MODULE
,
1902 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm
*tfm
)
1904 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1906 safexcel_aead_sha1_cra_init(tfm
);
1907 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
1908 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1909 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1913 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede
= {
1914 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1915 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA1
,
1917 .setkey
= safexcel_aead_setkey
,
1918 .encrypt
= safexcel_aead_encrypt
,
1919 .decrypt
= safexcel_aead_decrypt
,
1920 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1921 .maxauthsize
= SHA1_DIGEST_SIZE
,
1923 .cra_name
= "authenc(hmac(sha1),cbc(des3_ede))",
1924 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1925 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1926 .cra_flags
= CRYPTO_ALG_ASYNC
|
1927 CRYPTO_ALG_ALLOCATES_MEMORY
|
1928 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1929 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1930 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1932 .cra_init
= safexcel_aead_sha1_des3_cra_init
,
1933 .cra_exit
= safexcel_aead_cra_exit
,
1934 .cra_module
= THIS_MODULE
,
1939 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm
*tfm
)
1941 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1943 safexcel_aead_sha256_cra_init(tfm
);
1944 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
1945 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1946 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1950 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede
= {
1951 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1952 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
1954 .setkey
= safexcel_aead_setkey
,
1955 .encrypt
= safexcel_aead_encrypt
,
1956 .decrypt
= safexcel_aead_decrypt
,
1957 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1958 .maxauthsize
= SHA256_DIGEST_SIZE
,
1960 .cra_name
= "authenc(hmac(sha256),cbc(des3_ede))",
1961 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1962 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1963 .cra_flags
= CRYPTO_ALG_ASYNC
|
1964 CRYPTO_ALG_ALLOCATES_MEMORY
|
1965 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1966 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1967 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1969 .cra_init
= safexcel_aead_sha256_des3_cra_init
,
1970 .cra_exit
= safexcel_aead_cra_exit
,
1971 .cra_module
= THIS_MODULE
,
1976 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm
*tfm
)
1978 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1980 safexcel_aead_sha224_cra_init(tfm
);
1981 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
1982 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1983 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1987 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede
= {
1988 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1989 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
1991 .setkey
= safexcel_aead_setkey
,
1992 .encrypt
= safexcel_aead_encrypt
,
1993 .decrypt
= safexcel_aead_decrypt
,
1994 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1995 .maxauthsize
= SHA224_DIGEST_SIZE
,
1997 .cra_name
= "authenc(hmac(sha224),cbc(des3_ede))",
1998 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1999 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2000 .cra_flags
= CRYPTO_ALG_ASYNC
|
2001 CRYPTO_ALG_ALLOCATES_MEMORY
|
2002 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2003 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2004 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2006 .cra_init
= safexcel_aead_sha224_des3_cra_init
,
2007 .cra_exit
= safexcel_aead_cra_exit
,
2008 .cra_module
= THIS_MODULE
,
2013 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm
*tfm
)
2015 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2017 safexcel_aead_sha512_cra_init(tfm
);
2018 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
2019 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
2020 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2024 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede
= {
2025 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2026 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2028 .setkey
= safexcel_aead_setkey
,
2029 .encrypt
= safexcel_aead_encrypt
,
2030 .decrypt
= safexcel_aead_decrypt
,
2031 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2032 .maxauthsize
= SHA512_DIGEST_SIZE
,
2034 .cra_name
= "authenc(hmac(sha512),cbc(des3_ede))",
2035 .cra_driver_name
= "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2036 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2037 .cra_flags
= CRYPTO_ALG_ASYNC
|
2038 CRYPTO_ALG_ALLOCATES_MEMORY
|
2039 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2040 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2041 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2043 .cra_init
= safexcel_aead_sha512_des3_cra_init
,
2044 .cra_exit
= safexcel_aead_cra_exit
,
2045 .cra_module
= THIS_MODULE
,
2050 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm
*tfm
)
2052 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2054 safexcel_aead_sha384_cra_init(tfm
);
2055 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
2056 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
2057 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2061 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede
= {
2062 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2063 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2065 .setkey
= safexcel_aead_setkey
,
2066 .encrypt
= safexcel_aead_encrypt
,
2067 .decrypt
= safexcel_aead_decrypt
,
2068 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2069 .maxauthsize
= SHA384_DIGEST_SIZE
,
2071 .cra_name
= "authenc(hmac(sha384),cbc(des3_ede))",
2072 .cra_driver_name
= "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2073 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2074 .cra_flags
= CRYPTO_ALG_ASYNC
|
2075 CRYPTO_ALG_ALLOCATES_MEMORY
|
2076 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2077 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2078 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2080 .cra_init
= safexcel_aead_sha384_des3_cra_init
,
2081 .cra_exit
= safexcel_aead_cra_exit
,
2082 .cra_module
= THIS_MODULE
,
2087 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm
*tfm
)
2089 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2091 safexcel_aead_sha1_cra_init(tfm
);
2092 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2093 ctx
->blocksz
= DES_BLOCK_SIZE
;
2094 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2098 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des
= {
2099 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2100 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA1
,
2102 .setkey
= safexcel_aead_setkey
,
2103 .encrypt
= safexcel_aead_encrypt
,
2104 .decrypt
= safexcel_aead_decrypt
,
2105 .ivsize
= DES_BLOCK_SIZE
,
2106 .maxauthsize
= SHA1_DIGEST_SIZE
,
2108 .cra_name
= "authenc(hmac(sha1),cbc(des))",
2109 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-des",
2110 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2111 .cra_flags
= CRYPTO_ALG_ASYNC
|
2112 CRYPTO_ALG_ALLOCATES_MEMORY
|
2113 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2114 .cra_blocksize
= DES_BLOCK_SIZE
,
2115 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2117 .cra_init
= safexcel_aead_sha1_des_cra_init
,
2118 .cra_exit
= safexcel_aead_cra_exit
,
2119 .cra_module
= THIS_MODULE
,
2124 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm
*tfm
)
2126 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2128 safexcel_aead_sha256_cra_init(tfm
);
2129 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2130 ctx
->blocksz
= DES_BLOCK_SIZE
;
2131 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2135 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des
= {
2136 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2137 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
2139 .setkey
= safexcel_aead_setkey
,
2140 .encrypt
= safexcel_aead_encrypt
,
2141 .decrypt
= safexcel_aead_decrypt
,
2142 .ivsize
= DES_BLOCK_SIZE
,
2143 .maxauthsize
= SHA256_DIGEST_SIZE
,
2145 .cra_name
= "authenc(hmac(sha256),cbc(des))",
2146 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-des",
2147 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2148 .cra_flags
= CRYPTO_ALG_ASYNC
|
2149 CRYPTO_ALG_ALLOCATES_MEMORY
|
2150 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2151 .cra_blocksize
= DES_BLOCK_SIZE
,
2152 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2154 .cra_init
= safexcel_aead_sha256_des_cra_init
,
2155 .cra_exit
= safexcel_aead_cra_exit
,
2156 .cra_module
= THIS_MODULE
,
2161 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm
*tfm
)
2163 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2165 safexcel_aead_sha224_cra_init(tfm
);
2166 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2167 ctx
->blocksz
= DES_BLOCK_SIZE
;
2168 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2172 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des
= {
2173 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2174 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
2176 .setkey
= safexcel_aead_setkey
,
2177 .encrypt
= safexcel_aead_encrypt
,
2178 .decrypt
= safexcel_aead_decrypt
,
2179 .ivsize
= DES_BLOCK_SIZE
,
2180 .maxauthsize
= SHA224_DIGEST_SIZE
,
2182 .cra_name
= "authenc(hmac(sha224),cbc(des))",
2183 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-des",
2184 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2185 .cra_flags
= CRYPTO_ALG_ASYNC
|
2186 CRYPTO_ALG_ALLOCATES_MEMORY
|
2187 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2188 .cra_blocksize
= DES_BLOCK_SIZE
,
2189 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2191 .cra_init
= safexcel_aead_sha224_des_cra_init
,
2192 .cra_exit
= safexcel_aead_cra_exit
,
2193 .cra_module
= THIS_MODULE
,
2198 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm
*tfm
)
2200 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2202 safexcel_aead_sha512_cra_init(tfm
);
2203 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2204 ctx
->blocksz
= DES_BLOCK_SIZE
;
2205 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2209 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des
= {
2210 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2211 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2213 .setkey
= safexcel_aead_setkey
,
2214 .encrypt
= safexcel_aead_encrypt
,
2215 .decrypt
= safexcel_aead_decrypt
,
2216 .ivsize
= DES_BLOCK_SIZE
,
2217 .maxauthsize
= SHA512_DIGEST_SIZE
,
2219 .cra_name
= "authenc(hmac(sha512),cbc(des))",
2220 .cra_driver_name
= "safexcel-authenc-hmac-sha512-cbc-des",
2221 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2222 .cra_flags
= CRYPTO_ALG_ASYNC
|
2223 CRYPTO_ALG_ALLOCATES_MEMORY
|
2224 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2225 .cra_blocksize
= DES_BLOCK_SIZE
,
2226 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2228 .cra_init
= safexcel_aead_sha512_des_cra_init
,
2229 .cra_exit
= safexcel_aead_cra_exit
,
2230 .cra_module
= THIS_MODULE
,
2235 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm
*tfm
)
2237 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2239 safexcel_aead_sha384_cra_init(tfm
);
2240 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2241 ctx
->blocksz
= DES_BLOCK_SIZE
;
2242 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2246 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des
= {
2247 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2248 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2250 .setkey
= safexcel_aead_setkey
,
2251 .encrypt
= safexcel_aead_encrypt
,
2252 .decrypt
= safexcel_aead_decrypt
,
2253 .ivsize
= DES_BLOCK_SIZE
,
2254 .maxauthsize
= SHA384_DIGEST_SIZE
,
2256 .cra_name
= "authenc(hmac(sha384),cbc(des))",
2257 .cra_driver_name
= "safexcel-authenc-hmac-sha384-cbc-des",
2258 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2259 .cra_flags
= CRYPTO_ALG_ASYNC
|
2260 CRYPTO_ALG_ALLOCATES_MEMORY
|
2261 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2262 .cra_blocksize
= DES_BLOCK_SIZE
,
2263 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2265 .cra_init
= safexcel_aead_sha384_des_cra_init
,
2266 .cra_exit
= safexcel_aead_cra_exit
,
2267 .cra_module
= THIS_MODULE
,
2272 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm
*tfm
)
2274 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2276 safexcel_aead_sha1_cra_init(tfm
);
2277 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2281 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes
= {
2282 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2283 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA1
,
2285 .setkey
= safexcel_aead_setkey
,
2286 .encrypt
= safexcel_aead_encrypt
,
2287 .decrypt
= safexcel_aead_decrypt
,
2288 .ivsize
= CTR_RFC3686_IV_SIZE
,
2289 .maxauthsize
= SHA1_DIGEST_SIZE
,
2291 .cra_name
= "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2292 .cra_driver_name
= "safexcel-authenc-hmac-sha1-ctr-aes",
2293 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2294 .cra_flags
= CRYPTO_ALG_ASYNC
|
2295 CRYPTO_ALG_ALLOCATES_MEMORY
|
2296 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2298 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2300 .cra_init
= safexcel_aead_sha1_ctr_cra_init
,
2301 .cra_exit
= safexcel_aead_cra_exit
,
2302 .cra_module
= THIS_MODULE
,
2307 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm
*tfm
)
2309 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2311 safexcel_aead_sha256_cra_init(tfm
);
2312 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2316 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes
= {
2317 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2318 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
2320 .setkey
= safexcel_aead_setkey
,
2321 .encrypt
= safexcel_aead_encrypt
,
2322 .decrypt
= safexcel_aead_decrypt
,
2323 .ivsize
= CTR_RFC3686_IV_SIZE
,
2324 .maxauthsize
= SHA256_DIGEST_SIZE
,
2326 .cra_name
= "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2327 .cra_driver_name
= "safexcel-authenc-hmac-sha256-ctr-aes",
2328 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2329 .cra_flags
= CRYPTO_ALG_ASYNC
|
2330 CRYPTO_ALG_ALLOCATES_MEMORY
|
2331 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2333 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2335 .cra_init
= safexcel_aead_sha256_ctr_cra_init
,
2336 .cra_exit
= safexcel_aead_cra_exit
,
2337 .cra_module
= THIS_MODULE
,
2342 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm
*tfm
)
2344 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2346 safexcel_aead_sha224_cra_init(tfm
);
2347 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2351 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes
= {
2352 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2353 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
2355 .setkey
= safexcel_aead_setkey
,
2356 .encrypt
= safexcel_aead_encrypt
,
2357 .decrypt
= safexcel_aead_decrypt
,
2358 .ivsize
= CTR_RFC3686_IV_SIZE
,
2359 .maxauthsize
= SHA224_DIGEST_SIZE
,
2361 .cra_name
= "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2362 .cra_driver_name
= "safexcel-authenc-hmac-sha224-ctr-aes",
2363 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2364 .cra_flags
= CRYPTO_ALG_ASYNC
|
2365 CRYPTO_ALG_ALLOCATES_MEMORY
|
2366 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2368 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2370 .cra_init
= safexcel_aead_sha224_ctr_cra_init
,
2371 .cra_exit
= safexcel_aead_cra_exit
,
2372 .cra_module
= THIS_MODULE
,
2377 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm
*tfm
)
2379 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2381 safexcel_aead_sha512_cra_init(tfm
);
2382 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2386 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes
= {
2387 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2388 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
2390 .setkey
= safexcel_aead_setkey
,
2391 .encrypt
= safexcel_aead_encrypt
,
2392 .decrypt
= safexcel_aead_decrypt
,
2393 .ivsize
= CTR_RFC3686_IV_SIZE
,
2394 .maxauthsize
= SHA512_DIGEST_SIZE
,
2396 .cra_name
= "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2397 .cra_driver_name
= "safexcel-authenc-hmac-sha512-ctr-aes",
2398 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2399 .cra_flags
= CRYPTO_ALG_ASYNC
|
2400 CRYPTO_ALG_ALLOCATES_MEMORY
|
2401 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2403 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2405 .cra_init
= safexcel_aead_sha512_ctr_cra_init
,
2406 .cra_exit
= safexcel_aead_cra_exit
,
2407 .cra_module
= THIS_MODULE
,
2412 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm
*tfm
)
2414 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2416 safexcel_aead_sha384_cra_init(tfm
);
2417 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2421 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes
= {
2422 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2423 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
2425 .setkey
= safexcel_aead_setkey
,
2426 .encrypt
= safexcel_aead_encrypt
,
2427 .decrypt
= safexcel_aead_decrypt
,
2428 .ivsize
= CTR_RFC3686_IV_SIZE
,
2429 .maxauthsize
= SHA384_DIGEST_SIZE
,
2431 .cra_name
= "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2432 .cra_driver_name
= "safexcel-authenc-hmac-sha384-ctr-aes",
2433 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2434 .cra_flags
= CRYPTO_ALG_ASYNC
|
2435 CRYPTO_ALG_ALLOCATES_MEMORY
|
2436 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2438 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2440 .cra_init
= safexcel_aead_sha384_ctr_cra_init
,
2441 .cra_exit
= safexcel_aead_cra_exit
,
2442 .cra_module
= THIS_MODULE
,
2447 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher
*ctfm
,
2448 const u8
*key
, unsigned int len
)
2450 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
2451 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2452 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
2453 struct crypto_aes_ctx aes
;
2455 unsigned int keylen
;
2457 /* Check for illegal XTS keys */
2458 ret
= xts_verify_key(ctfm
, key
, len
);
2462 /* Only half of the key data is cipher key */
2463 keylen
= (len
>> 1);
2464 ret
= aes_expandkey(&aes
, key
, keylen
);
2468 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2469 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++) {
2470 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
2471 ctx
->base
.needs_inv
= true;
2477 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++)
2478 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
2480 /* The other half is the tweak key */
2481 ret
= aes_expandkey(&aes
, (u8
*)(key
+ keylen
), keylen
);
2485 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2486 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++) {
2487 if (le32_to_cpu(ctx
->key
[i
+ keylen
/ sizeof(u32
)]) !=
2489 ctx
->base
.needs_inv
= true;
2495 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++)
2496 ctx
->key
[i
+ keylen
/ sizeof(u32
)] =
2497 cpu_to_le32(aes
.key_enc
[i
]);
2499 ctx
->key_len
= keylen
<< 1;
2501 memzero_explicit(&aes
, sizeof(aes
));
2505 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm
*tfm
)
2507 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2509 safexcel_skcipher_cra_init(tfm
);
2510 ctx
->alg
= SAFEXCEL_AES
;
2511 ctx
->blocksz
= AES_BLOCK_SIZE
;
2513 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_XTS
;
2517 static int safexcel_encrypt_xts(struct skcipher_request
*req
)
2519 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
2521 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
2525 static int safexcel_decrypt_xts(struct skcipher_request
*req
)
2527 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
2529 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
2533 struct safexcel_alg_template safexcel_alg_xts_aes
= {
2534 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
2535 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_AES_XTS
,
2537 .setkey
= safexcel_skcipher_aesxts_setkey
,
2538 .encrypt
= safexcel_encrypt_xts
,
2539 .decrypt
= safexcel_decrypt_xts
,
2540 /* XTS actually uses 2 AES keys glued together */
2541 .min_keysize
= AES_MIN_KEY_SIZE
* 2,
2542 .max_keysize
= AES_MAX_KEY_SIZE
* 2,
2543 .ivsize
= XTS_BLOCK_SIZE
,
2545 .cra_name
= "xts(aes)",
2546 .cra_driver_name
= "safexcel-xts-aes",
2547 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2548 .cra_flags
= CRYPTO_ALG_ASYNC
|
2549 CRYPTO_ALG_ALLOCATES_MEMORY
|
2550 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2551 .cra_blocksize
= XTS_BLOCK_SIZE
,
2552 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2554 .cra_init
= safexcel_skcipher_aes_xts_cra_init
,
2555 .cra_exit
= safexcel_skcipher_cra_exit
,
2556 .cra_module
= THIS_MODULE
,
2561 static int safexcel_aead_gcm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
2564 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
2565 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2566 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
2567 struct crypto_aes_ctx aes
;
2568 u32 hashkey
[AES_BLOCK_SIZE
>> 2];
2571 ret
= aes_expandkey(&aes
, key
, len
);
2573 memzero_explicit(&aes
, sizeof(aes
));
2577 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2578 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
2579 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
2580 ctx
->base
.needs_inv
= true;
2586 for (i
= 0; i
< len
/ sizeof(u32
); i
++)
2587 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
2591 /* Compute hash key by encrypting zeroes with cipher key */
2592 crypto_cipher_clear_flags(ctx
->hkaes
, CRYPTO_TFM_REQ_MASK
);
2593 crypto_cipher_set_flags(ctx
->hkaes
, crypto_aead_get_flags(ctfm
) &
2594 CRYPTO_TFM_REQ_MASK
);
2595 ret
= crypto_cipher_setkey(ctx
->hkaes
, key
, len
);
2599 memset(hashkey
, 0, AES_BLOCK_SIZE
);
2600 crypto_cipher_encrypt_one(ctx
->hkaes
, (u8
*)hashkey
, (u8
*)hashkey
);
2602 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2603 for (i
= 0; i
< AES_BLOCK_SIZE
/ sizeof(u32
); i
++) {
2604 if (be32_to_cpu(ctx
->base
.ipad
.be
[i
]) != hashkey
[i
]) {
2605 ctx
->base
.needs_inv
= true;
2611 for (i
= 0; i
< AES_BLOCK_SIZE
/ sizeof(u32
); i
++)
2612 ctx
->base
.ipad
.be
[i
] = cpu_to_be32(hashkey
[i
]);
2614 memzero_explicit(hashkey
, AES_BLOCK_SIZE
);
2615 memzero_explicit(&aes
, sizeof(aes
));
2619 static int safexcel_aead_gcm_cra_init(struct crypto_tfm
*tfm
)
2621 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2623 safexcel_aead_cra_init(tfm
);
2624 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_GHASH
;
2625 ctx
->state_sz
= GHASH_BLOCK_SIZE
;
2626 ctx
->xcm
= EIP197_XCM_MODE_GCM
;
2627 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_XCM
; /* override default */
2629 ctx
->hkaes
= crypto_alloc_cipher("aes", 0, 0);
2630 return PTR_ERR_OR_ZERO(ctx
->hkaes
);
2633 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm
*tfm
)
2635 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2637 crypto_free_cipher(ctx
->hkaes
);
2638 safexcel_aead_cra_exit(tfm
);
/* Delegate GCM tag-length validation to the generic helper. */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
                                         unsigned int authsize)
{
    return crypto_gcm_check_authsize(authsize);
}
2647 struct safexcel_alg_template safexcel_alg_gcm
= {
2648 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2649 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_GHASH
,
2651 .setkey
= safexcel_aead_gcm_setkey
,
2652 .setauthsize
= safexcel_aead_gcm_setauthsize
,
2653 .encrypt
= safexcel_aead_encrypt
,
2654 .decrypt
= safexcel_aead_decrypt
,
2655 .ivsize
= GCM_AES_IV_SIZE
,
2656 .maxauthsize
= GHASH_DIGEST_SIZE
,
2658 .cra_name
= "gcm(aes)",
2659 .cra_driver_name
= "safexcel-gcm-aes",
2660 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2661 .cra_flags
= CRYPTO_ALG_ASYNC
|
2662 CRYPTO_ALG_ALLOCATES_MEMORY
|
2663 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2665 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2667 .cra_init
= safexcel_aead_gcm_cra_init
,
2668 .cra_exit
= safexcel_aead_gcm_cra_exit
,
2669 .cra_module
= THIS_MODULE
,
2674 static int safexcel_aead_ccm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
2677 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
2678 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2679 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
2680 struct crypto_aes_ctx aes
;
2683 ret
= aes_expandkey(&aes
, key
, len
);
2685 memzero_explicit(&aes
, sizeof(aes
));
2689 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2690 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
2691 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
2692 ctx
->base
.needs_inv
= true;
2698 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
2699 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
2700 ctx
->base
.ipad
.be
[i
+ 2 * AES_BLOCK_SIZE
/ sizeof(u32
)] =
2701 cpu_to_be32(aes
.key_enc
[i
]);
2705 ctx
->state_sz
= 2 * AES_BLOCK_SIZE
+ len
;
2707 if (len
== AES_KEYSIZE_192
)
2708 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC192
;
2709 else if (len
== AES_KEYSIZE_256
)
2710 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC256
;
2712 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC128
;
2714 memzero_explicit(&aes
, sizeof(aes
));
2718 static int safexcel_aead_ccm_cra_init(struct crypto_tfm
*tfm
)
2720 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2722 safexcel_aead_cra_init(tfm
);
2723 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC128
;
2724 ctx
->state_sz
= 3 * AES_BLOCK_SIZE
;
2725 ctx
->xcm
= EIP197_XCM_MODE_CCM
;
2726 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_XCM
; /* override default */
2731 static int safexcel_aead_ccm_setauthsize(struct crypto_aead
*tfm
,
2732 unsigned int authsize
)
2734 /* Borrowed from crypto/ccm.c */
2751 static int safexcel_ccm_encrypt(struct aead_request
*req
)
2753 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
2755 if (req
->iv
[0] < 1 || req
->iv
[0] > 7)
2758 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_ENCRYPT
);
2761 static int safexcel_ccm_decrypt(struct aead_request
*req
)
2763 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
2765 if (req
->iv
[0] < 1 || req
->iv
[0] > 7)
2768 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_DECRYPT
);
2771 struct safexcel_alg_template safexcel_alg_ccm
= {
2772 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2773 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_CBC_MAC_ALL
,
2775 .setkey
= safexcel_aead_ccm_setkey
,
2776 .setauthsize
= safexcel_aead_ccm_setauthsize
,
2777 .encrypt
= safexcel_ccm_encrypt
,
2778 .decrypt
= safexcel_ccm_decrypt
,
2779 .ivsize
= AES_BLOCK_SIZE
,
2780 .maxauthsize
= AES_BLOCK_SIZE
,
2782 .cra_name
= "ccm(aes)",
2783 .cra_driver_name
= "safexcel-ccm-aes",
2784 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2785 .cra_flags
= CRYPTO_ALG_ASYNC
|
2786 CRYPTO_ALG_ALLOCATES_MEMORY
|
2787 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2789 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2791 .cra_init
= safexcel_aead_ccm_cra_init
,
2792 .cra_exit
= safexcel_aead_cra_exit
,
2793 .cra_module
= THIS_MODULE
,
2798 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx
*ctx
,
2801 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
2803 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
2804 if (memcmp(ctx
->key
, key
, CHACHA_KEY_SIZE
))
2805 ctx
->base
.needs_inv
= true;
2807 memcpy(ctx
->key
, key
, CHACHA_KEY_SIZE
);
2808 ctx
->key_len
= CHACHA_KEY_SIZE
;
2811 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher
*ctfm
,
2812 const u8
*key
, unsigned int len
)
2814 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(ctfm
);
2816 if (len
!= CHACHA_KEY_SIZE
)
2819 safexcel_chacha20_setkey(ctx
, key
);
2824 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm
*tfm
)
2826 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2828 safexcel_skcipher_cra_init(tfm
);
2829 ctx
->alg
= SAFEXCEL_CHACHA20
;
2831 ctx
->mode
= CONTEXT_CONTROL_CHACHA20_MODE_256_32
;
2835 struct safexcel_alg_template safexcel_alg_chacha20
= {
2836 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
2837 .algo_mask
= SAFEXCEL_ALG_CHACHA20
,
2839 .setkey
= safexcel_skcipher_chacha20_setkey
,
2840 .encrypt
= safexcel_encrypt
,
2841 .decrypt
= safexcel_decrypt
,
2842 .min_keysize
= CHACHA_KEY_SIZE
,
2843 .max_keysize
= CHACHA_KEY_SIZE
,
2844 .ivsize
= CHACHA_IV_SIZE
,
2846 .cra_name
= "chacha20",
2847 .cra_driver_name
= "safexcel-chacha20",
2848 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2849 .cra_flags
= CRYPTO_ALG_ASYNC
|
2850 CRYPTO_ALG_ALLOCATES_MEMORY
|
2851 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2853 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2855 .cra_init
= safexcel_skcipher_chacha20_cra_init
,
2856 .cra_exit
= safexcel_skcipher_cra_exit
,
2857 .cra_module
= THIS_MODULE
,
2862 static int safexcel_aead_chachapoly_setkey(struct crypto_aead
*ctfm
,
2863 const u8
*key
, unsigned int len
)
2865 struct safexcel_cipher_ctx
*ctx
= crypto_aead_ctx(ctfm
);
2867 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
&&
2868 len
> EIP197_AEAD_IPSEC_NONCE_SIZE
) {
2869 /* ESP variant has nonce appended to key */
2870 len
-= EIP197_AEAD_IPSEC_NONCE_SIZE
;
2871 ctx
->nonce
= *(u32
*)(key
+ len
);
2873 if (len
!= CHACHA_KEY_SIZE
)
2876 safexcel_chacha20_setkey(ctx
, key
);
2881 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead
*tfm
,
2882 unsigned int authsize
)
2884 if (authsize
!= POLY1305_DIGEST_SIZE
)
2889 static int safexcel_aead_chachapoly_crypt(struct aead_request
*req
,
2890 enum safexcel_cipher_direction dir
)
2892 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
2893 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
2894 struct crypto_tfm
*tfm
= crypto_aead_tfm(aead
);
2895 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2896 struct aead_request
*subreq
= aead_request_ctx(req
);
2897 u32 key
[CHACHA_KEY_SIZE
/ sizeof(u32
) + 1];
2901 * Instead of wasting time detecting umpteen silly corner cases,
2902 * just dump all "small" requests to the fallback implementation.
2903 * HW would not be faster on such small requests anyway.
2905 if (likely((ctx
->aead
!= EIP197_AEAD_TYPE_IPSEC_ESP
||
2906 req
->assoclen
>= EIP197_AEAD_IPSEC_IV_SIZE
) &&
2907 req
->cryptlen
> POLY1305_DIGEST_SIZE
)) {
2908 return safexcel_queue_req(&req
->base
, creq
, dir
);
2911 /* HW cannot do full (AAD+payload) zero length, use fallback */
2912 memcpy(key
, ctx
->key
, CHACHA_KEY_SIZE
);
2913 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
) {
2914 /* ESP variant has nonce appended to the key */
2915 key
[CHACHA_KEY_SIZE
/ sizeof(u32
)] = ctx
->nonce
;
2916 ret
= crypto_aead_setkey(ctx
->fback
, (u8
*)key
,
2918 EIP197_AEAD_IPSEC_NONCE_SIZE
);
2920 ret
= crypto_aead_setkey(ctx
->fback
, (u8
*)key
,
2924 crypto_aead_clear_flags(aead
, CRYPTO_TFM_REQ_MASK
);
2925 crypto_aead_set_flags(aead
, crypto_aead_get_flags(ctx
->fback
) &
2926 CRYPTO_TFM_REQ_MASK
);
2930 aead_request_set_tfm(subreq
, ctx
->fback
);
2931 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
2933 aead_request_set_crypt(subreq
, req
->src
, req
->dst
, req
->cryptlen
,
2935 aead_request_set_ad(subreq
, req
->assoclen
);
2937 return (dir
== SAFEXCEL_ENCRYPT
) ?
2938 crypto_aead_encrypt(subreq
) :
2939 crypto_aead_decrypt(subreq
);
2942 static int safexcel_aead_chachapoly_encrypt(struct aead_request
*req
)
2944 return safexcel_aead_chachapoly_crypt(req
, SAFEXCEL_ENCRYPT
);
2947 static int safexcel_aead_chachapoly_decrypt(struct aead_request
*req
)
2949 return safexcel_aead_chachapoly_crypt(req
, SAFEXCEL_DECRYPT
);
2952 static int safexcel_aead_fallback_cra_init(struct crypto_tfm
*tfm
)
2954 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
2955 struct aead_alg
*alg
= crypto_aead_alg(aead
);
2956 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2958 safexcel_aead_cra_init(tfm
);
2960 /* Allocate fallback implementation */
2961 ctx
->fback
= crypto_alloc_aead(alg
->base
.cra_name
, 0,
2963 CRYPTO_ALG_NEED_FALLBACK
);
2964 if (IS_ERR(ctx
->fback
))
2965 return PTR_ERR(ctx
->fback
);
2967 crypto_aead_set_reqsize(aead
, max(sizeof(struct safexcel_cipher_req
),
2968 sizeof(struct aead_request
) +
2969 crypto_aead_reqsize(ctx
->fback
)));
2974 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm
*tfm
)
2976 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2978 safexcel_aead_fallback_cra_init(tfm
);
2979 ctx
->alg
= SAFEXCEL_CHACHA20
;
2980 ctx
->mode
= CONTEXT_CONTROL_CHACHA20_MODE_256_32
|
2981 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK
;
2983 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_POLY1305
;
2984 ctx
->state_sz
= 0; /* Precomputed by HW */
2988 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm
*tfm
)
2990 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2992 crypto_free_aead(ctx
->fback
);
2993 safexcel_aead_cra_exit(tfm
);
2996 struct safexcel_alg_template safexcel_alg_chachapoly
= {
2997 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2998 .algo_mask
= SAFEXCEL_ALG_CHACHA20
| SAFEXCEL_ALG_POLY1305
,
3000 .setkey
= safexcel_aead_chachapoly_setkey
,
3001 .setauthsize
= safexcel_aead_chachapoly_setauthsize
,
3002 .encrypt
= safexcel_aead_chachapoly_encrypt
,
3003 .decrypt
= safexcel_aead_chachapoly_decrypt
,
3004 .ivsize
= CHACHAPOLY_IV_SIZE
,
3005 .maxauthsize
= POLY1305_DIGEST_SIZE
,
3007 .cra_name
= "rfc7539(chacha20,poly1305)",
3008 .cra_driver_name
= "safexcel-chacha20-poly1305",
3009 /* +1 to put it above HW chacha + SW poly */
3010 .cra_priority
= SAFEXCEL_CRA_PRIORITY
+ 1,
3011 .cra_flags
= CRYPTO_ALG_ASYNC
|
3012 CRYPTO_ALG_ALLOCATES_MEMORY
|
3013 CRYPTO_ALG_KERN_DRIVER_ONLY
|
3014 CRYPTO_ALG_NEED_FALLBACK
,
3016 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3018 .cra_init
= safexcel_aead_chachapoly_cra_init
,
3019 .cra_exit
= safexcel_aead_fallback_cra_exit
,
3020 .cra_module
= THIS_MODULE
,
3025 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm
*tfm
)
3027 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3030 ret
= safexcel_aead_chachapoly_cra_init(tfm
);
3031 ctx
->aead
= EIP197_AEAD_TYPE_IPSEC_ESP
;
3032 ctx
->aadskip
= EIP197_AEAD_IPSEC_IV_SIZE
;
3036 struct safexcel_alg_template safexcel_alg_chachapoly_esp
= {
3037 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
3038 .algo_mask
= SAFEXCEL_ALG_CHACHA20
| SAFEXCEL_ALG_POLY1305
,
3040 .setkey
= safexcel_aead_chachapoly_setkey
,
3041 .setauthsize
= safexcel_aead_chachapoly_setauthsize
,
3042 .encrypt
= safexcel_aead_chachapoly_encrypt
,
3043 .decrypt
= safexcel_aead_chachapoly_decrypt
,
3044 .ivsize
= CHACHAPOLY_IV_SIZE
- EIP197_AEAD_IPSEC_NONCE_SIZE
,
3045 .maxauthsize
= POLY1305_DIGEST_SIZE
,
3047 .cra_name
= "rfc7539esp(chacha20,poly1305)",
3048 .cra_driver_name
= "safexcel-chacha20-poly1305-esp",
3049 /* +1 to put it above HW chacha + SW poly */
3050 .cra_priority
= SAFEXCEL_CRA_PRIORITY
+ 1,
3051 .cra_flags
= CRYPTO_ALG_ASYNC
|
3052 CRYPTO_ALG_ALLOCATES_MEMORY
|
3053 CRYPTO_ALG_KERN_DRIVER_ONLY
|
3054 CRYPTO_ALG_NEED_FALLBACK
,
3056 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3058 .cra_init
= safexcel_aead_chachapolyesp_cra_init
,
3059 .cra_exit
= safexcel_aead_fallback_cra_exit
,
3060 .cra_module
= THIS_MODULE
,
3065 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher
*ctfm
,
3066 const u8
*key
, unsigned int len
)
3068 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
3069 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3070 struct safexcel_crypto_priv
*priv
= ctx
->base
.priv
;
3072 if (len
!= SM4_KEY_SIZE
)
3075 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
3076 if (memcmp(ctx
->key
, key
, SM4_KEY_SIZE
))
3077 ctx
->base
.needs_inv
= true;
3079 memcpy(ctx
->key
, key
, SM4_KEY_SIZE
);
3080 ctx
->key_len
= SM4_KEY_SIZE
;
3085 static int safexcel_sm4_blk_encrypt(struct skcipher_request
*req
)
3087 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3088 if (req
->cryptlen
& (SM4_BLOCK_SIZE
- 1))
3091 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
3095 static int safexcel_sm4_blk_decrypt(struct skcipher_request
*req
)
3097 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3098 if (req
->cryptlen
& (SM4_BLOCK_SIZE
- 1))
3101 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
3105 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm
*tfm
)
3107 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3109 safexcel_skcipher_cra_init(tfm
);
3110 ctx
->alg
= SAFEXCEL_SM4
;
3111 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
3113 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
3117 struct safexcel_alg_template safexcel_alg_ecb_sm4
= {
3118 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3119 .algo_mask
= SAFEXCEL_ALG_SM4
,
3121 .setkey
= safexcel_skcipher_sm4_setkey
,
3122 .encrypt
= safexcel_sm4_blk_encrypt
,
3123 .decrypt
= safexcel_sm4_blk_decrypt
,
3124 .min_keysize
= SM4_KEY_SIZE
,
3125 .max_keysize
= SM4_KEY_SIZE
,
3127 .cra_name
= "ecb(sm4)",
3128 .cra_driver_name
= "safexcel-ecb-sm4",
3129 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3130 .cra_flags
= CRYPTO_ALG_ASYNC
|
3131 CRYPTO_ALG_ALLOCATES_MEMORY
|
3132 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3133 .cra_blocksize
= SM4_BLOCK_SIZE
,
3134 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3136 .cra_init
= safexcel_skcipher_sm4_ecb_cra_init
,
3137 .cra_exit
= safexcel_skcipher_cra_exit
,
3138 .cra_module
= THIS_MODULE
,
3143 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm
*tfm
)
3145 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3147 safexcel_skcipher_cra_init(tfm
);
3148 ctx
->alg
= SAFEXCEL_SM4
;
3149 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3150 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
3154 struct safexcel_alg_template safexcel_alg_cbc_sm4
= {
3155 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3156 .algo_mask
= SAFEXCEL_ALG_SM4
,
3158 .setkey
= safexcel_skcipher_sm4_setkey
,
3159 .encrypt
= safexcel_sm4_blk_encrypt
,
3160 .decrypt
= safexcel_sm4_blk_decrypt
,
3161 .min_keysize
= SM4_KEY_SIZE
,
3162 .max_keysize
= SM4_KEY_SIZE
,
3163 .ivsize
= SM4_BLOCK_SIZE
,
3165 .cra_name
= "cbc(sm4)",
3166 .cra_driver_name
= "safexcel-cbc-sm4",
3167 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3168 .cra_flags
= CRYPTO_ALG_ASYNC
|
3169 CRYPTO_ALG_ALLOCATES_MEMORY
|
3170 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3171 .cra_blocksize
= SM4_BLOCK_SIZE
,
3172 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3174 .cra_init
= safexcel_skcipher_sm4_cbc_cra_init
,
3175 .cra_exit
= safexcel_skcipher_cra_exit
,
3176 .cra_module
= THIS_MODULE
,
3181 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm
*tfm
)
3183 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3185 safexcel_skcipher_cra_init(tfm
);
3186 ctx
->alg
= SAFEXCEL_SM4
;
3187 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3188 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_OFB
;
3192 struct safexcel_alg_template safexcel_alg_ofb_sm4
= {
3193 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3194 .algo_mask
= SAFEXCEL_ALG_SM4
| SAFEXCEL_ALG_AES_XFB
,
3196 .setkey
= safexcel_skcipher_sm4_setkey
,
3197 .encrypt
= safexcel_encrypt
,
3198 .decrypt
= safexcel_decrypt
,
3199 .min_keysize
= SM4_KEY_SIZE
,
3200 .max_keysize
= SM4_KEY_SIZE
,
3201 .ivsize
= SM4_BLOCK_SIZE
,
3203 .cra_name
= "ofb(sm4)",
3204 .cra_driver_name
= "safexcel-ofb-sm4",
3205 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3206 .cra_flags
= CRYPTO_ALG_ASYNC
|
3207 CRYPTO_ALG_ALLOCATES_MEMORY
|
3208 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3210 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3212 .cra_init
= safexcel_skcipher_sm4_ofb_cra_init
,
3213 .cra_exit
= safexcel_skcipher_cra_exit
,
3214 .cra_module
= THIS_MODULE
,
3219 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm
*tfm
)
3221 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3223 safexcel_skcipher_cra_init(tfm
);
3224 ctx
->alg
= SAFEXCEL_SM4
;
3225 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3226 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CFB
;
3230 struct safexcel_alg_template safexcel_alg_cfb_sm4
= {
3231 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3232 .algo_mask
= SAFEXCEL_ALG_SM4
| SAFEXCEL_ALG_AES_XFB
,
3234 .setkey
= safexcel_skcipher_sm4_setkey
,
3235 .encrypt
= safexcel_encrypt
,
3236 .decrypt
= safexcel_decrypt
,
3237 .min_keysize
= SM4_KEY_SIZE
,
3238 .max_keysize
= SM4_KEY_SIZE
,
3239 .ivsize
= SM4_BLOCK_SIZE
,
3241 .cra_name
= "cfb(sm4)",
3242 .cra_driver_name
= "safexcel-cfb-sm4",
3243 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3244 .cra_flags
= CRYPTO_ALG_ASYNC
|
3245 CRYPTO_ALG_ALLOCATES_MEMORY
|
3246 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3248 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3250 .cra_init
= safexcel_skcipher_sm4_cfb_cra_init
,
3251 .cra_exit
= safexcel_skcipher_cra_exit
,
3252 .cra_module
= THIS_MODULE
,
3257 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher
*ctfm
,
3258 const u8
*key
, unsigned int len
)
3260 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
3261 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3263 /* last 4 bytes of key are the nonce! */
3264 ctx
->nonce
= *(u32
*)(key
+ len
- CTR_RFC3686_NONCE_SIZE
);
3265 /* exclude the nonce here */
3266 len
-= CTR_RFC3686_NONCE_SIZE
;
3268 return safexcel_skcipher_sm4_setkey(ctfm
, key
, len
);
3271 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm
*tfm
)
3273 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3275 safexcel_skcipher_cra_init(tfm
);
3276 ctx
->alg
= SAFEXCEL_SM4
;
3277 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3278 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
;
3282 struct safexcel_alg_template safexcel_alg_ctr_sm4
= {
3283 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3284 .algo_mask
= SAFEXCEL_ALG_SM4
,
3286 .setkey
= safexcel_skcipher_sm4ctr_setkey
,
3287 .encrypt
= safexcel_encrypt
,
3288 .decrypt
= safexcel_decrypt
,
3289 /* Add nonce size */
3290 .min_keysize
= SM4_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
3291 .max_keysize
= SM4_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
3292 .ivsize
= CTR_RFC3686_IV_SIZE
,
3294 .cra_name
= "rfc3686(ctr(sm4))",
3295 .cra_driver_name
= "safexcel-ctr-sm4",
3296 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3297 .cra_flags
= CRYPTO_ALG_ASYNC
|
3298 CRYPTO_ALG_ALLOCATES_MEMORY
|
3299 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3301 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3303 .cra_init
= safexcel_skcipher_sm4_ctr_cra_init
,
3304 .cra_exit
= safexcel_skcipher_cra_exit
,
3305 .cra_module
= THIS_MODULE
,
3310 static int safexcel_aead_sm4_blk_encrypt(struct aead_request
*req
)
3312 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3313 if (req
->cryptlen
& (SM4_BLOCK_SIZE
- 1))
3316 return safexcel_queue_req(&req
->base
, aead_request_ctx(req
),
3320 static int safexcel_aead_sm4_blk_decrypt(struct aead_request
*req
)
3322 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
3324 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3325 if ((req
->cryptlen
- crypto_aead_authsize(tfm
)) & (SM4_BLOCK_SIZE
- 1))
3328 return safexcel_queue_req(&req
->base
, aead_request_ctx(req
),
3332 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm
*tfm
)
3334 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3336 safexcel_aead_cra_init(tfm
);
3337 ctx
->alg
= SAFEXCEL_SM4
;
3338 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3339 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA1
;
3340 ctx
->state_sz
= SHA1_DIGEST_SIZE
;
3344 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4
= {
3345 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
3346 .algo_mask
= SAFEXCEL_ALG_SM4
| SAFEXCEL_ALG_SHA1
,
3348 .setkey
= safexcel_aead_setkey
,
3349 .encrypt
= safexcel_aead_sm4_blk_encrypt
,
3350 .decrypt
= safexcel_aead_sm4_blk_decrypt
,
3351 .ivsize
= SM4_BLOCK_SIZE
,
3352 .maxauthsize
= SHA1_DIGEST_SIZE
,
3354 .cra_name
= "authenc(hmac(sha1),cbc(sm4))",
3355 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-sm4",
3356 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3357 .cra_flags
= CRYPTO_ALG_ASYNC
|
3358 CRYPTO_ALG_ALLOCATES_MEMORY
|
3359 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3360 .cra_blocksize
= SM4_BLOCK_SIZE
,
3361 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3363 .cra_init
= safexcel_aead_sm4cbc_sha1_cra_init
,
3364 .cra_exit
= safexcel_aead_cra_exit
,
3365 .cra_module
= THIS_MODULE
,
3370 static int safexcel_aead_fallback_setkey(struct crypto_aead
*ctfm
,
3371 const u8
*key
, unsigned int len
)
3373 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3374 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3376 /* Keep fallback cipher synchronized */
3377 return crypto_aead_setkey(ctx
->fback
, (u8
*)key
, len
) ?:
3378 safexcel_aead_setkey(ctfm
, key
, len
);
3381 static int safexcel_aead_fallback_setauthsize(struct crypto_aead
*ctfm
,
3382 unsigned int authsize
)
3384 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3385 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3387 /* Keep fallback cipher synchronized */
3388 return crypto_aead_setauthsize(ctx
->fback
, authsize
);
/*
 * Forward the whole request to the software fallback AEAD, mirroring the
 * original request's parameters onto the subrequest stored in the req ctx.
 */
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
/*
 * SM4-CBC + SM3 encrypt: reject non-blocksize payloads in SW (HW bug),
 * queue non-empty requests to HW, fall back for fully zero-length input.
 */
static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}
/*
 * SM4-CBC + SM3 decrypt: ciphertext length (minus auth tag) must be a
 * multiple of the SM4 block size; zero-length input goes to the fallback.
 */
static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}
/* tfm init: set up fallback tfm, then select SM4 cipher and SM3 hash */
static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}
/*
 * Template for authenc(hmac(sm3),cbc(sm4)); needs a SW fallback for the
 * zero-length corner case the HW cannot handle.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
/* tfm init: reuse SM4-CBC + SHA1 setup, then switch the mode to CTR */
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
/* Template for authenc(hmac(sha1),rfc3686(ctr(sm4))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream cipher mode: blocksize is 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
/* tfm init: reuse SM4-CBC + SM3 setup, then switch the mode to CTR */
static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
/* Template for authenc(hmac(sm3),rfc3686(ctr(sm4))) */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream cipher mode: blocksize is 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3551 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
3554 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3555 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3557 /* last 4 bytes of key are the nonce! */
3558 ctx
->nonce
= *(u32
*)(key
+ len
- CTR_RFC3686_NONCE_SIZE
);
3560 len
-= CTR_RFC3686_NONCE_SIZE
;
3561 return safexcel_aead_gcm_setkey(ctfm
, key
, len
);
/* Delegate RFC4106 authsize validation to the generic helper */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3570 static int safexcel_rfc4106_encrypt(struct aead_request
*req
)
3572 return crypto_ipsec_check_assoclen(req
->assoclen
) ?:
3573 safexcel_aead_encrypt(req
);
3576 static int safexcel_rfc4106_decrypt(struct aead_request
*req
)
3578 return crypto_ipsec_check_assoclen(req
->assoclen
) ?:
3579 safexcel_aead_decrypt(req
);
/* tfm init: plain GCM setup, then mark as IPsec ESP and skip the ESP IV */
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
/* Template for rfc4106(gcm(aes)) — AES-GCM for IPsec ESP */
struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream cipher mode: blocksize is 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
/* RFC4543 (GMAC) only allows the full-size GHASH tag */
static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}
/* tfm init: plain GCM setup, then switch to IPsec ESP GMAC operation */
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}
/* Template for rfc4543(gcm(aes)) — AES-GMAC (auth only) for IPsec ESP */
struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		/* same setkey as rfc4106: trailing 4 key bytes are the salt */
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* stream cipher mode: blocksize is 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
/*
 * RFC4309 setkey: build the CCM nonce word from the L value and the
 * trailing 3 salt bytes of the key, then pass the rest to CCM setkey.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
/*
 * NOTE(review): the body of this function was lost in extraction; the
 * validation below follows the rfc4309 rule from crypto/ccm.c (only tag
 * sizes 8, 12 and 16 are valid) per the original comment — TODO confirm
 * against the upstream source.
 */
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
/* RFC4309 encrypt: ESP AAD must be 16 (no ESN) or 20 (ESN) bytes */
static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}
/* RFC4309 decrypt: ESP AAD must be 16 (no ESN) or 20 (ESN) bytes */
static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
/* tfm init: plain CCM setup, then mark as IPsec ESP and skip the ESP IV */
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3730 struct safexcel_alg_template safexcel_alg_rfc4309_ccm
= {
3731 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
3732 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_CBC_MAC_ALL
,
3734 .setkey
= safexcel_rfc4309_ccm_setkey
,
3735 .setauthsize
= safexcel_rfc4309_ccm_setauthsize
,
3736 .encrypt
= safexcel_rfc4309_ccm_encrypt
,
3737 .decrypt
= safexcel_rfc4309_ccm_decrypt
,
3738 .ivsize
= EIP197_AEAD_IPSEC_IV_SIZE
,
3739 .maxauthsize
= AES_BLOCK_SIZE
,
3741 .cra_name
= "rfc4309(ccm(aes))",
3742 .cra_driver_name
= "safexcel-rfc4309-ccm-aes",
3743 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3744 .cra_flags
= CRYPTO_ALG_ASYNC
|
3745 CRYPTO_ALG_ALLOCATES_MEMORY
|
3746 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3748 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3750 .cra_init
= safexcel_rfc4309_ccm_cra_init
,
3751 .cra_exit
= safexcel_aead_cra_exit
,
3752 .cra_module
= THIS_MODULE
,