1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) 2017 Marvell
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
8 #include <asm/unaligned.h>
9 #include <linux/device.h>
10 #include <linux/dma-mapping.h>
11 #include <linux/dmapool.h>
12 #include <crypto/aead.h>
13 #include <crypto/aes.h>
14 #include <crypto/authenc.h>
15 #include <crypto/chacha.h>
16 #include <crypto/ctr.h>
17 #include <crypto/internal/des.h>
18 #include <crypto/gcm.h>
19 #include <crypto/ghash.h>
20 #include <crypto/poly1305.h>
21 #include <crypto/sha.h>
22 #include <crypto/sm3.h>
23 #include <crypto/sm4.h>
24 #include <crypto/xts.h>
25 #include <crypto/skcipher.h>
26 #include <crypto/internal/aead.h>
27 #include <crypto/internal/skcipher.h>
/* Request direction as seen by the engine (also used for token layout). */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
/*
 * Cipher algorithms supported by the engine; members restored from their
 * uses elsewhere in this file (context_control, setkey, token builders).
 */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
44 struct safexcel_cipher_ctx
{
45 struct safexcel_context base
;
46 struct safexcel_crypto_priv
*priv
;
49 enum safexcel_cipher_alg alg
;
50 u8 aead
; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
51 u8 xcm
; /* 0=authenc, 1=GCM, 2 reserved for CCM */
59 unsigned int key_len
, xts
;
61 /* All the below is AEAD specific */
64 __be32 ipad
[SHA512_DIGEST_SIZE
/ sizeof(u32
)];
65 __be32 opad
[SHA512_DIGEST_SIZE
/ sizeof(u32
)];
67 struct crypto_cipher
*hkaes
;
68 struct crypto_aead
*fback
;
71 struct safexcel_cipher_req
{
72 enum safexcel_cipher_direction direction
;
73 /* Number of result descriptors associated to the request */
79 static int safexcel_skcipher_iv(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
80 struct safexcel_command_desc
*cdesc
)
82 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
) {
83 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
85 cdesc
->control_data
.token
[0] = ctx
->nonce
;
87 memcpy(&cdesc
->control_data
.token
[1], iv
, 8);
88 /* 32 bit counter, start at 0 or 1 (big endian!) */
89 cdesc
->control_data
.token
[3] =
90 (__force u32
)cpu_to_be32(ctx
->ctrinit
);
93 if (ctx
->alg
== SAFEXCEL_CHACHA20
) {
94 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
95 /* 96 bit nonce part */
96 memcpy(&cdesc
->control_data
.token
[0], &iv
[4], 12);
98 cdesc
->control_data
.token
[3] = *(u32
*)iv
;
102 cdesc
->control_data
.options
|= ctx
->ivmask
;
103 memcpy(cdesc
->control_data
.token
, iv
, ctx
->blocksz
);
104 return ctx
->blocksz
/ sizeof(u32
);
107 static void safexcel_skcipher_token(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
108 struct safexcel_command_desc
*cdesc
,
109 struct safexcel_token
*atoken
,
112 struct safexcel_token
*token
;
115 ivlen
= safexcel_skcipher_iv(ctx
, iv
, cdesc
);
117 /* No space in cdesc, instruction moves to atoken */
118 cdesc
->additional_cdata_size
= 1;
121 /* Everything fits in cdesc */
122 token
= (struct safexcel_token
*)(cdesc
->control_data
.token
+ 2);
123 /* Need to pad with NOP */
124 eip197_noop_token(&token
[1]);
127 token
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
128 token
->packet_length
= length
;
129 token
->stat
= EIP197_TOKEN_STAT_LAST_PACKET
|
130 EIP197_TOKEN_STAT_LAST_HASH
;
131 token
->instructions
= EIP197_TOKEN_INS_LAST
|
132 EIP197_TOKEN_INS_TYPE_CRYPTO
|
133 EIP197_TOKEN_INS_TYPE_OUTPUT
;
136 static void safexcel_aead_iv(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
137 struct safexcel_command_desc
*cdesc
)
139 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
||
140 ctx
->aead
& EIP197_AEAD_TYPE_IPSEC_ESP
) { /* _ESP and _ESP_GMAC */
142 cdesc
->control_data
.token
[0] = ctx
->nonce
;
144 memcpy(&cdesc
->control_data
.token
[1], iv
, 8);
145 /* 32 bit counter, start at 0 or 1 (big endian!) */
146 cdesc
->control_data
.token
[3] =
147 (__force u32
)cpu_to_be32(ctx
->ctrinit
);
150 if (ctx
->xcm
== EIP197_XCM_MODE_GCM
|| ctx
->alg
== SAFEXCEL_CHACHA20
) {
152 memcpy(&cdesc
->control_data
.token
[0], iv
, 12);
153 /* 32 bit counter, start at 0 or 1 (big endian!) */
154 cdesc
->control_data
.token
[3] =
155 (__force u32
)cpu_to_be32(ctx
->ctrinit
);
159 memcpy(cdesc
->control_data
.token
, iv
, ctx
->blocksz
);
162 static void safexcel_aead_token(struct safexcel_cipher_ctx
*ctx
, u8
*iv
,
163 struct safexcel_command_desc
*cdesc
,
164 struct safexcel_token
*atoken
,
165 enum safexcel_cipher_direction direction
,
166 u32 cryptlen
, u32 assoclen
, u32 digestsize
)
168 struct safexcel_token
*aadref
;
169 int atoksize
= 2; /* Start with minimum size */
170 int assocadj
= assoclen
- ctx
->aadskip
, aadalign
;
172 /* Always 4 dwords of embedded IV for AEAD modes */
173 cdesc
->control_data
.options
|= EIP197_OPTION_4_TOKEN_IV_CMD
;
175 if (direction
== SAFEXCEL_DECRYPT
)
176 cryptlen
-= digestsize
;
178 if (unlikely(ctx
->xcm
== EIP197_XCM_MODE_CCM
)) {
179 /* Construct IV block B0 for the CBC-MAC */
180 u8
*final_iv
= (u8
*)cdesc
->control_data
.token
;
181 u8
*cbcmaciv
= (u8
*)&atoken
[1];
182 __le32
*aadlen
= (__le32
*)&atoken
[5];
184 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
) {
186 cdesc
->control_data
.token
[0] = ctx
->nonce
;
187 /* Fixup flags byte */
188 *(__le32
*)cbcmaciv
=
189 cpu_to_le32(ctx
->nonce
|
190 ((assocadj
> 0) << 6) |
191 ((digestsize
- 2) << 2));
193 memcpy(&cdesc
->control_data
.token
[1], iv
, 8);
194 memcpy(cbcmaciv
+ 4, iv
, 8);
195 /* Start counter at 0 */
196 cdesc
->control_data
.token
[3] = 0;
198 *(__be32
*)(cbcmaciv
+ 12) = cpu_to_be32(cryptlen
);
200 /* Variable length IV part */
201 memcpy(final_iv
, iv
, 15 - iv
[0]);
202 memcpy(cbcmaciv
, iv
, 15 - iv
[0]);
203 /* Start variable length counter at 0 */
204 memset(final_iv
+ 15 - iv
[0], 0, iv
[0] + 1);
205 memset(cbcmaciv
+ 15 - iv
[0], 0, iv
[0] - 1);
206 /* fixup flags byte */
207 cbcmaciv
[0] |= ((assocadj
> 0) << 6) |
208 ((digestsize
- 2) << 2);
209 /* insert lower 2 bytes of message length */
210 cbcmaciv
[14] = cryptlen
>> 8;
211 cbcmaciv
[15] = cryptlen
& 255;
214 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
215 atoken
->packet_length
= AES_BLOCK_SIZE
+
216 ((assocadj
> 0) << 1);
218 atoken
->instructions
= EIP197_TOKEN_INS_ORIGIN_TOKEN
|
219 EIP197_TOKEN_INS_TYPE_HASH
;
221 if (likely(assocadj
)) {
222 *aadlen
= cpu_to_le32((assocadj
>> 8) |
223 (assocadj
& 255) << 8);
231 /* Process AAD data */
233 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
234 atoken
->packet_length
= assocadj
;
236 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
239 /* For CCM only, align AAD data towards hash engine */
240 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
241 aadalign
= (assocadj
+ 2) & 15;
242 atoken
->packet_length
= assocadj
&& aadalign
?
245 if (likely(cryptlen
)) {
247 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
249 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
250 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
251 EIP197_TOKEN_INS_TYPE_HASH
;
254 safexcel_aead_iv(ctx
, iv
, cdesc
);
256 /* Process AAD data */
258 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
259 atoken
->packet_length
= assocadj
;
260 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
261 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
262 EIP197_TOKEN_INS_TYPE_HASH
;
266 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
) {
267 /* For ESP mode (and not GMAC), skip over the IV */
268 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
269 atoken
->packet_length
= EIP197_AEAD_IPSEC_IV_SIZE
;
271 atoken
->instructions
= 0;
274 } else if (unlikely(ctx
->alg
== SAFEXCEL_CHACHA20
&&
275 direction
== SAFEXCEL_DECRYPT
)) {
276 /* Poly-chacha decryption needs a dummy NOP here ... */
277 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
278 atoken
->packet_length
= 16; /* According to Op Manual */
280 atoken
->instructions
= 0;
286 /* For GCM and CCM, obtain enc(Y0) */
287 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT_REMRES
;
288 atoken
->packet_length
= 0;
290 atoken
->instructions
= AES_BLOCK_SIZE
;
293 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
294 atoken
->packet_length
= AES_BLOCK_SIZE
;
296 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
|
297 EIP197_TOKEN_INS_TYPE_CRYPTO
;
302 if (likely(cryptlen
|| ctx
->alg
== SAFEXCEL_CHACHA20
)) {
303 /* Fixup stat field for AAD direction instruction */
306 /* Process crypto data */
307 atoken
->opcode
= EIP197_TOKEN_OPCODE_DIRECTION
;
308 atoken
->packet_length
= cryptlen
;
310 if (unlikely(ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP_GMAC
)) {
311 /* Fixup instruction field for AAD dir instruction */
312 aadref
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
314 /* Do not send to crypt engine in case of GMAC */
315 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
316 EIP197_TOKEN_INS_TYPE_HASH
|
317 EIP197_TOKEN_INS_TYPE_OUTPUT
;
319 atoken
->instructions
= EIP197_TOKEN_INS_LAST
|
320 EIP197_TOKEN_INS_TYPE_CRYPTO
|
321 EIP197_TOKEN_INS_TYPE_HASH
|
322 EIP197_TOKEN_INS_TYPE_OUTPUT
;
326 if (unlikely(ctx
->xcm
== EIP197_XCM_MODE_CCM
&& cryptlen
)) {
328 /* For CCM only, pad crypto data to the hash engine */
331 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
332 atoken
->packet_length
= 16 - cryptlen
;
333 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
334 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_HASH
;
336 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
;
342 if (direction
== SAFEXCEL_ENCRYPT
) {
344 atoken
->opcode
= EIP197_TOKEN_OPCODE_INSERT
;
345 atoken
->packet_length
= digestsize
;
346 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
|
347 EIP197_TOKEN_STAT_LAST_PACKET
;
348 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
|
349 EIP197_TOKEN_INS_INSERT_HASH_DIGEST
;
352 atoken
->opcode
= EIP197_TOKEN_OPCODE_RETRIEVE
;
353 atoken
->packet_length
= digestsize
;
354 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
|
355 EIP197_TOKEN_STAT_LAST_PACKET
;
356 atoken
->instructions
= EIP197_TOKEN_INS_INSERT_HASH_DIGEST
;
361 atoken
->opcode
= EIP197_TOKEN_OPCODE_VERIFY
;
362 atoken
->packet_length
= digestsize
|
363 EIP197_TOKEN_HASH_RESULT_VERIFY
;
364 atoken
->stat
= EIP197_TOKEN_STAT_LAST_HASH
|
365 EIP197_TOKEN_STAT_LAST_PACKET
;
366 atoken
->instructions
= EIP197_TOKEN_INS_TYPE_OUTPUT
;
369 /* Fixup length of the token in the command descriptor */
370 cdesc
->additional_cdata_size
= atoksize
;
373 static int safexcel_skcipher_aes_setkey(struct crypto_skcipher
*ctfm
,
374 const u8
*key
, unsigned int len
)
376 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
377 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
378 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
379 struct crypto_aes_ctx aes
;
382 ret
= aes_expandkey(&aes
, key
, len
);
386 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
387 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
388 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
389 ctx
->base
.needs_inv
= true;
395 for (i
= 0; i
< len
/ sizeof(u32
); i
++)
396 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
400 memzero_explicit(&aes
, sizeof(aes
));
404 static int safexcel_aead_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
407 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
408 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
409 struct safexcel_ahash_export_state istate
, ostate
;
410 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
411 struct crypto_authenc_keys keys
;
412 struct crypto_aes_ctx aes
;
413 int err
= -EINVAL
, i
;
415 if (unlikely(crypto_authenc_extractkeys(&keys
, key
, len
)))
418 if (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
) {
419 /* Must have at least space for the nonce here */
420 if (unlikely(keys
.enckeylen
< CTR_RFC3686_NONCE_SIZE
))
422 /* last 4 bytes of key are the nonce! */
423 ctx
->nonce
= *(u32
*)(keys
.enckey
+ keys
.enckeylen
-
424 CTR_RFC3686_NONCE_SIZE
);
425 /* exclude the nonce here */
426 keys
.enckeylen
-= CTR_RFC3686_NONCE_SIZE
;
432 err
= verify_aead_des_key(ctfm
, keys
.enckey
, keys
.enckeylen
);
437 err
= verify_aead_des3_key(ctfm
, keys
.enckey
, keys
.enckeylen
);
442 err
= aes_expandkey(&aes
, keys
.enckey
, keys
.enckeylen
);
447 if (unlikely(keys
.enckeylen
!= SM4_KEY_SIZE
))
451 dev_err(priv
->dev
, "aead: unsupported cipher algorithm\n");
455 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
456 for (i
= 0; i
< keys
.enckeylen
/ sizeof(u32
); i
++) {
457 if (le32_to_cpu(ctx
->key
[i
]) !=
458 ((u32
*)keys
.enckey
)[i
]) {
459 ctx
->base
.needs_inv
= true;
466 switch (ctx
->hash_alg
) {
467 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1
:
468 if (safexcel_hmac_setkey("safexcel-sha1", keys
.authkey
,
469 keys
.authkeylen
, &istate
, &ostate
))
472 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224
:
473 if (safexcel_hmac_setkey("safexcel-sha224", keys
.authkey
,
474 keys
.authkeylen
, &istate
, &ostate
))
477 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256
:
478 if (safexcel_hmac_setkey("safexcel-sha256", keys
.authkey
,
479 keys
.authkeylen
, &istate
, &ostate
))
482 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384
:
483 if (safexcel_hmac_setkey("safexcel-sha384", keys
.authkey
,
484 keys
.authkeylen
, &istate
, &ostate
))
487 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512
:
488 if (safexcel_hmac_setkey("safexcel-sha512", keys
.authkey
,
489 keys
.authkeylen
, &istate
, &ostate
))
492 case CONTEXT_CONTROL_CRYPTO_ALG_SM3
:
493 if (safexcel_hmac_setkey("safexcel-sm3", keys
.authkey
,
494 keys
.authkeylen
, &istate
, &ostate
))
498 dev_err(priv
->dev
, "aead: unsupported hash algorithm\n");
502 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
&&
503 (memcmp(ctx
->ipad
, istate
.state
, ctx
->state_sz
) ||
504 memcmp(ctx
->opad
, ostate
.state
, ctx
->state_sz
)))
505 ctx
->base
.needs_inv
= true;
507 /* Now copy the keys into the context */
508 for (i
= 0; i
< keys
.enckeylen
/ sizeof(u32
); i
++)
509 ctx
->key
[i
] = cpu_to_le32(((u32
*)keys
.enckey
)[i
]);
510 ctx
->key_len
= keys
.enckeylen
;
512 memcpy(ctx
->ipad
, &istate
.state
, ctx
->state_sz
);
513 memcpy(ctx
->opad
, &ostate
.state
, ctx
->state_sz
);
515 memzero_explicit(&keys
, sizeof(keys
));
519 memzero_explicit(&keys
, sizeof(keys
));
523 static int safexcel_context_control(struct safexcel_cipher_ctx
*ctx
,
524 struct crypto_async_request
*async
,
525 struct safexcel_cipher_req
*sreq
,
526 struct safexcel_command_desc
*cdesc
)
528 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
529 int ctrl_size
= ctx
->key_len
/ sizeof(u32
);
531 cdesc
->control_data
.control1
= ctx
->mode
;
534 /* Take in account the ipad+opad digests */
536 ctrl_size
+= ctx
->state_sz
/ sizeof(u32
);
537 cdesc
->control_data
.control0
=
538 CONTEXT_CONTROL_KEY_EN
|
539 CONTEXT_CONTROL_DIGEST_XCM
|
541 CONTEXT_CONTROL_SIZE(ctrl_size
);
542 } else if (ctx
->alg
== SAFEXCEL_CHACHA20
) {
543 /* Chacha20-Poly1305 */
544 cdesc
->control_data
.control0
=
545 CONTEXT_CONTROL_KEY_EN
|
546 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20
|
547 (sreq
->direction
== SAFEXCEL_ENCRYPT
?
548 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT
:
549 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN
) |
551 CONTEXT_CONTROL_SIZE(ctrl_size
);
554 ctrl_size
+= ctx
->state_sz
/ sizeof(u32
) * 2;
555 cdesc
->control_data
.control0
=
556 CONTEXT_CONTROL_KEY_EN
|
557 CONTEXT_CONTROL_DIGEST_HMAC
|
559 CONTEXT_CONTROL_SIZE(ctrl_size
);
562 if (sreq
->direction
== SAFEXCEL_ENCRYPT
&&
563 (ctx
->xcm
== EIP197_XCM_MODE_CCM
||
564 ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP_GMAC
))
565 cdesc
->control_data
.control0
|=
566 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT
;
567 else if (sreq
->direction
== SAFEXCEL_ENCRYPT
)
568 cdesc
->control_data
.control0
|=
569 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT
;
570 else if (ctx
->xcm
== EIP197_XCM_MODE_CCM
)
571 cdesc
->control_data
.control0
|=
572 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN
;
574 cdesc
->control_data
.control0
|=
575 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN
;
577 if (sreq
->direction
== SAFEXCEL_ENCRYPT
)
578 cdesc
->control_data
.control0
=
579 CONTEXT_CONTROL_TYPE_CRYPTO_OUT
|
580 CONTEXT_CONTROL_KEY_EN
|
581 CONTEXT_CONTROL_SIZE(ctrl_size
);
583 cdesc
->control_data
.control0
=
584 CONTEXT_CONTROL_TYPE_CRYPTO_IN
|
585 CONTEXT_CONTROL_KEY_EN
|
586 CONTEXT_CONTROL_SIZE(ctrl_size
);
589 if (ctx
->alg
== SAFEXCEL_DES
) {
590 cdesc
->control_data
.control0
|=
591 CONTEXT_CONTROL_CRYPTO_ALG_DES
;
592 } else if (ctx
->alg
== SAFEXCEL_3DES
) {
593 cdesc
->control_data
.control0
|=
594 CONTEXT_CONTROL_CRYPTO_ALG_3DES
;
595 } else if (ctx
->alg
== SAFEXCEL_AES
) {
596 switch (ctx
->key_len
>> ctx
->xts
) {
597 case AES_KEYSIZE_128
:
598 cdesc
->control_data
.control0
|=
599 CONTEXT_CONTROL_CRYPTO_ALG_AES128
;
601 case AES_KEYSIZE_192
:
602 cdesc
->control_data
.control0
|=
603 CONTEXT_CONTROL_CRYPTO_ALG_AES192
;
605 case AES_KEYSIZE_256
:
606 cdesc
->control_data
.control0
|=
607 CONTEXT_CONTROL_CRYPTO_ALG_AES256
;
610 dev_err(priv
->dev
, "aes keysize not supported: %u\n",
611 ctx
->key_len
>> ctx
->xts
);
614 } else if (ctx
->alg
== SAFEXCEL_CHACHA20
) {
615 cdesc
->control_data
.control0
|=
616 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20
;
617 } else if (ctx
->alg
== SAFEXCEL_SM4
) {
618 cdesc
->control_data
.control0
|=
619 CONTEXT_CONTROL_CRYPTO_ALG_SM4
;
625 static int safexcel_handle_req_result(struct safexcel_crypto_priv
*priv
, int ring
,
626 struct crypto_async_request
*async
,
627 struct scatterlist
*src
,
628 struct scatterlist
*dst
,
629 unsigned int cryptlen
,
630 struct safexcel_cipher_req
*sreq
,
631 bool *should_complete
, int *ret
)
633 struct skcipher_request
*areq
= skcipher_request_cast(async
);
634 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(areq
);
635 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(skcipher
);
636 struct safexcel_result_desc
*rdesc
;
641 if (unlikely(!sreq
->rdescs
))
644 while (sreq
->rdescs
--) {
645 rdesc
= safexcel_ring_next_rptr(priv
, &priv
->ring
[ring
].rdr
);
648 "cipher: result: could not retrieve the result descriptor\n");
649 *ret
= PTR_ERR(rdesc
);
654 *ret
= safexcel_rdesc_check_errors(priv
, rdesc
);
659 safexcel_complete(priv
, ring
);
662 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_BIDIRECTIONAL
);
664 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_TO_DEVICE
);
665 dma_unmap_sg(priv
->dev
, dst
, sreq
->nr_dst
, DMA_FROM_DEVICE
);
669 * Update IV in req from last crypto output word for CBC modes
671 if ((!ctx
->aead
) && (ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CBC
) &&
672 (sreq
->direction
== SAFEXCEL_ENCRYPT
)) {
673 /* For encrypt take the last output word */
674 sg_pcopy_to_buffer(dst
, sreq
->nr_dst
, areq
->iv
,
675 crypto_skcipher_ivsize(skcipher
),
677 crypto_skcipher_ivsize(skcipher
)));
680 *should_complete
= true;
685 static int safexcel_send_req(struct crypto_async_request
*base
, int ring
,
686 struct safexcel_cipher_req
*sreq
,
687 struct scatterlist
*src
, struct scatterlist
*dst
,
688 unsigned int cryptlen
, unsigned int assoclen
,
689 unsigned int digestsize
, u8
*iv
, int *commands
,
692 struct skcipher_request
*areq
= skcipher_request_cast(base
);
693 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(areq
);
694 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
695 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
696 struct safexcel_command_desc
*cdesc
;
697 struct safexcel_command_desc
*first_cdesc
= NULL
;
698 struct safexcel_result_desc
*rdesc
, *first_rdesc
= NULL
;
699 struct scatterlist
*sg
;
701 unsigned int totlen_src
= cryptlen
+ assoclen
;
702 unsigned int totlen_dst
= totlen_src
;
703 struct safexcel_token
*atoken
;
704 int n_cdesc
= 0, n_rdesc
= 0;
705 int queued
, i
, ret
= 0;
708 sreq
->nr_src
= sg_nents_for_len(src
, totlen_src
);
712 * AEAD has auth tag appended to output for encrypt and
713 * removed from the output for decrypt!
715 if (sreq
->direction
== SAFEXCEL_DECRYPT
)
716 totlen_dst
-= digestsize
;
718 totlen_dst
+= digestsize
;
720 memcpy(ctx
->base
.ctxr
->data
+ ctx
->key_len
/ sizeof(u32
),
721 ctx
->ipad
, ctx
->state_sz
);
723 memcpy(ctx
->base
.ctxr
->data
+ (ctx
->key_len
+
724 ctx
->state_sz
) / sizeof(u32
), ctx
->opad
,
726 } else if ((ctx
->mode
== CONTEXT_CONTROL_CRYPTO_MODE_CBC
) &&
727 (sreq
->direction
== SAFEXCEL_DECRYPT
)) {
729 * Save IV from last crypto input word for CBC modes in decrypt
730 * direction. Need to do this first in case of inplace operation
731 * as it will be overwritten.
733 sg_pcopy_to_buffer(src
, sreq
->nr_src
, areq
->iv
,
734 crypto_skcipher_ivsize(skcipher
),
736 crypto_skcipher_ivsize(skcipher
)));
739 sreq
->nr_dst
= sg_nents_for_len(dst
, totlen_dst
);
742 * Remember actual input length, source buffer length may be
743 * updated in case of inline operation below.
749 sreq
->nr_src
= max(sreq
->nr_src
, sreq
->nr_dst
);
750 sreq
->nr_dst
= sreq
->nr_src
;
751 if (unlikely((totlen_src
|| totlen_dst
) &&
752 (sreq
->nr_src
<= 0))) {
753 dev_err(priv
->dev
, "In-place buffer not large enough (need %d bytes)!",
754 max(totlen_src
, totlen_dst
));
757 dma_map_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_BIDIRECTIONAL
);
759 if (unlikely(totlen_src
&& (sreq
->nr_src
<= 0))) {
760 dev_err(priv
->dev
, "Source buffer not large enough (need %d bytes)!",
764 dma_map_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_TO_DEVICE
);
766 if (unlikely(totlen_dst
&& (sreq
->nr_dst
<= 0))) {
767 dev_err(priv
->dev
, "Dest buffer not large enough (need %d bytes)!",
769 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
,
773 dma_map_sg(priv
->dev
, dst
, sreq
->nr_dst
, DMA_FROM_DEVICE
);
776 memcpy(ctx
->base
.ctxr
->data
, ctx
->key
, ctx
->key_len
);
780 * The EIP97 cannot deal with zero length input packets!
781 * So stuff a dummy command descriptor indicating a 1 byte
782 * (dummy) input packet, using the context record as source.
784 first_cdesc
= safexcel_add_cdesc(priv
, ring
,
785 1, 1, ctx
->base
.ctxr_dma
,
786 1, 1, ctx
->base
.ctxr_dma
,
788 if (IS_ERR(first_cdesc
)) {
789 /* No space left in the command descriptor ring */
790 ret
= PTR_ERR(first_cdesc
);
797 /* command descriptors */
798 for_each_sg(src
, sg
, sreq
->nr_src
, i
) {
799 int len
= sg_dma_len(sg
);
801 /* Do not overflow the request */
805 cdesc
= safexcel_add_cdesc(priv
, ring
, !n_cdesc
,
807 sg_dma_address(sg
), len
, totlen
,
808 ctx
->base
.ctxr_dma
, &atoken
);
810 /* No space left in the command descriptor ring */
811 ret
= PTR_ERR(cdesc
);
824 /* Add context control words and token to first command descriptor */
825 safexcel_context_control(ctx
, base
, sreq
, first_cdesc
);
827 safexcel_aead_token(ctx
, iv
, first_cdesc
, atoken
,
828 sreq
->direction
, cryptlen
,
829 assoclen
, digestsize
);
831 safexcel_skcipher_token(ctx
, iv
, first_cdesc
, atoken
,
834 /* result descriptors */
835 for_each_sg(dst
, sg
, sreq
->nr_dst
, i
) {
836 bool last
= (i
== sreq
->nr_dst
- 1);
837 u32 len
= sg_dma_len(sg
);
839 /* only allow the part of the buffer we know we need */
840 if (len
> totlen_dst
)
846 /* skip over AAD space in buffer - not written */
848 if (assoclen
>= len
) {
852 rdesc
= safexcel_add_rdesc(priv
, ring
, first
, last
,
858 rdesc
= safexcel_add_rdesc(priv
, ring
, first
, last
,
863 /* No space left in the result descriptor ring */
864 ret
= PTR_ERR(rdesc
);
874 if (unlikely(first
)) {
876 * Special case: AEAD decrypt with only AAD data.
877 * In this case there is NO output data from the engine,
878 * but the engine still needs a result descriptor!
879 * Create a dummy one just for catching the result token.
881 rdesc
= safexcel_add_rdesc(priv
, ring
, true, true, 0, 0);
883 /* No space left in the result descriptor ring */
884 ret
= PTR_ERR(rdesc
);
891 safexcel_rdr_req_set(priv
, ring
, first_rdesc
, base
);
898 for (i
= 0; i
< n_rdesc
; i
++)
899 safexcel_ring_rollback_wptr(priv
, &priv
->ring
[ring
].rdr
);
901 for (i
= 0; i
< n_cdesc
; i
++)
902 safexcel_ring_rollback_wptr(priv
, &priv
->ring
[ring
].cdr
);
905 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_BIDIRECTIONAL
);
907 dma_unmap_sg(priv
->dev
, src
, sreq
->nr_src
, DMA_TO_DEVICE
);
908 dma_unmap_sg(priv
->dev
, dst
, sreq
->nr_dst
, DMA_FROM_DEVICE
);
914 static int safexcel_handle_inv_result(struct safexcel_crypto_priv
*priv
,
916 struct crypto_async_request
*base
,
917 struct safexcel_cipher_req
*sreq
,
918 bool *should_complete
, int *ret
)
920 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
921 struct safexcel_result_desc
*rdesc
;
922 int ndesc
= 0, enq_ret
;
926 if (unlikely(!sreq
->rdescs
))
929 while (sreq
->rdescs
--) {
930 rdesc
= safexcel_ring_next_rptr(priv
, &priv
->ring
[ring
].rdr
);
933 "cipher: invalidate: could not retrieve the result descriptor\n");
934 *ret
= PTR_ERR(rdesc
);
939 *ret
= safexcel_rdesc_check_errors(priv
, rdesc
);
944 safexcel_complete(priv
, ring
);
946 if (ctx
->base
.exit_inv
) {
947 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
950 *should_complete
= true;
955 ring
= safexcel_select_ring(priv
);
956 ctx
->base
.ring
= ring
;
958 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
959 enq_ret
= crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
960 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
962 if (enq_ret
!= -EINPROGRESS
)
965 queue_work(priv
->ring
[ring
].workqueue
,
966 &priv
->ring
[ring
].work_data
.work
);
968 *should_complete
= false;
973 static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv
*priv
,
975 struct crypto_async_request
*async
,
976 bool *should_complete
, int *ret
)
978 struct skcipher_request
*req
= skcipher_request_cast(async
);
979 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
982 if (sreq
->needs_inv
) {
983 sreq
->needs_inv
= false;
984 err
= safexcel_handle_inv_result(priv
, ring
, async
, sreq
,
985 should_complete
, ret
);
987 err
= safexcel_handle_req_result(priv
, ring
, async
, req
->src
,
988 req
->dst
, req
->cryptlen
, sreq
,
989 should_complete
, ret
);
995 static int safexcel_aead_handle_result(struct safexcel_crypto_priv
*priv
,
997 struct crypto_async_request
*async
,
998 bool *should_complete
, int *ret
)
1000 struct aead_request
*req
= aead_request_cast(async
);
1001 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
1002 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
1005 if (sreq
->needs_inv
) {
1006 sreq
->needs_inv
= false;
1007 err
= safexcel_handle_inv_result(priv
, ring
, async
, sreq
,
1008 should_complete
, ret
);
1010 err
= safexcel_handle_req_result(priv
, ring
, async
, req
->src
,
1012 req
->cryptlen
+ crypto_aead_authsize(tfm
),
1013 sreq
, should_complete
, ret
);
1019 static int safexcel_cipher_send_inv(struct crypto_async_request
*base
,
1020 int ring
, int *commands
, int *results
)
1022 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
1023 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1026 ret
= safexcel_invalidate_cache(base
, priv
, ctx
->base
.ctxr_dma
, ring
);
1036 static int safexcel_skcipher_send(struct crypto_async_request
*async
, int ring
,
1037 int *commands
, int *results
)
1039 struct skcipher_request
*req
= skcipher_request_cast(async
);
1040 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1041 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
1042 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1045 BUG_ON(!(priv
->flags
& EIP197_TRC_CACHE
) && sreq
->needs_inv
);
1047 if (sreq
->needs_inv
) {
1048 ret
= safexcel_cipher_send_inv(async
, ring
, commands
, results
);
1050 struct crypto_skcipher
*skcipher
= crypto_skcipher_reqtfm(req
);
1051 u8 input_iv
[AES_BLOCK_SIZE
];
1054 * Save input IV in case of CBC decrypt mode
1055 * Will be overwritten with output IV prior to use!
1057 memcpy(input_iv
, req
->iv
, crypto_skcipher_ivsize(skcipher
));
1059 ret
= safexcel_send_req(async
, ring
, sreq
, req
->src
,
1060 req
->dst
, req
->cryptlen
, 0, 0, input_iv
,
1064 sreq
->rdescs
= *results
;
1068 static int safexcel_aead_send(struct crypto_async_request
*async
, int ring
,
1069 int *commands
, int *results
)
1071 struct aead_request
*req
= aead_request_cast(async
);
1072 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
1073 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1074 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
1075 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1078 BUG_ON(!(priv
->flags
& EIP197_TRC_CACHE
) && sreq
->needs_inv
);
1080 if (sreq
->needs_inv
)
1081 ret
= safexcel_cipher_send_inv(async
, ring
, commands
, results
);
1083 ret
= safexcel_send_req(async
, ring
, sreq
, req
->src
, req
->dst
,
1084 req
->cryptlen
, req
->assoclen
,
1085 crypto_aead_authsize(tfm
), req
->iv
,
1087 sreq
->rdescs
= *results
;
1091 static int safexcel_cipher_exit_inv(struct crypto_tfm
*tfm
,
1092 struct crypto_async_request
*base
,
1093 struct safexcel_cipher_req
*sreq
,
1094 struct safexcel_inv_result
*result
)
1096 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1097 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1098 int ring
= ctx
->base
.ring
;
1100 init_completion(&result
->completion
);
1102 ctx
= crypto_tfm_ctx(base
->tfm
);
1103 ctx
->base
.exit_inv
= true;
1104 sreq
->needs_inv
= true;
1106 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
1107 crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
1108 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
1110 queue_work(priv
->ring
[ring
].workqueue
,
1111 &priv
->ring
[ring
].work_data
.work
);
1113 wait_for_completion(&result
->completion
);
1115 if (result
->error
) {
1117 "cipher: sync: invalidate: completion error %d\n",
1119 return result
->error
;
1125 static int safexcel_skcipher_exit_inv(struct crypto_tfm
*tfm
)
1127 EIP197_REQUEST_ON_STACK(req
, skcipher
, EIP197_SKCIPHER_REQ_SIZE
);
1128 struct safexcel_cipher_req
*sreq
= skcipher_request_ctx(req
);
1129 struct safexcel_inv_result result
= {};
1131 memset(req
, 0, sizeof(struct skcipher_request
));
1133 skcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1134 safexcel_inv_complete
, &result
);
1135 skcipher_request_set_tfm(req
, __crypto_skcipher_cast(tfm
));
1137 return safexcel_cipher_exit_inv(tfm
, &req
->base
, sreq
, &result
);
1140 static int safexcel_aead_exit_inv(struct crypto_tfm
*tfm
)
1142 EIP197_REQUEST_ON_STACK(req
, aead
, EIP197_AEAD_REQ_SIZE
);
1143 struct safexcel_cipher_req
*sreq
= aead_request_ctx(req
);
1144 struct safexcel_inv_result result
= {};
1146 memset(req
, 0, sizeof(struct aead_request
));
1148 aead_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
1149 safexcel_inv_complete
, &result
);
1150 aead_request_set_tfm(req
, __crypto_aead_cast(tfm
));
1152 return safexcel_cipher_exit_inv(tfm
, &req
->base
, sreq
, &result
);
1155 static int safexcel_queue_req(struct crypto_async_request
*base
,
1156 struct safexcel_cipher_req
*sreq
,
1157 enum safexcel_cipher_direction dir
)
1159 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(base
->tfm
);
1160 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1163 sreq
->needs_inv
= false;
1164 sreq
->direction
= dir
;
1166 if (ctx
->base
.ctxr
) {
1167 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.needs_inv
) {
1168 sreq
->needs_inv
= true;
1169 ctx
->base
.needs_inv
= false;
1172 ctx
->base
.ring
= safexcel_select_ring(priv
);
1173 ctx
->base
.ctxr
= dma_pool_zalloc(priv
->context_pool
,
1174 EIP197_GFP_FLAGS(*base
),
1175 &ctx
->base
.ctxr_dma
);
1176 if (!ctx
->base
.ctxr
)
1180 ring
= ctx
->base
.ring
;
1182 spin_lock_bh(&priv
->ring
[ring
].queue_lock
);
1183 ret
= crypto_enqueue_request(&priv
->ring
[ring
].queue
, base
);
1184 spin_unlock_bh(&priv
->ring
[ring
].queue_lock
);
1186 queue_work(priv
->ring
[ring
].workqueue
,
1187 &priv
->ring
[ring
].work_data
.work
);
1192 static int safexcel_encrypt(struct skcipher_request
*req
)
1194 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
1198 static int safexcel_decrypt(struct skcipher_request
*req
)
1200 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
1204 static int safexcel_skcipher_cra_init(struct crypto_tfm
*tfm
)
1206 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1207 struct safexcel_alg_template
*tmpl
=
1208 container_of(tfm
->__crt_alg
, struct safexcel_alg_template
,
1211 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm
),
1212 sizeof(struct safexcel_cipher_req
));
1214 ctx
->priv
= tmpl
->priv
;
1216 ctx
->base
.send
= safexcel_skcipher_send
;
1217 ctx
->base
.handle_result
= safexcel_skcipher_handle_result
;
1218 ctx
->ivmask
= EIP197_OPTION_4_TOKEN_IV_CMD
;
1223 static int safexcel_cipher_cra_exit(struct crypto_tfm
*tfm
)
1225 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1227 memzero_explicit(ctx
->key
, sizeof(ctx
->key
));
1229 /* context not allocated, skip invalidation */
1230 if (!ctx
->base
.ctxr
)
1233 memzero_explicit(ctx
->base
.ctxr
->data
, sizeof(ctx
->base
.ctxr
->data
));
1237 static void safexcel_skcipher_cra_exit(struct crypto_tfm
*tfm
)
1239 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1240 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1243 if (safexcel_cipher_cra_exit(tfm
))
1246 if (priv
->flags
& EIP197_TRC_CACHE
) {
1247 ret
= safexcel_skcipher_exit_inv(tfm
);
1249 dev_warn(priv
->dev
, "skcipher: invalidation error %d\n",
1252 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
1253 ctx
->base
.ctxr_dma
);
1257 static void safexcel_aead_cra_exit(struct crypto_tfm
*tfm
)
1259 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1260 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1263 if (safexcel_cipher_cra_exit(tfm
))
1266 if (priv
->flags
& EIP197_TRC_CACHE
) {
1267 ret
= safexcel_aead_exit_inv(tfm
);
1269 dev_warn(priv
->dev
, "aead: invalidation error %d\n",
1272 dma_pool_free(priv
->context_pool
, ctx
->base
.ctxr
,
1273 ctx
->base
.ctxr_dma
);
1277 static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm
*tfm
)
1279 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1281 safexcel_skcipher_cra_init(tfm
);
1282 ctx
->alg
= SAFEXCEL_AES
;
1283 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
1285 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1289 struct safexcel_alg_template safexcel_alg_ecb_aes
= {
1290 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1291 .algo_mask
= SAFEXCEL_ALG_AES
,
1293 .setkey
= safexcel_skcipher_aes_setkey
,
1294 .encrypt
= safexcel_encrypt
,
1295 .decrypt
= safexcel_decrypt
,
1296 .min_keysize
= AES_MIN_KEY_SIZE
,
1297 .max_keysize
= AES_MAX_KEY_SIZE
,
1299 .cra_name
= "ecb(aes)",
1300 .cra_driver_name
= "safexcel-ecb-aes",
1301 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1302 .cra_flags
= CRYPTO_ALG_ASYNC
|
1303 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1304 .cra_blocksize
= AES_BLOCK_SIZE
,
1305 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1307 .cra_init
= safexcel_skcipher_aes_ecb_cra_init
,
1308 .cra_exit
= safexcel_skcipher_cra_exit
,
1309 .cra_module
= THIS_MODULE
,
1314 static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm
*tfm
)
1316 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1318 safexcel_skcipher_cra_init(tfm
);
1319 ctx
->alg
= SAFEXCEL_AES
;
1320 ctx
->blocksz
= AES_BLOCK_SIZE
;
1321 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
1325 struct safexcel_alg_template safexcel_alg_cbc_aes
= {
1326 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1327 .algo_mask
= SAFEXCEL_ALG_AES
,
1329 .setkey
= safexcel_skcipher_aes_setkey
,
1330 .encrypt
= safexcel_encrypt
,
1331 .decrypt
= safexcel_decrypt
,
1332 .min_keysize
= AES_MIN_KEY_SIZE
,
1333 .max_keysize
= AES_MAX_KEY_SIZE
,
1334 .ivsize
= AES_BLOCK_SIZE
,
1336 .cra_name
= "cbc(aes)",
1337 .cra_driver_name
= "safexcel-cbc-aes",
1338 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1339 .cra_flags
= CRYPTO_ALG_ASYNC
|
1340 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1341 .cra_blocksize
= AES_BLOCK_SIZE
,
1342 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1344 .cra_init
= safexcel_skcipher_aes_cbc_cra_init
,
1345 .cra_exit
= safexcel_skcipher_cra_exit
,
1346 .cra_module
= THIS_MODULE
,
1351 static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm
*tfm
)
1353 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1355 safexcel_skcipher_cra_init(tfm
);
1356 ctx
->alg
= SAFEXCEL_AES
;
1357 ctx
->blocksz
= AES_BLOCK_SIZE
;
1358 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CFB
;
1362 struct safexcel_alg_template safexcel_alg_cfb_aes
= {
1363 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1364 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_AES_XFB
,
1366 .setkey
= safexcel_skcipher_aes_setkey
,
1367 .encrypt
= safexcel_encrypt
,
1368 .decrypt
= safexcel_decrypt
,
1369 .min_keysize
= AES_MIN_KEY_SIZE
,
1370 .max_keysize
= AES_MAX_KEY_SIZE
,
1371 .ivsize
= AES_BLOCK_SIZE
,
1373 .cra_name
= "cfb(aes)",
1374 .cra_driver_name
= "safexcel-cfb-aes",
1375 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1376 .cra_flags
= CRYPTO_ALG_ASYNC
|
1377 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1379 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1381 .cra_init
= safexcel_skcipher_aes_cfb_cra_init
,
1382 .cra_exit
= safexcel_skcipher_cra_exit
,
1383 .cra_module
= THIS_MODULE
,
1388 static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm
*tfm
)
1390 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1392 safexcel_skcipher_cra_init(tfm
);
1393 ctx
->alg
= SAFEXCEL_AES
;
1394 ctx
->blocksz
= AES_BLOCK_SIZE
;
1395 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_OFB
;
1399 struct safexcel_alg_template safexcel_alg_ofb_aes
= {
1400 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1401 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_AES_XFB
,
1403 .setkey
= safexcel_skcipher_aes_setkey
,
1404 .encrypt
= safexcel_encrypt
,
1405 .decrypt
= safexcel_decrypt
,
1406 .min_keysize
= AES_MIN_KEY_SIZE
,
1407 .max_keysize
= AES_MAX_KEY_SIZE
,
1408 .ivsize
= AES_BLOCK_SIZE
,
1410 .cra_name
= "ofb(aes)",
1411 .cra_driver_name
= "safexcel-ofb-aes",
1412 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1413 .cra_flags
= CRYPTO_ALG_ASYNC
|
1414 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1416 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1418 .cra_init
= safexcel_skcipher_aes_ofb_cra_init
,
1419 .cra_exit
= safexcel_skcipher_cra_exit
,
1420 .cra_module
= THIS_MODULE
,
1425 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher
*ctfm
,
1426 const u8
*key
, unsigned int len
)
1428 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
1429 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1430 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1431 struct crypto_aes_ctx aes
;
1433 unsigned int keylen
;
1435 /* last 4 bytes of key are the nonce! */
1436 ctx
->nonce
= *(u32
*)(key
+ len
- CTR_RFC3686_NONCE_SIZE
);
1437 /* exclude the nonce here */
1438 keylen
= len
- CTR_RFC3686_NONCE_SIZE
;
1439 ret
= aes_expandkey(&aes
, key
, keylen
);
1443 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
1444 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++) {
1445 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
1446 ctx
->base
.needs_inv
= true;
1452 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++)
1453 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
1455 ctx
->key_len
= keylen
;
1457 memzero_explicit(&aes
, sizeof(aes
));
1461 static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm
*tfm
)
1463 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1465 safexcel_skcipher_cra_init(tfm
);
1466 ctx
->alg
= SAFEXCEL_AES
;
1467 ctx
->blocksz
= AES_BLOCK_SIZE
;
1468 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
;
1472 struct safexcel_alg_template safexcel_alg_ctr_aes
= {
1473 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1474 .algo_mask
= SAFEXCEL_ALG_AES
,
1476 .setkey
= safexcel_skcipher_aesctr_setkey
,
1477 .encrypt
= safexcel_encrypt
,
1478 .decrypt
= safexcel_decrypt
,
1479 /* Add nonce size */
1480 .min_keysize
= AES_MIN_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
1481 .max_keysize
= AES_MAX_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
1482 .ivsize
= CTR_RFC3686_IV_SIZE
,
1484 .cra_name
= "rfc3686(ctr(aes))",
1485 .cra_driver_name
= "safexcel-ctr-aes",
1486 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1487 .cra_flags
= CRYPTO_ALG_ASYNC
|
1488 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1490 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1492 .cra_init
= safexcel_skcipher_aes_ctr_cra_init
,
1493 .cra_exit
= safexcel_skcipher_cra_exit
,
1494 .cra_module
= THIS_MODULE
,
1499 static int safexcel_des_setkey(struct crypto_skcipher
*ctfm
, const u8
*key
,
1502 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(ctfm
);
1503 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1506 ret
= verify_skcipher_des_key(ctfm
, key
);
1510 /* if context exits and key changed, need to invalidate it */
1511 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
1512 if (memcmp(ctx
->key
, key
, len
))
1513 ctx
->base
.needs_inv
= true;
1515 memcpy(ctx
->key
, key
, len
);
1521 static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm
*tfm
)
1523 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1525 safexcel_skcipher_cra_init(tfm
);
1526 ctx
->alg
= SAFEXCEL_DES
;
1527 ctx
->blocksz
= DES_BLOCK_SIZE
;
1528 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1529 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
1533 struct safexcel_alg_template safexcel_alg_cbc_des
= {
1534 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1535 .algo_mask
= SAFEXCEL_ALG_DES
,
1537 .setkey
= safexcel_des_setkey
,
1538 .encrypt
= safexcel_encrypt
,
1539 .decrypt
= safexcel_decrypt
,
1540 .min_keysize
= DES_KEY_SIZE
,
1541 .max_keysize
= DES_KEY_SIZE
,
1542 .ivsize
= DES_BLOCK_SIZE
,
1544 .cra_name
= "cbc(des)",
1545 .cra_driver_name
= "safexcel-cbc-des",
1546 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1547 .cra_flags
= CRYPTO_ALG_ASYNC
|
1548 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1549 .cra_blocksize
= DES_BLOCK_SIZE
,
1550 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1552 .cra_init
= safexcel_skcipher_des_cbc_cra_init
,
1553 .cra_exit
= safexcel_skcipher_cra_exit
,
1554 .cra_module
= THIS_MODULE
,
1559 static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm
*tfm
)
1561 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1563 safexcel_skcipher_cra_init(tfm
);
1564 ctx
->alg
= SAFEXCEL_DES
;
1565 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
1567 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1571 struct safexcel_alg_template safexcel_alg_ecb_des
= {
1572 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1573 .algo_mask
= SAFEXCEL_ALG_DES
,
1575 .setkey
= safexcel_des_setkey
,
1576 .encrypt
= safexcel_encrypt
,
1577 .decrypt
= safexcel_decrypt
,
1578 .min_keysize
= DES_KEY_SIZE
,
1579 .max_keysize
= DES_KEY_SIZE
,
1581 .cra_name
= "ecb(des)",
1582 .cra_driver_name
= "safexcel-ecb-des",
1583 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1584 .cra_flags
= CRYPTO_ALG_ASYNC
|
1585 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1586 .cra_blocksize
= DES_BLOCK_SIZE
,
1587 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1589 .cra_init
= safexcel_skcipher_des_ecb_cra_init
,
1590 .cra_exit
= safexcel_skcipher_cra_exit
,
1591 .cra_module
= THIS_MODULE
,
1596 static int safexcel_des3_ede_setkey(struct crypto_skcipher
*ctfm
,
1597 const u8
*key
, unsigned int len
)
1599 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(ctfm
);
1600 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
1603 err
= verify_skcipher_des3_key(ctfm
, key
);
1607 /* if context exits and key changed, need to invalidate it */
1608 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
1609 if (memcmp(ctx
->key
, key
, len
))
1610 ctx
->base
.needs_inv
= true;
1612 memcpy(ctx
->key
, key
, len
);
1618 static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm
*tfm
)
1620 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1622 safexcel_skcipher_cra_init(tfm
);
1623 ctx
->alg
= SAFEXCEL_3DES
;
1624 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1625 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1626 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
1630 struct safexcel_alg_template safexcel_alg_cbc_des3_ede
= {
1631 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1632 .algo_mask
= SAFEXCEL_ALG_DES
,
1634 .setkey
= safexcel_des3_ede_setkey
,
1635 .encrypt
= safexcel_encrypt
,
1636 .decrypt
= safexcel_decrypt
,
1637 .min_keysize
= DES3_EDE_KEY_SIZE
,
1638 .max_keysize
= DES3_EDE_KEY_SIZE
,
1639 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1641 .cra_name
= "cbc(des3_ede)",
1642 .cra_driver_name
= "safexcel-cbc-des3_ede",
1643 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1644 .cra_flags
= CRYPTO_ALG_ASYNC
|
1645 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1646 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1647 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1649 .cra_init
= safexcel_skcipher_des3_cbc_cra_init
,
1650 .cra_exit
= safexcel_skcipher_cra_exit
,
1651 .cra_module
= THIS_MODULE
,
1656 static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm
*tfm
)
1658 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1660 safexcel_skcipher_cra_init(tfm
);
1661 ctx
->alg
= SAFEXCEL_3DES
;
1662 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
1664 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1668 struct safexcel_alg_template safexcel_alg_ecb_des3_ede
= {
1669 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
1670 .algo_mask
= SAFEXCEL_ALG_DES
,
1672 .setkey
= safexcel_des3_ede_setkey
,
1673 .encrypt
= safexcel_encrypt
,
1674 .decrypt
= safexcel_decrypt
,
1675 .min_keysize
= DES3_EDE_KEY_SIZE
,
1676 .max_keysize
= DES3_EDE_KEY_SIZE
,
1678 .cra_name
= "ecb(des3_ede)",
1679 .cra_driver_name
= "safexcel-ecb-des3_ede",
1680 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1681 .cra_flags
= CRYPTO_ALG_ASYNC
|
1682 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1683 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1684 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1686 .cra_init
= safexcel_skcipher_des3_ecb_cra_init
,
1687 .cra_exit
= safexcel_skcipher_cra_exit
,
1688 .cra_module
= THIS_MODULE
,
1693 static int safexcel_aead_encrypt(struct aead_request
*req
)
1695 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
1697 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_ENCRYPT
);
1700 static int safexcel_aead_decrypt(struct aead_request
*req
)
1702 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
1704 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_DECRYPT
);
1707 static int safexcel_aead_cra_init(struct crypto_tfm
*tfm
)
1709 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1710 struct safexcel_alg_template
*tmpl
=
1711 container_of(tfm
->__crt_alg
, struct safexcel_alg_template
,
1714 crypto_aead_set_reqsize(__crypto_aead_cast(tfm
),
1715 sizeof(struct safexcel_cipher_req
));
1717 ctx
->priv
= tmpl
->priv
;
1719 ctx
->alg
= SAFEXCEL_AES
; /* default */
1720 ctx
->blocksz
= AES_BLOCK_SIZE
;
1721 ctx
->ivmask
= EIP197_OPTION_4_TOKEN_IV_CMD
;
1723 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
; /* default */
1725 ctx
->base
.send
= safexcel_aead_send
;
1726 ctx
->base
.handle_result
= safexcel_aead_handle_result
;
1730 static int safexcel_aead_sha1_cra_init(struct crypto_tfm
*tfm
)
1732 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1734 safexcel_aead_cra_init(tfm
);
1735 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA1
;
1736 ctx
->state_sz
= SHA1_DIGEST_SIZE
;
1740 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes
= {
1741 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1742 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA1
,
1744 .setkey
= safexcel_aead_setkey
,
1745 .encrypt
= safexcel_aead_encrypt
,
1746 .decrypt
= safexcel_aead_decrypt
,
1747 .ivsize
= AES_BLOCK_SIZE
,
1748 .maxauthsize
= SHA1_DIGEST_SIZE
,
1750 .cra_name
= "authenc(hmac(sha1),cbc(aes))",
1751 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-aes",
1752 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1753 .cra_flags
= CRYPTO_ALG_ASYNC
|
1754 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1755 .cra_blocksize
= AES_BLOCK_SIZE
,
1756 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1758 .cra_init
= safexcel_aead_sha1_cra_init
,
1759 .cra_exit
= safexcel_aead_cra_exit
,
1760 .cra_module
= THIS_MODULE
,
1765 static int safexcel_aead_sha256_cra_init(struct crypto_tfm
*tfm
)
1767 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1769 safexcel_aead_cra_init(tfm
);
1770 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA256
;
1771 ctx
->state_sz
= SHA256_DIGEST_SIZE
;
1775 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes
= {
1776 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1777 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
1779 .setkey
= safexcel_aead_setkey
,
1780 .encrypt
= safexcel_aead_encrypt
,
1781 .decrypt
= safexcel_aead_decrypt
,
1782 .ivsize
= AES_BLOCK_SIZE
,
1783 .maxauthsize
= SHA256_DIGEST_SIZE
,
1785 .cra_name
= "authenc(hmac(sha256),cbc(aes))",
1786 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-aes",
1787 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1788 .cra_flags
= CRYPTO_ALG_ASYNC
|
1789 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1790 .cra_blocksize
= AES_BLOCK_SIZE
,
1791 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1793 .cra_init
= safexcel_aead_sha256_cra_init
,
1794 .cra_exit
= safexcel_aead_cra_exit
,
1795 .cra_module
= THIS_MODULE
,
1800 static int safexcel_aead_sha224_cra_init(struct crypto_tfm
*tfm
)
1802 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1804 safexcel_aead_cra_init(tfm
);
1805 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA224
;
1806 ctx
->state_sz
= SHA256_DIGEST_SIZE
;
1810 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes
= {
1811 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1812 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
1814 .setkey
= safexcel_aead_setkey
,
1815 .encrypt
= safexcel_aead_encrypt
,
1816 .decrypt
= safexcel_aead_decrypt
,
1817 .ivsize
= AES_BLOCK_SIZE
,
1818 .maxauthsize
= SHA224_DIGEST_SIZE
,
1820 .cra_name
= "authenc(hmac(sha224),cbc(aes))",
1821 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-aes",
1822 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1823 .cra_flags
= CRYPTO_ALG_ASYNC
|
1824 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1825 .cra_blocksize
= AES_BLOCK_SIZE
,
1826 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1828 .cra_init
= safexcel_aead_sha224_cra_init
,
1829 .cra_exit
= safexcel_aead_cra_exit
,
1830 .cra_module
= THIS_MODULE
,
1835 static int safexcel_aead_sha512_cra_init(struct crypto_tfm
*tfm
)
1837 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1839 safexcel_aead_cra_init(tfm
);
1840 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA512
;
1841 ctx
->state_sz
= SHA512_DIGEST_SIZE
;
1845 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes
= {
1846 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1847 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
1849 .setkey
= safexcel_aead_setkey
,
1850 .encrypt
= safexcel_aead_encrypt
,
1851 .decrypt
= safexcel_aead_decrypt
,
1852 .ivsize
= AES_BLOCK_SIZE
,
1853 .maxauthsize
= SHA512_DIGEST_SIZE
,
1855 .cra_name
= "authenc(hmac(sha512),cbc(aes))",
1856 .cra_driver_name
= "safexcel-authenc-hmac-sha512-cbc-aes",
1857 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1858 .cra_flags
= CRYPTO_ALG_ASYNC
|
1859 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1860 .cra_blocksize
= AES_BLOCK_SIZE
,
1861 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1863 .cra_init
= safexcel_aead_sha512_cra_init
,
1864 .cra_exit
= safexcel_aead_cra_exit
,
1865 .cra_module
= THIS_MODULE
,
1870 static int safexcel_aead_sha384_cra_init(struct crypto_tfm
*tfm
)
1872 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1874 safexcel_aead_cra_init(tfm
);
1875 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA384
;
1876 ctx
->state_sz
= SHA512_DIGEST_SIZE
;
1880 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes
= {
1881 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1882 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
1884 .setkey
= safexcel_aead_setkey
,
1885 .encrypt
= safexcel_aead_encrypt
,
1886 .decrypt
= safexcel_aead_decrypt
,
1887 .ivsize
= AES_BLOCK_SIZE
,
1888 .maxauthsize
= SHA384_DIGEST_SIZE
,
1890 .cra_name
= "authenc(hmac(sha384),cbc(aes))",
1891 .cra_driver_name
= "safexcel-authenc-hmac-sha384-cbc-aes",
1892 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1893 .cra_flags
= CRYPTO_ALG_ASYNC
|
1894 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1895 .cra_blocksize
= AES_BLOCK_SIZE
,
1896 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1898 .cra_init
= safexcel_aead_sha384_cra_init
,
1899 .cra_exit
= safexcel_aead_cra_exit
,
1900 .cra_module
= THIS_MODULE
,
1905 static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm
*tfm
)
1907 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1909 safexcel_aead_sha1_cra_init(tfm
);
1910 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
1911 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1912 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1916 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede
= {
1917 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1918 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA1
,
1920 .setkey
= safexcel_aead_setkey
,
1921 .encrypt
= safexcel_aead_encrypt
,
1922 .decrypt
= safexcel_aead_decrypt
,
1923 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1924 .maxauthsize
= SHA1_DIGEST_SIZE
,
1926 .cra_name
= "authenc(hmac(sha1),cbc(des3_ede))",
1927 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1928 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1929 .cra_flags
= CRYPTO_ALG_ASYNC
|
1930 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1931 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1932 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1934 .cra_init
= safexcel_aead_sha1_des3_cra_init
,
1935 .cra_exit
= safexcel_aead_cra_exit
,
1936 .cra_module
= THIS_MODULE
,
1941 static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm
*tfm
)
1943 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1945 safexcel_aead_sha256_cra_init(tfm
);
1946 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
1947 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1948 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1952 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede
= {
1953 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1954 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
1956 .setkey
= safexcel_aead_setkey
,
1957 .encrypt
= safexcel_aead_encrypt
,
1958 .decrypt
= safexcel_aead_decrypt
,
1959 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1960 .maxauthsize
= SHA256_DIGEST_SIZE
,
1962 .cra_name
= "authenc(hmac(sha256),cbc(des3_ede))",
1963 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1964 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
1965 .cra_flags
= CRYPTO_ALG_ASYNC
|
1966 CRYPTO_ALG_KERN_DRIVER_ONLY
,
1967 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
1968 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
1970 .cra_init
= safexcel_aead_sha256_des3_cra_init
,
1971 .cra_exit
= safexcel_aead_cra_exit
,
1972 .cra_module
= THIS_MODULE
,
1977 static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm
*tfm
)
1979 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1981 safexcel_aead_sha224_cra_init(tfm
);
1982 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
1983 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
1984 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
1988 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede
= {
1989 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
1990 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
1992 .setkey
= safexcel_aead_setkey
,
1993 .encrypt
= safexcel_aead_encrypt
,
1994 .decrypt
= safexcel_aead_decrypt
,
1995 .ivsize
= DES3_EDE_BLOCK_SIZE
,
1996 .maxauthsize
= SHA224_DIGEST_SIZE
,
1998 .cra_name
= "authenc(hmac(sha224),cbc(des3_ede))",
1999 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-des3_ede",
2000 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2001 .cra_flags
= CRYPTO_ALG_ASYNC
|
2002 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2003 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2004 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2006 .cra_init
= safexcel_aead_sha224_des3_cra_init
,
2007 .cra_exit
= safexcel_aead_cra_exit
,
2008 .cra_module
= THIS_MODULE
,
2013 static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm
*tfm
)
2015 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2017 safexcel_aead_sha512_cra_init(tfm
);
2018 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
2019 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
2020 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2024 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede
= {
2025 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2026 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2028 .setkey
= safexcel_aead_setkey
,
2029 .encrypt
= safexcel_aead_encrypt
,
2030 .decrypt
= safexcel_aead_decrypt
,
2031 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2032 .maxauthsize
= SHA512_DIGEST_SIZE
,
2034 .cra_name
= "authenc(hmac(sha512),cbc(des3_ede))",
2035 .cra_driver_name
= "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2036 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2037 .cra_flags
= CRYPTO_ALG_ASYNC
|
2038 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2039 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2040 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2042 .cra_init
= safexcel_aead_sha512_des3_cra_init
,
2043 .cra_exit
= safexcel_aead_cra_exit
,
2044 .cra_module
= THIS_MODULE
,
2049 static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm
*tfm
)
2051 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2053 safexcel_aead_sha384_cra_init(tfm
);
2054 ctx
->alg
= SAFEXCEL_3DES
; /* override default */
2055 ctx
->blocksz
= DES3_EDE_BLOCK_SIZE
;
2056 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2060 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede
= {
2061 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2062 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2064 .setkey
= safexcel_aead_setkey
,
2065 .encrypt
= safexcel_aead_encrypt
,
2066 .decrypt
= safexcel_aead_decrypt
,
2067 .ivsize
= DES3_EDE_BLOCK_SIZE
,
2068 .maxauthsize
= SHA384_DIGEST_SIZE
,
2070 .cra_name
= "authenc(hmac(sha384),cbc(des3_ede))",
2071 .cra_driver_name
= "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2072 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2073 .cra_flags
= CRYPTO_ALG_ASYNC
|
2074 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2075 .cra_blocksize
= DES3_EDE_BLOCK_SIZE
,
2076 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2078 .cra_init
= safexcel_aead_sha384_des3_cra_init
,
2079 .cra_exit
= safexcel_aead_cra_exit
,
2080 .cra_module
= THIS_MODULE
,
2085 static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm
*tfm
)
2087 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2089 safexcel_aead_sha1_cra_init(tfm
);
2090 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2091 ctx
->blocksz
= DES_BLOCK_SIZE
;
2092 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2096 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des
= {
2097 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2098 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA1
,
2100 .setkey
= safexcel_aead_setkey
,
2101 .encrypt
= safexcel_aead_encrypt
,
2102 .decrypt
= safexcel_aead_decrypt
,
2103 .ivsize
= DES_BLOCK_SIZE
,
2104 .maxauthsize
= SHA1_DIGEST_SIZE
,
2106 .cra_name
= "authenc(hmac(sha1),cbc(des))",
2107 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-des",
2108 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2109 .cra_flags
= CRYPTO_ALG_ASYNC
|
2110 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2111 .cra_blocksize
= DES_BLOCK_SIZE
,
2112 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2114 .cra_init
= safexcel_aead_sha1_des_cra_init
,
2115 .cra_exit
= safexcel_aead_cra_exit
,
2116 .cra_module
= THIS_MODULE
,
2121 static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm
*tfm
)
2123 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2125 safexcel_aead_sha256_cra_init(tfm
);
2126 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2127 ctx
->blocksz
= DES_BLOCK_SIZE
;
2128 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2132 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des
= {
2133 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2134 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
2136 .setkey
= safexcel_aead_setkey
,
2137 .encrypt
= safexcel_aead_encrypt
,
2138 .decrypt
= safexcel_aead_decrypt
,
2139 .ivsize
= DES_BLOCK_SIZE
,
2140 .maxauthsize
= SHA256_DIGEST_SIZE
,
2142 .cra_name
= "authenc(hmac(sha256),cbc(des))",
2143 .cra_driver_name
= "safexcel-authenc-hmac-sha256-cbc-des",
2144 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2145 .cra_flags
= CRYPTO_ALG_ASYNC
|
2146 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2147 .cra_blocksize
= DES_BLOCK_SIZE
,
2148 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2150 .cra_init
= safexcel_aead_sha256_des_cra_init
,
2151 .cra_exit
= safexcel_aead_cra_exit
,
2152 .cra_module
= THIS_MODULE
,
2157 static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm
*tfm
)
2159 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2161 safexcel_aead_sha224_cra_init(tfm
);
2162 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2163 ctx
->blocksz
= DES_BLOCK_SIZE
;
2164 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2168 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des
= {
2169 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2170 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_256
,
2172 .setkey
= safexcel_aead_setkey
,
2173 .encrypt
= safexcel_aead_encrypt
,
2174 .decrypt
= safexcel_aead_decrypt
,
2175 .ivsize
= DES_BLOCK_SIZE
,
2176 .maxauthsize
= SHA224_DIGEST_SIZE
,
2178 .cra_name
= "authenc(hmac(sha224),cbc(des))",
2179 .cra_driver_name
= "safexcel-authenc-hmac-sha224-cbc-des",
2180 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2181 .cra_flags
= CRYPTO_ALG_ASYNC
|
2182 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2183 .cra_blocksize
= DES_BLOCK_SIZE
,
2184 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2186 .cra_init
= safexcel_aead_sha224_des_cra_init
,
2187 .cra_exit
= safexcel_aead_cra_exit
,
2188 .cra_module
= THIS_MODULE
,
2193 static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm
*tfm
)
2195 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2197 safexcel_aead_sha512_cra_init(tfm
);
2198 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2199 ctx
->blocksz
= DES_BLOCK_SIZE
;
2200 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2204 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des
= {
2205 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2206 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2208 .setkey
= safexcel_aead_setkey
,
2209 .encrypt
= safexcel_aead_encrypt
,
2210 .decrypt
= safexcel_aead_decrypt
,
2211 .ivsize
= DES_BLOCK_SIZE
,
2212 .maxauthsize
= SHA512_DIGEST_SIZE
,
2214 .cra_name
= "authenc(hmac(sha512),cbc(des))",
2215 .cra_driver_name
= "safexcel-authenc-hmac-sha512-cbc-des",
2216 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2217 .cra_flags
= CRYPTO_ALG_ASYNC
|
2218 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2219 .cra_blocksize
= DES_BLOCK_SIZE
,
2220 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2222 .cra_init
= safexcel_aead_sha512_des_cra_init
,
2223 .cra_exit
= safexcel_aead_cra_exit
,
2224 .cra_module
= THIS_MODULE
,
2229 static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm
*tfm
)
2231 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2233 safexcel_aead_sha384_cra_init(tfm
);
2234 ctx
->alg
= SAFEXCEL_DES
; /* override default */
2235 ctx
->blocksz
= DES_BLOCK_SIZE
;
2236 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
2240 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des
= {
2241 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2242 .algo_mask
= SAFEXCEL_ALG_DES
| SAFEXCEL_ALG_SHA2_512
,
2244 .setkey
= safexcel_aead_setkey
,
2245 .encrypt
= safexcel_aead_encrypt
,
2246 .decrypt
= safexcel_aead_decrypt
,
2247 .ivsize
= DES_BLOCK_SIZE
,
2248 .maxauthsize
= SHA384_DIGEST_SIZE
,
2250 .cra_name
= "authenc(hmac(sha384),cbc(des))",
2251 .cra_driver_name
= "safexcel-authenc-hmac-sha384-cbc-des",
2252 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2253 .cra_flags
= CRYPTO_ALG_ASYNC
|
2254 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2255 .cra_blocksize
= DES_BLOCK_SIZE
,
2256 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2258 .cra_init
= safexcel_aead_sha384_des_cra_init
,
2259 .cra_exit
= safexcel_aead_cra_exit
,
2260 .cra_module
= THIS_MODULE
,
2265 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm
*tfm
)
2267 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2269 safexcel_aead_sha1_cra_init(tfm
);
2270 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2274 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes
= {
2275 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2276 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA1
,
2278 .setkey
= safexcel_aead_setkey
,
2279 .encrypt
= safexcel_aead_encrypt
,
2280 .decrypt
= safexcel_aead_decrypt
,
2281 .ivsize
= CTR_RFC3686_IV_SIZE
,
2282 .maxauthsize
= SHA1_DIGEST_SIZE
,
2284 .cra_name
= "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2285 .cra_driver_name
= "safexcel-authenc-hmac-sha1-ctr-aes",
2286 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2287 .cra_flags
= CRYPTO_ALG_ASYNC
|
2288 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2290 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2292 .cra_init
= safexcel_aead_sha1_ctr_cra_init
,
2293 .cra_exit
= safexcel_aead_cra_exit
,
2294 .cra_module
= THIS_MODULE
,
2299 static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm
*tfm
)
2301 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2303 safexcel_aead_sha256_cra_init(tfm
);
2304 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2308 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes
= {
2309 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2310 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
2312 .setkey
= safexcel_aead_setkey
,
2313 .encrypt
= safexcel_aead_encrypt
,
2314 .decrypt
= safexcel_aead_decrypt
,
2315 .ivsize
= CTR_RFC3686_IV_SIZE
,
2316 .maxauthsize
= SHA256_DIGEST_SIZE
,
2318 .cra_name
= "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2319 .cra_driver_name
= "safexcel-authenc-hmac-sha256-ctr-aes",
2320 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2321 .cra_flags
= CRYPTO_ALG_ASYNC
|
2322 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2324 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2326 .cra_init
= safexcel_aead_sha256_ctr_cra_init
,
2327 .cra_exit
= safexcel_aead_cra_exit
,
2328 .cra_module
= THIS_MODULE
,
2333 static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm
*tfm
)
2335 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2337 safexcel_aead_sha224_cra_init(tfm
);
2338 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2342 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes
= {
2343 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2344 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_256
,
2346 .setkey
= safexcel_aead_setkey
,
2347 .encrypt
= safexcel_aead_encrypt
,
2348 .decrypt
= safexcel_aead_decrypt
,
2349 .ivsize
= CTR_RFC3686_IV_SIZE
,
2350 .maxauthsize
= SHA224_DIGEST_SIZE
,
2352 .cra_name
= "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2353 .cra_driver_name
= "safexcel-authenc-hmac-sha224-ctr-aes",
2354 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2355 .cra_flags
= CRYPTO_ALG_ASYNC
|
2356 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2358 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2360 .cra_init
= safexcel_aead_sha224_ctr_cra_init
,
2361 .cra_exit
= safexcel_aead_cra_exit
,
2362 .cra_module
= THIS_MODULE
,
2367 static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm
*tfm
)
2369 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2371 safexcel_aead_sha512_cra_init(tfm
);
2372 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2376 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes
= {
2377 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2378 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
2380 .setkey
= safexcel_aead_setkey
,
2381 .encrypt
= safexcel_aead_encrypt
,
2382 .decrypt
= safexcel_aead_decrypt
,
2383 .ivsize
= CTR_RFC3686_IV_SIZE
,
2384 .maxauthsize
= SHA512_DIGEST_SIZE
,
2386 .cra_name
= "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2387 .cra_driver_name
= "safexcel-authenc-hmac-sha512-ctr-aes",
2388 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2389 .cra_flags
= CRYPTO_ALG_ASYNC
|
2390 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2392 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2394 .cra_init
= safexcel_aead_sha512_ctr_cra_init
,
2395 .cra_exit
= safexcel_aead_cra_exit
,
2396 .cra_module
= THIS_MODULE
,
2401 static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm
*tfm
)
2403 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2405 safexcel_aead_sha384_cra_init(tfm
);
2406 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
; /* override default */
2410 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes
= {
2411 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2412 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_SHA2_512
,
2414 .setkey
= safexcel_aead_setkey
,
2415 .encrypt
= safexcel_aead_encrypt
,
2416 .decrypt
= safexcel_aead_decrypt
,
2417 .ivsize
= CTR_RFC3686_IV_SIZE
,
2418 .maxauthsize
= SHA384_DIGEST_SIZE
,
2420 .cra_name
= "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2421 .cra_driver_name
= "safexcel-authenc-hmac-sha384-ctr-aes",
2422 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2423 .cra_flags
= CRYPTO_ALG_ASYNC
|
2424 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2426 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2428 .cra_init
= safexcel_aead_sha384_ctr_cra_init
,
2429 .cra_exit
= safexcel_aead_cra_exit
,
2430 .cra_module
= THIS_MODULE
,
2435 static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher
*ctfm
,
2436 const u8
*key
, unsigned int len
)
2438 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
2439 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2440 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
2441 struct crypto_aes_ctx aes
;
2443 unsigned int keylen
;
2445 /* Check for illegal XTS keys */
2446 ret
= xts_verify_key(ctfm
, key
, len
);
2450 /* Only half of the key data is cipher key */
2451 keylen
= (len
>> 1);
2452 ret
= aes_expandkey(&aes
, key
, keylen
);
2456 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2457 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++) {
2458 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
2459 ctx
->base
.needs_inv
= true;
2465 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++)
2466 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
2468 /* The other half is the tweak key */
2469 ret
= aes_expandkey(&aes
, (u8
*)(key
+ keylen
), keylen
);
2473 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2474 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++) {
2475 if (le32_to_cpu(ctx
->key
[i
+ keylen
/ sizeof(u32
)]) !=
2477 ctx
->base
.needs_inv
= true;
2483 for (i
= 0; i
< keylen
/ sizeof(u32
); i
++)
2484 ctx
->key
[i
+ keylen
/ sizeof(u32
)] =
2485 cpu_to_le32(aes
.key_enc
[i
]);
2487 ctx
->key_len
= keylen
<< 1;
2489 memzero_explicit(&aes
, sizeof(aes
));
2493 static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm
*tfm
)
2495 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2497 safexcel_skcipher_cra_init(tfm
);
2498 ctx
->alg
= SAFEXCEL_AES
;
2499 ctx
->blocksz
= AES_BLOCK_SIZE
;
2501 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_XTS
;
2505 static int safexcel_encrypt_xts(struct skcipher_request
*req
)
2507 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
2509 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
2513 static int safexcel_decrypt_xts(struct skcipher_request
*req
)
2515 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
2517 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
2521 struct safexcel_alg_template safexcel_alg_xts_aes
= {
2522 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
2523 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_AES_XTS
,
2525 .setkey
= safexcel_skcipher_aesxts_setkey
,
2526 .encrypt
= safexcel_encrypt_xts
,
2527 .decrypt
= safexcel_decrypt_xts
,
2528 /* XTS actually uses 2 AES keys glued together */
2529 .min_keysize
= AES_MIN_KEY_SIZE
* 2,
2530 .max_keysize
= AES_MAX_KEY_SIZE
* 2,
2531 .ivsize
= XTS_BLOCK_SIZE
,
2533 .cra_name
= "xts(aes)",
2534 .cra_driver_name
= "safexcel-xts-aes",
2535 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2536 .cra_flags
= CRYPTO_ALG_ASYNC
|
2537 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2538 .cra_blocksize
= XTS_BLOCK_SIZE
,
2539 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2541 .cra_init
= safexcel_skcipher_aes_xts_cra_init
,
2542 .cra_exit
= safexcel_skcipher_cra_exit
,
2543 .cra_module
= THIS_MODULE
,
2548 static int safexcel_aead_gcm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
2551 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
2552 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2553 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
2554 struct crypto_aes_ctx aes
;
2555 u32 hashkey
[AES_BLOCK_SIZE
>> 2];
2558 ret
= aes_expandkey(&aes
, key
, len
);
2560 memzero_explicit(&aes
, sizeof(aes
));
2564 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2565 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
2566 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
2567 ctx
->base
.needs_inv
= true;
2573 for (i
= 0; i
< len
/ sizeof(u32
); i
++)
2574 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
2578 /* Compute hash key by encrypting zeroes with cipher key */
2579 crypto_cipher_clear_flags(ctx
->hkaes
, CRYPTO_TFM_REQ_MASK
);
2580 crypto_cipher_set_flags(ctx
->hkaes
, crypto_aead_get_flags(ctfm
) &
2581 CRYPTO_TFM_REQ_MASK
);
2582 ret
= crypto_cipher_setkey(ctx
->hkaes
, key
, len
);
2586 memset(hashkey
, 0, AES_BLOCK_SIZE
);
2587 crypto_cipher_encrypt_one(ctx
->hkaes
, (u8
*)hashkey
, (u8
*)hashkey
);
2589 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2590 for (i
= 0; i
< AES_BLOCK_SIZE
/ sizeof(u32
); i
++) {
2591 if (be32_to_cpu(ctx
->ipad
[i
]) != hashkey
[i
]) {
2592 ctx
->base
.needs_inv
= true;
2598 for (i
= 0; i
< AES_BLOCK_SIZE
/ sizeof(u32
); i
++)
2599 ctx
->ipad
[i
] = cpu_to_be32(hashkey
[i
]);
2601 memzero_explicit(hashkey
, AES_BLOCK_SIZE
);
2602 memzero_explicit(&aes
, sizeof(aes
));
2606 static int safexcel_aead_gcm_cra_init(struct crypto_tfm
*tfm
)
2608 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2610 safexcel_aead_cra_init(tfm
);
2611 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_GHASH
;
2612 ctx
->state_sz
= GHASH_BLOCK_SIZE
;
2613 ctx
->xcm
= EIP197_XCM_MODE_GCM
;
2614 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_XCM
; /* override default */
2616 ctx
->hkaes
= crypto_alloc_cipher("aes", 0, 0);
2617 return PTR_ERR_OR_ZERO(ctx
->hkaes
);
2620 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm
*tfm
)
2622 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2624 crypto_free_cipher(ctx
->hkaes
);
2625 safexcel_aead_cra_exit(tfm
);
2628 static int safexcel_aead_gcm_setauthsize(struct crypto_aead
*tfm
,
2629 unsigned int authsize
)
2631 return crypto_gcm_check_authsize(authsize
);
2634 struct safexcel_alg_template safexcel_alg_gcm
= {
2635 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2636 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_GHASH
,
2638 .setkey
= safexcel_aead_gcm_setkey
,
2639 .setauthsize
= safexcel_aead_gcm_setauthsize
,
2640 .encrypt
= safexcel_aead_encrypt
,
2641 .decrypt
= safexcel_aead_decrypt
,
2642 .ivsize
= GCM_AES_IV_SIZE
,
2643 .maxauthsize
= GHASH_DIGEST_SIZE
,
2645 .cra_name
= "gcm(aes)",
2646 .cra_driver_name
= "safexcel-gcm-aes",
2647 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2648 .cra_flags
= CRYPTO_ALG_ASYNC
|
2649 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2651 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2653 .cra_init
= safexcel_aead_gcm_cra_init
,
2654 .cra_exit
= safexcel_aead_gcm_cra_exit
,
2655 .cra_module
= THIS_MODULE
,
2660 static int safexcel_aead_ccm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
2663 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
2664 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2665 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
2666 struct crypto_aes_ctx aes
;
2669 ret
= aes_expandkey(&aes
, key
, len
);
2671 memzero_explicit(&aes
, sizeof(aes
));
2675 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
) {
2676 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
2677 if (le32_to_cpu(ctx
->key
[i
]) != aes
.key_enc
[i
]) {
2678 ctx
->base
.needs_inv
= true;
2684 for (i
= 0; i
< len
/ sizeof(u32
); i
++) {
2685 ctx
->key
[i
] = cpu_to_le32(aes
.key_enc
[i
]);
2686 ctx
->ipad
[i
+ 2 * AES_BLOCK_SIZE
/ sizeof(u32
)] =
2687 cpu_to_be32(aes
.key_enc
[i
]);
2691 ctx
->state_sz
= 2 * AES_BLOCK_SIZE
+ len
;
2693 if (len
== AES_KEYSIZE_192
)
2694 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC192
;
2695 else if (len
== AES_KEYSIZE_256
)
2696 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC256
;
2698 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC128
;
2700 memzero_explicit(&aes
, sizeof(aes
));
2704 static int safexcel_aead_ccm_cra_init(struct crypto_tfm
*tfm
)
2706 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2708 safexcel_aead_cra_init(tfm
);
2709 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_XCBC128
;
2710 ctx
->state_sz
= 3 * AES_BLOCK_SIZE
;
2711 ctx
->xcm
= EIP197_XCM_MODE_CCM
;
2712 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_XCM
; /* override default */
2717 static int safexcel_aead_ccm_setauthsize(struct crypto_aead
*tfm
,
2718 unsigned int authsize
)
2720 /* Borrowed from crypto/ccm.c */
2737 static int safexcel_ccm_encrypt(struct aead_request
*req
)
2739 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
2741 if (req
->iv
[0] < 1 || req
->iv
[0] > 7)
2744 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_ENCRYPT
);
2747 static int safexcel_ccm_decrypt(struct aead_request
*req
)
2749 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
2751 if (req
->iv
[0] < 1 || req
->iv
[0] > 7)
2754 return safexcel_queue_req(&req
->base
, creq
, SAFEXCEL_DECRYPT
);
2757 struct safexcel_alg_template safexcel_alg_ccm
= {
2758 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2759 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_CBC_MAC_ALL
,
2761 .setkey
= safexcel_aead_ccm_setkey
,
2762 .setauthsize
= safexcel_aead_ccm_setauthsize
,
2763 .encrypt
= safexcel_ccm_encrypt
,
2764 .decrypt
= safexcel_ccm_decrypt
,
2765 .ivsize
= AES_BLOCK_SIZE
,
2766 .maxauthsize
= AES_BLOCK_SIZE
,
2768 .cra_name
= "ccm(aes)",
2769 .cra_driver_name
= "safexcel-ccm-aes",
2770 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2771 .cra_flags
= CRYPTO_ALG_ASYNC
|
2772 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2774 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2776 .cra_init
= safexcel_aead_ccm_cra_init
,
2777 .cra_exit
= safexcel_aead_cra_exit
,
2778 .cra_module
= THIS_MODULE
,
2783 static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx
*ctx
,
2786 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
2788 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
2789 if (memcmp(ctx
->key
, key
, CHACHA_KEY_SIZE
))
2790 ctx
->base
.needs_inv
= true;
2792 memcpy(ctx
->key
, key
, CHACHA_KEY_SIZE
);
2793 ctx
->key_len
= CHACHA_KEY_SIZE
;
2796 static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher
*ctfm
,
2797 const u8
*key
, unsigned int len
)
2799 struct safexcel_cipher_ctx
*ctx
= crypto_skcipher_ctx(ctfm
);
2801 if (len
!= CHACHA_KEY_SIZE
)
2804 safexcel_chacha20_setkey(ctx
, key
);
2809 static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm
*tfm
)
2811 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2813 safexcel_skcipher_cra_init(tfm
);
2814 ctx
->alg
= SAFEXCEL_CHACHA20
;
2816 ctx
->mode
= CONTEXT_CONTROL_CHACHA20_MODE_256_32
;
2820 struct safexcel_alg_template safexcel_alg_chacha20
= {
2821 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
2822 .algo_mask
= SAFEXCEL_ALG_CHACHA20
,
2824 .setkey
= safexcel_skcipher_chacha20_setkey
,
2825 .encrypt
= safexcel_encrypt
,
2826 .decrypt
= safexcel_decrypt
,
2827 .min_keysize
= CHACHA_KEY_SIZE
,
2828 .max_keysize
= CHACHA_KEY_SIZE
,
2829 .ivsize
= CHACHA_IV_SIZE
,
2831 .cra_name
= "chacha20",
2832 .cra_driver_name
= "safexcel-chacha20",
2833 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
2834 .cra_flags
= CRYPTO_ALG_ASYNC
|
2835 CRYPTO_ALG_KERN_DRIVER_ONLY
,
2837 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
2839 .cra_init
= safexcel_skcipher_chacha20_cra_init
,
2840 .cra_exit
= safexcel_skcipher_cra_exit
,
2841 .cra_module
= THIS_MODULE
,
2846 static int safexcel_aead_chachapoly_setkey(struct crypto_aead
*ctfm
,
2847 const u8
*key
, unsigned int len
)
2849 struct safexcel_cipher_ctx
*ctx
= crypto_aead_ctx(ctfm
);
2851 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
&&
2852 len
> EIP197_AEAD_IPSEC_NONCE_SIZE
) {
2853 /* ESP variant has nonce appended to key */
2854 len
-= EIP197_AEAD_IPSEC_NONCE_SIZE
;
2855 ctx
->nonce
= *(u32
*)(key
+ len
);
2857 if (len
!= CHACHA_KEY_SIZE
)
2860 safexcel_chacha20_setkey(ctx
, key
);
2865 static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead
*tfm
,
2866 unsigned int authsize
)
2868 if (authsize
!= POLY1305_DIGEST_SIZE
)
2873 static int safexcel_aead_chachapoly_crypt(struct aead_request
*req
,
2874 enum safexcel_cipher_direction dir
)
2876 struct safexcel_cipher_req
*creq
= aead_request_ctx(req
);
2877 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
2878 struct crypto_tfm
*tfm
= crypto_aead_tfm(aead
);
2879 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2880 struct aead_request
*subreq
= aead_request_ctx(req
);
2881 u32 key
[CHACHA_KEY_SIZE
/ sizeof(u32
) + 1];
2885 * Instead of wasting time detecting umpteen silly corner cases,
2886 * just dump all "small" requests to the fallback implementation.
2887 * HW would not be faster on such small requests anyway.
2889 if (likely((ctx
->aead
!= EIP197_AEAD_TYPE_IPSEC_ESP
||
2890 req
->assoclen
>= EIP197_AEAD_IPSEC_IV_SIZE
) &&
2891 req
->cryptlen
> POLY1305_DIGEST_SIZE
)) {
2892 return safexcel_queue_req(&req
->base
, creq
, dir
);
2895 /* HW cannot do full (AAD+payload) zero length, use fallback */
2896 memcpy(key
, ctx
->key
, CHACHA_KEY_SIZE
);
2897 if (ctx
->aead
== EIP197_AEAD_TYPE_IPSEC_ESP
) {
2898 /* ESP variant has nonce appended to the key */
2899 key
[CHACHA_KEY_SIZE
/ sizeof(u32
)] = ctx
->nonce
;
2900 ret
= crypto_aead_setkey(ctx
->fback
, (u8
*)key
,
2902 EIP197_AEAD_IPSEC_NONCE_SIZE
);
2904 ret
= crypto_aead_setkey(ctx
->fback
, (u8
*)key
,
2908 crypto_aead_clear_flags(aead
, CRYPTO_TFM_REQ_MASK
);
2909 crypto_aead_set_flags(aead
, crypto_aead_get_flags(ctx
->fback
) &
2910 CRYPTO_TFM_REQ_MASK
);
2914 aead_request_set_tfm(subreq
, ctx
->fback
);
2915 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
2917 aead_request_set_crypt(subreq
, req
->src
, req
->dst
, req
->cryptlen
,
2919 aead_request_set_ad(subreq
, req
->assoclen
);
2921 return (dir
== SAFEXCEL_ENCRYPT
) ?
2922 crypto_aead_encrypt(subreq
) :
2923 crypto_aead_decrypt(subreq
);
2926 static int safexcel_aead_chachapoly_encrypt(struct aead_request
*req
)
2928 return safexcel_aead_chachapoly_crypt(req
, SAFEXCEL_ENCRYPT
);
2931 static int safexcel_aead_chachapoly_decrypt(struct aead_request
*req
)
2933 return safexcel_aead_chachapoly_crypt(req
, SAFEXCEL_DECRYPT
);
2936 static int safexcel_aead_fallback_cra_init(struct crypto_tfm
*tfm
)
2938 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
2939 struct aead_alg
*alg
= crypto_aead_alg(aead
);
2940 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2942 safexcel_aead_cra_init(tfm
);
2944 /* Allocate fallback implementation */
2945 ctx
->fback
= crypto_alloc_aead(alg
->base
.cra_name
, 0,
2947 CRYPTO_ALG_NEED_FALLBACK
);
2948 if (IS_ERR(ctx
->fback
))
2949 return PTR_ERR(ctx
->fback
);
2951 crypto_aead_set_reqsize(aead
, max(sizeof(struct safexcel_cipher_req
),
2952 sizeof(struct aead_request
) +
2953 crypto_aead_reqsize(ctx
->fback
)));
2958 static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm
*tfm
)
2960 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2962 safexcel_aead_fallback_cra_init(tfm
);
2963 ctx
->alg
= SAFEXCEL_CHACHA20
;
2964 ctx
->mode
= CONTEXT_CONTROL_CHACHA20_MODE_256_32
|
2965 CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK
;
2967 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_POLY1305
;
2968 ctx
->state_sz
= 0; /* Precomputed by HW */
2972 static void safexcel_aead_fallback_cra_exit(struct crypto_tfm
*tfm
)
2974 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
2976 crypto_free_aead(ctx
->fback
);
2977 safexcel_aead_cra_exit(tfm
);
2980 struct safexcel_alg_template safexcel_alg_chachapoly
= {
2981 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
2982 .algo_mask
= SAFEXCEL_ALG_CHACHA20
| SAFEXCEL_ALG_POLY1305
,
2984 .setkey
= safexcel_aead_chachapoly_setkey
,
2985 .setauthsize
= safexcel_aead_chachapoly_setauthsize
,
2986 .encrypt
= safexcel_aead_chachapoly_encrypt
,
2987 .decrypt
= safexcel_aead_chachapoly_decrypt
,
2988 .ivsize
= CHACHAPOLY_IV_SIZE
,
2989 .maxauthsize
= POLY1305_DIGEST_SIZE
,
2991 .cra_name
= "rfc7539(chacha20,poly1305)",
2992 .cra_driver_name
= "safexcel-chacha20-poly1305",
2993 /* +1 to put it above HW chacha + SW poly */
2994 .cra_priority
= SAFEXCEL_CRA_PRIORITY
+ 1,
2995 .cra_flags
= CRYPTO_ALG_ASYNC
|
2996 CRYPTO_ALG_KERN_DRIVER_ONLY
|
2997 CRYPTO_ALG_NEED_FALLBACK
,
2999 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3001 .cra_init
= safexcel_aead_chachapoly_cra_init
,
3002 .cra_exit
= safexcel_aead_fallback_cra_exit
,
3003 .cra_module
= THIS_MODULE
,
3008 static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm
*tfm
)
3010 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3013 ret
= safexcel_aead_chachapoly_cra_init(tfm
);
3014 ctx
->aead
= EIP197_AEAD_TYPE_IPSEC_ESP
;
3015 ctx
->aadskip
= EIP197_AEAD_IPSEC_IV_SIZE
;
3019 struct safexcel_alg_template safexcel_alg_chachapoly_esp
= {
3020 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
3021 .algo_mask
= SAFEXCEL_ALG_CHACHA20
| SAFEXCEL_ALG_POLY1305
,
3023 .setkey
= safexcel_aead_chachapoly_setkey
,
3024 .setauthsize
= safexcel_aead_chachapoly_setauthsize
,
3025 .encrypt
= safexcel_aead_chachapoly_encrypt
,
3026 .decrypt
= safexcel_aead_chachapoly_decrypt
,
3027 .ivsize
= CHACHAPOLY_IV_SIZE
- EIP197_AEAD_IPSEC_NONCE_SIZE
,
3028 .maxauthsize
= POLY1305_DIGEST_SIZE
,
3030 .cra_name
= "rfc7539esp(chacha20,poly1305)",
3031 .cra_driver_name
= "safexcel-chacha20-poly1305-esp",
3032 /* +1 to put it above HW chacha + SW poly */
3033 .cra_priority
= SAFEXCEL_CRA_PRIORITY
+ 1,
3034 .cra_flags
= CRYPTO_ALG_ASYNC
|
3035 CRYPTO_ALG_KERN_DRIVER_ONLY
|
3036 CRYPTO_ALG_NEED_FALLBACK
,
3038 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3040 .cra_init
= safexcel_aead_chachapolyesp_cra_init
,
3041 .cra_exit
= safexcel_aead_fallback_cra_exit
,
3042 .cra_module
= THIS_MODULE
,
3047 static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher
*ctfm
,
3048 const u8
*key
, unsigned int len
)
3050 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
3051 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3052 struct safexcel_crypto_priv
*priv
= ctx
->priv
;
3054 if (len
!= SM4_KEY_SIZE
)
3057 if (priv
->flags
& EIP197_TRC_CACHE
&& ctx
->base
.ctxr_dma
)
3058 if (memcmp(ctx
->key
, key
, SM4_KEY_SIZE
))
3059 ctx
->base
.needs_inv
= true;
3061 memcpy(ctx
->key
, key
, SM4_KEY_SIZE
);
3062 ctx
->key_len
= SM4_KEY_SIZE
;
3067 static int safexcel_sm4_blk_encrypt(struct skcipher_request
*req
)
3069 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3070 if (req
->cryptlen
& (SM4_BLOCK_SIZE
- 1))
3073 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
3077 static int safexcel_sm4_blk_decrypt(struct skcipher_request
*req
)
3079 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3080 if (req
->cryptlen
& (SM4_BLOCK_SIZE
- 1))
3083 return safexcel_queue_req(&req
->base
, skcipher_request_ctx(req
),
3087 static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm
*tfm
)
3089 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3091 safexcel_skcipher_cra_init(tfm
);
3092 ctx
->alg
= SAFEXCEL_SM4
;
3093 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_ECB
;
3095 ctx
->ivmask
= EIP197_OPTION_2_TOKEN_IV_CMD
;
3099 struct safexcel_alg_template safexcel_alg_ecb_sm4
= {
3100 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3101 .algo_mask
= SAFEXCEL_ALG_SM4
,
3103 .setkey
= safexcel_skcipher_sm4_setkey
,
3104 .encrypt
= safexcel_sm4_blk_encrypt
,
3105 .decrypt
= safexcel_sm4_blk_decrypt
,
3106 .min_keysize
= SM4_KEY_SIZE
,
3107 .max_keysize
= SM4_KEY_SIZE
,
3109 .cra_name
= "ecb(sm4)",
3110 .cra_driver_name
= "safexcel-ecb-sm4",
3111 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3112 .cra_flags
= CRYPTO_ALG_ASYNC
|
3113 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3114 .cra_blocksize
= SM4_BLOCK_SIZE
,
3115 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3117 .cra_init
= safexcel_skcipher_sm4_ecb_cra_init
,
3118 .cra_exit
= safexcel_skcipher_cra_exit
,
3119 .cra_module
= THIS_MODULE
,
3124 static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm
*tfm
)
3126 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3128 safexcel_skcipher_cra_init(tfm
);
3129 ctx
->alg
= SAFEXCEL_SM4
;
3130 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3131 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CBC
;
3135 struct safexcel_alg_template safexcel_alg_cbc_sm4
= {
3136 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3137 .algo_mask
= SAFEXCEL_ALG_SM4
,
3139 .setkey
= safexcel_skcipher_sm4_setkey
,
3140 .encrypt
= safexcel_sm4_blk_encrypt
,
3141 .decrypt
= safexcel_sm4_blk_decrypt
,
3142 .min_keysize
= SM4_KEY_SIZE
,
3143 .max_keysize
= SM4_KEY_SIZE
,
3144 .ivsize
= SM4_BLOCK_SIZE
,
3146 .cra_name
= "cbc(sm4)",
3147 .cra_driver_name
= "safexcel-cbc-sm4",
3148 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3149 .cra_flags
= CRYPTO_ALG_ASYNC
|
3150 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3151 .cra_blocksize
= SM4_BLOCK_SIZE
,
3152 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3154 .cra_init
= safexcel_skcipher_sm4_cbc_cra_init
,
3155 .cra_exit
= safexcel_skcipher_cra_exit
,
3156 .cra_module
= THIS_MODULE
,
3161 static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm
*tfm
)
3163 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3165 safexcel_skcipher_cra_init(tfm
);
3166 ctx
->alg
= SAFEXCEL_SM4
;
3167 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3168 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_OFB
;
3172 struct safexcel_alg_template safexcel_alg_ofb_sm4
= {
3173 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3174 .algo_mask
= SAFEXCEL_ALG_SM4
| SAFEXCEL_ALG_AES_XFB
,
3176 .setkey
= safexcel_skcipher_sm4_setkey
,
3177 .encrypt
= safexcel_encrypt
,
3178 .decrypt
= safexcel_decrypt
,
3179 .min_keysize
= SM4_KEY_SIZE
,
3180 .max_keysize
= SM4_KEY_SIZE
,
3181 .ivsize
= SM4_BLOCK_SIZE
,
3183 .cra_name
= "ofb(sm4)",
3184 .cra_driver_name
= "safexcel-ofb-sm4",
3185 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3186 .cra_flags
= CRYPTO_ALG_ASYNC
|
3187 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3189 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3191 .cra_init
= safexcel_skcipher_sm4_ofb_cra_init
,
3192 .cra_exit
= safexcel_skcipher_cra_exit
,
3193 .cra_module
= THIS_MODULE
,
3198 static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm
*tfm
)
3200 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3202 safexcel_skcipher_cra_init(tfm
);
3203 ctx
->alg
= SAFEXCEL_SM4
;
3204 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3205 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CFB
;
3209 struct safexcel_alg_template safexcel_alg_cfb_sm4
= {
3210 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3211 .algo_mask
= SAFEXCEL_ALG_SM4
| SAFEXCEL_ALG_AES_XFB
,
3213 .setkey
= safexcel_skcipher_sm4_setkey
,
3214 .encrypt
= safexcel_encrypt
,
3215 .decrypt
= safexcel_decrypt
,
3216 .min_keysize
= SM4_KEY_SIZE
,
3217 .max_keysize
= SM4_KEY_SIZE
,
3218 .ivsize
= SM4_BLOCK_SIZE
,
3220 .cra_name
= "cfb(sm4)",
3221 .cra_driver_name
= "safexcel-cfb-sm4",
3222 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3223 .cra_flags
= CRYPTO_ALG_ASYNC
|
3224 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3226 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3228 .cra_init
= safexcel_skcipher_sm4_cfb_cra_init
,
3229 .cra_exit
= safexcel_skcipher_cra_exit
,
3230 .cra_module
= THIS_MODULE
,
3235 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher
*ctfm
,
3236 const u8
*key
, unsigned int len
)
3238 struct crypto_tfm
*tfm
= crypto_skcipher_tfm(ctfm
);
3239 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3241 /* last 4 bytes of key are the nonce! */
3242 ctx
->nonce
= *(u32
*)(key
+ len
- CTR_RFC3686_NONCE_SIZE
);
3243 /* exclude the nonce here */
3244 len
-= CTR_RFC3686_NONCE_SIZE
;
3246 return safexcel_skcipher_sm4_setkey(ctfm
, key
, len
);
3249 static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm
*tfm
)
3251 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3253 safexcel_skcipher_cra_init(tfm
);
3254 ctx
->alg
= SAFEXCEL_SM4
;
3255 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3256 ctx
->mode
= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD
;
3260 struct safexcel_alg_template safexcel_alg_ctr_sm4
= {
3261 .type
= SAFEXCEL_ALG_TYPE_SKCIPHER
,
3262 .algo_mask
= SAFEXCEL_ALG_SM4
,
3264 .setkey
= safexcel_skcipher_sm4ctr_setkey
,
3265 .encrypt
= safexcel_encrypt
,
3266 .decrypt
= safexcel_decrypt
,
3267 /* Add nonce size */
3268 .min_keysize
= SM4_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
3269 .max_keysize
= SM4_KEY_SIZE
+ CTR_RFC3686_NONCE_SIZE
,
3270 .ivsize
= CTR_RFC3686_IV_SIZE
,
3272 .cra_name
= "rfc3686(ctr(sm4))",
3273 .cra_driver_name
= "safexcel-ctr-sm4",
3274 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3275 .cra_flags
= CRYPTO_ALG_ASYNC
|
3276 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3278 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3280 .cra_init
= safexcel_skcipher_sm4_ctr_cra_init
,
3281 .cra_exit
= safexcel_skcipher_cra_exit
,
3282 .cra_module
= THIS_MODULE
,
3287 static int safexcel_aead_sm4_blk_encrypt(struct aead_request
*req
)
3289 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3290 if (req
->cryptlen
& (SM4_BLOCK_SIZE
- 1))
3293 return safexcel_queue_req(&req
->base
, aead_request_ctx(req
),
3297 static int safexcel_aead_sm4_blk_decrypt(struct aead_request
*req
)
3299 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
3301 /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3302 if ((req
->cryptlen
- crypto_aead_authsize(tfm
)) & (SM4_BLOCK_SIZE
- 1))
3305 return safexcel_queue_req(&req
->base
, aead_request_ctx(req
),
3309 static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm
*tfm
)
3311 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3313 safexcel_aead_cra_init(tfm
);
3314 ctx
->alg
= SAFEXCEL_SM4
;
3315 ctx
->blocksz
= SM4_BLOCK_SIZE
;
3316 ctx
->hash_alg
= CONTEXT_CONTROL_CRYPTO_ALG_SHA1
;
3317 ctx
->state_sz
= SHA1_DIGEST_SIZE
;
3321 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4
= {
3322 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
3323 .algo_mask
= SAFEXCEL_ALG_SM4
| SAFEXCEL_ALG_SHA1
,
3325 .setkey
= safexcel_aead_setkey
,
3326 .encrypt
= safexcel_aead_sm4_blk_encrypt
,
3327 .decrypt
= safexcel_aead_sm4_blk_decrypt
,
3328 .ivsize
= SM4_BLOCK_SIZE
,
3329 .maxauthsize
= SHA1_DIGEST_SIZE
,
3331 .cra_name
= "authenc(hmac(sha1),cbc(sm4))",
3332 .cra_driver_name
= "safexcel-authenc-hmac-sha1-cbc-sm4",
3333 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3334 .cra_flags
= CRYPTO_ALG_ASYNC
|
3335 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3336 .cra_blocksize
= SM4_BLOCK_SIZE
,
3337 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3339 .cra_init
= safexcel_aead_sm4cbc_sha1_cra_init
,
3340 .cra_exit
= safexcel_aead_cra_exit
,
3341 .cra_module
= THIS_MODULE
,
3346 static int safexcel_aead_fallback_setkey(struct crypto_aead
*ctfm
,
3347 const u8
*key
, unsigned int len
)
3349 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3350 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3352 /* Keep fallback cipher synchronized */
3353 return crypto_aead_setkey(ctx
->fback
, (u8
*)key
, len
) ?:
3354 safexcel_aead_setkey(ctfm
, key
, len
);
3357 static int safexcel_aead_fallback_setauthsize(struct crypto_aead
*ctfm
,
3358 unsigned int authsize
)
3360 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3361 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3363 /* Keep fallback cipher synchronized */
3364 return crypto_aead_setauthsize(ctx
->fback
, authsize
);
3367 static int safexcel_aead_fallback_crypt(struct aead_request
*req
,
3368 enum safexcel_cipher_direction dir
)
3370 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
3371 struct crypto_tfm
*tfm
= crypto_aead_tfm(aead
);
3372 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3373 struct aead_request
*subreq
= aead_request_ctx(req
);
3375 aead_request_set_tfm(subreq
, ctx
->fback
);
3376 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
3378 aead_request_set_crypt(subreq
, req
->src
, req
->dst
, req
->cryptlen
,
3380 aead_request_set_ad(subreq
, req
->assoclen
);
3382 return (dir
== SAFEXCEL_ENCRYPT
) ?
3383 crypto_aead_encrypt(subreq
) :
3384 crypto_aead_decrypt(subreq
);
/*
 * Encrypt entry point for authenc(hmac(sm3),cbc(sm4)).
 * Rejects non-blocksize-multiple payloads in software and falls back to
 * the software cipher for the all-zero-length case the HW cannot handle.
 */
static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}
/*
 * Decrypt entry point for authenc(hmac(sm3),cbc(sm4)).
 * Note cryptlen includes the authentication tag on decrypt, so the
 * blocksize check is applied to cryptlen minus the tag size.
 */
static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}
/*
 * Transform init for SM4-CBC + HMAC-SM3: sets up the fallback cipher
 * (needed for the zero-length corner case) and selects the SM4/SM3
 * hardware algorithms in the context.
 */
static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}
/* Template for authenc(hmac(sm3),cbc(sm4)); needs a software fallback
 * (CRYPTO_ALG_NEED_FALLBACK) for the zero-length input corner case. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
/*
 * Transform init for SM4-CTR + HMAC-SHA1: reuses the CBC/SHA1 init and
 * only switches the context to counter mode with a loaded IV/counter.
 */
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
/* Template for authenc(hmac(sha1),rfc3686(ctr(sm4))). CTR is a stream
 * mode, hence cra_blocksize = 1. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
/*
 * Transform init for SM4-CTR + HMAC-SM3: reuses the CBC/SM3 init and
 * only switches the context to counter mode with a loaded IV/counter.
 */
static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
/* Template for authenc(hmac(sm3),rfc3686(ctr(sm4))). CTR is a stream
 * mode, hence cra_blocksize = 1. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3524 static int safexcel_rfc4106_gcm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
3527 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3528 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3530 /* last 4 bytes of key are the nonce! */
3531 ctx
->nonce
= *(u32
*)(key
+ len
- CTR_RFC3686_NONCE_SIZE
);
3533 len
-= CTR_RFC3686_NONCE_SIZE
;
3534 return safexcel_aead_gcm_setkey(ctfm
, key
, len
);
/*
 * Validate the requested tag size for rfc4106(gcm(aes)); the generic
 * helper enforces the sizes RFC4106 permits.
 */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	int ret;

	ret = crypto_rfc4106_check_authsize(authsize);
	return ret;
}
3543 static int safexcel_rfc4106_encrypt(struct aead_request
*req
)
3545 return crypto_ipsec_check_assoclen(req
->assoclen
) ?:
3546 safexcel_aead_encrypt(req
);
3549 static int safexcel_rfc4106_decrypt(struct aead_request
*req
)
3551 return crypto_ipsec_check_assoclen(req
->assoclen
) ?:
3552 safexcel_aead_decrypt(req
);
/*
 * Transform init for rfc4106(gcm(aes)): plain GCM init plus marking the
 * context as IPsec ESP so the explicit IV in the AAD is skipped.
 */
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
/* Template for rfc4106(gcm(aes)), i.e. AES-GCM as used by IPsec ESP. */
struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
		},
	},
};
/*
 * rfc4543 (GMAC) only supports the full 16-byte GHASH tag; reject any
 * other requested size.
 */
static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}
/*
 * Transform init for rfc4543(gcm(aes)): plain GCM init plus marking the
 * context as IPsec ESP GMAC (authentication-only mode).
 */
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}
/* Template for rfc4543(gcm(aes)) - AES-GMAC for IPsec ESP; reuses the
 * rfc4106 setkey/encrypt/decrypt wrappers. */
struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
		},
	},
};
3635 static int safexcel_rfc4309_ccm_setkey(struct crypto_aead
*ctfm
, const u8
*key
,
3638 struct crypto_tfm
*tfm
= crypto_aead_tfm(ctfm
);
3639 struct safexcel_cipher_ctx
*ctx
= crypto_tfm_ctx(tfm
);
3641 /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3642 *(u8
*)&ctx
->nonce
= EIP197_AEAD_IPSEC_COUNTER_SIZE
- 1;
3643 /* last 3 bytes of key are the nonce! */
3644 memcpy((u8
*)&ctx
->nonce
+ 1, key
+ len
-
3645 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE
,
3646 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE
);
3648 len
-= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE
;
3649 return safexcel_aead_ccm_setkey(ctfm
, key
, len
);
/*
 * RFC4309 restricts the CCM ICV to 8, 12 or 16 bytes.
 * NOTE(review): the switch body was elided in this extract and is
 * reconstructed from the crypto/ccm.c convention cited below - confirm
 * against the original file.
 */
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
/*
 * rfc4309 encrypt wrapper: the ESP AAD is either 16 bytes (no ESN) or
 * 20 bytes (extended sequence numbers); anything else is rejected.
 */
static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}
/*
 * rfc4309 decrypt wrapper: same 16/20 byte ESP AAD restriction as the
 * encrypt path.
 */
static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
/*
 * Transform init for rfc4309(ccm(aes)): plain CCM init plus marking the
 * context as IPsec ESP so the explicit IV in the AAD is skipped.
 */
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3701 struct safexcel_alg_template safexcel_alg_rfc4309_ccm
= {
3702 .type
= SAFEXCEL_ALG_TYPE_AEAD
,
3703 .algo_mask
= SAFEXCEL_ALG_AES
| SAFEXCEL_ALG_CBC_MAC_ALL
,
3705 .setkey
= safexcel_rfc4309_ccm_setkey
,
3706 .setauthsize
= safexcel_rfc4309_ccm_setauthsize
,
3707 .encrypt
= safexcel_rfc4309_ccm_encrypt
,
3708 .decrypt
= safexcel_rfc4309_ccm_decrypt
,
3709 .ivsize
= EIP197_AEAD_IPSEC_IV_SIZE
,
3710 .maxauthsize
= AES_BLOCK_SIZE
,
3712 .cra_name
= "rfc4309(ccm(aes))",
3713 .cra_driver_name
= "safexcel-rfc4309-ccm-aes",
3714 .cra_priority
= SAFEXCEL_CRA_PRIORITY
,
3715 .cra_flags
= CRYPTO_ALG_ASYNC
|
3716 CRYPTO_ALG_KERN_DRIVER_ONLY
,
3718 .cra_ctxsize
= sizeof(struct safexcel_cipher_ctx
),
3720 .cra_init
= safexcel_rfc4309_ccm_cra_init
,
3721 .cra_exit
= safexcel_aead_cra_exit
,
3722 .cra_module
= THIS_MODULE
,