// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */
#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha1.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}
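
/*
 * A note on the pad_type handling above: command word 0 only has a 2-bit
 * pad_type field, so the two low bits of the requested pad type are stored
 * there and the remaining high bits go into extend_pad.
 */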

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}
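
/*
 * Likewise, the 3-bit crypto mode above does not fit a single field:
 * judging by the field names, bit 2 of cm lands at bit 31 of command
 * word 1 (crypto_mode31) and bits 1:0 at bits 9:8 (crypto_mode9_8).
 */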

static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt,
				  bool check_blocksize)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
		return -EINVAL;

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}
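
/*
 * The six wrappers above encode the transform requirements in their names:
 * "iv"/"noiv" selects whether an IV is passed down, while "block"/"stream"
 * selects whether req->cryptlen must be AES block aligned (the
 * check_blocksize argument of crypto4xx_crypt()).
 */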

/*
 * AES Functions
 */
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT;
	 * it's the DIR_(IN|OUT)BOUND that matters.
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}
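
/*
 * Note that the key size is recorded twice above:
 * sa_command_1.bf.key_len is expressed in 64-bit units (keylen >> 3
 * gives 2/3/4 for AES-128/192/256), while keylen << 2 folds the byte
 * count into its slot in the sa_contents word.
 */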

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
				  CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}
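
/*
 * RFC 3686 appends a 4-byte nonce to the raw AES key at setkey time. The
 * counter block the hardware consumes is nonce (4 bytes) || per-request
 * IV (8 bytes) || 32-bit block counter starting at 1, assembled in the
 * two functions below.
 */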

int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32-bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fallback if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			      : crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv_stream(req)
		       : crypto4xx_decrypt_iv_stream(req);
}
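
/*
 * The "counter + nblks < counter" test above exploits unsigned wraparound:
 * it is true exactly when the low 32-bit counter would overflow during
 * this request, which is the one case the engine cannot handle and the
 * synchronous software fallback must cover.
 */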

static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
				    CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}
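
/*
 * On the CCM check above: req->iv[0] carries the CCM L' value (the size
 * of the length/counter field minus one), so 1 and 3 correspond to the
 * 2- and 4-byte counter fields the engine supports; all other sizes are
 * handed to the software fallback.
 */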

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}

/*
 * AES-CCM Functions
 */

int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	return 0;
}
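
/*
 * Two SAs are prepared above: the inbound one (ctx->sa_in) runs
 * SA_OPCODE_HASH_DECRYPT for decrypt-and-verify, while the outbound one
 * (ctx->sa_out) runs SA_OPCODE_ENCRYPT_HASH in DIR_OUTBOUND and also sets
 * SA_COPY_PAD; everything else is identical between the two copies.
 */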

static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}
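
/*
 * The SA is copied into the on-stack tmp_sa above so the per-request
 * digest length (and, for 2-byte counter fields, the ICM crypto mode bit)
 * can be patched in without modifying the SA shared by all requests on
 * this tfm.
 */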

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_aes_ctx ctx;
	uint8_t src[16] = { 0 };
	int rc;

	rc = aes_expandkey(&ctx, key, keylen);
	if (rc) {
		pr_err("aes_expandkey() failed: %d\n", rc);
		return rc;
	}

	aes_encrypt(&ctx, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}
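
/*
 * The GHASH hash key is defined by GCM as H = AES_K(0^128), i.e. the
 * encryption of the all-zero block under the cipher key. It is computed
 * in software above and then stored in the SA's inner digest so the
 * engine can evaluate GHASH itself.
 */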

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
		return -EINVAL;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
					       key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}
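
/*
 * For 96-bit IVs, GCM defines the initial counter block as
 * J0 = IV || 0^31 || 1. That is what the function above builds: three
 * little-endian IV words followed by iv[3] = 1.
 */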

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/*
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}