// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}
static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}
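
/*
 * Core skcipher path. The thin wrappers below encode three request
 * properties in their names: encrypt vs. decrypt, whether an IV is passed
 * to the engine ("iv" vs. "noiv"), and whether the request length must be
 * block-aligned ("block") or may be arbitrary ("stream").
 */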
static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt,
				  bool check_blocksize)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
		return -EINVAL;

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}
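
/*
 * AES Functions
 *
 * Each setkey below allocates a pair of SAs (sa_in for decryption, sa_out
 * for encryption): the inbound SA is programmed first, then copied and
 * reprogrammed with the outbound direction and opcode.
 */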
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT.
	 * It's the DIR_(IN|OUT)BOUND that matters.
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}
int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}
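
/*
 * RFC 3686 CTR: the last CTR_RFC3686_NONCE_SIZE (4) bytes of the key
 * material carry the per-key nonce. It is stashed in the context at setkey
 * time and becomes word 0 of the counter block (nonce || IV || 1) that is
 * assembled for each request below.
 */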
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}
int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}
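
/*
 * Plain (non-RFC 3686) CTR mode. The engine increments only the low 32
 * bits of the counter block, so requests that would wrap that word are
 * bounced to the software fallback cipher set up at setkey time.
 */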
static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32-bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fallback if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			      : crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv_stream(req)
		       : crypto4xx_decrypt_iv_stream(req);
}
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}
int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}
int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}
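
/*
 * AEAD (CCM/GCM) support. Requests the engine cannot handle are routed to
 * a software aead transform; the predicate below enumerates the hardware
 * limits that force that path.
 */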
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}
static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}
static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}
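
/*
 * AES-CCM Functions
 *
 * CCM is realized as AES-CTR over the payload combined with an AES
 * CBC-MAC (SA_HASH_ALG_CBC_MAC) over it, both described by a single SA.
 */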
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}
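
/*
 * The request-time CCM path works on an on-stack copy of the SA; the
 * per-request digest length and counter-mode tweaks below therefore never
 * touch the SA stored in the shared context.
 */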
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}
int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}
int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}
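
/*
 * AES-GCM Functions
 *
 * GCM runs the payload through AES-CTR and authenticates with GHASH
 * (SA_HASH_ALG_GHASH); both are programmed into one SA below.
 */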
static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}
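
/*
 * The GHASH subkey H is AES_K(0^128). The engine expects it precomputed in
 * the SA's inner digest field, so it is derived here in software from the
 * raw key.
 */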
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_aes_ctx ctx;
	uint8_t src[16] = { 0 };
	int rc;

	rc = aes_expandkey(&ctx, key, keylen);
	if (rc) {
		pr_err("aes_expandkey() failed: %d\n", rc);
		return rc;
	}

	aes_encrypt(&ctx, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
		return -EINVAL;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}
int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}
/*
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}
int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}
int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}
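
/*
 * crypto4xx_hash_update() and crypto4xx_hash_digest() both hand the whole
 * request to the engine with req->result as the destination, which leaves
 * nothing for a ->final() step to do here.
 */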
int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}
int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}
/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}