/**
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

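/*
 * The two helpers below pack per-session parameters into the two 32-bit
 * "command" words at the head of a dynamic SA (security association)
 * record, using the bitfield layout from crypto4xx_sa.h: word 0 selects
 * opcode, algorithms, padding and direction; word 1 selects the crypto
 * mode, feedback mode and the various copy/mute flags.
 */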
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

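/*
 * Common skcipher path: convert the request IV (if any) to the
 * little-endian words the engine expects, then hand the scatterlists to
 * crypto4xx_build_pd() with the inbound SA for decryption or the
 * outbound SA for encryption.
 */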
static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false);
}

int crypto4xx_encrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false);
}

int crypto4xx_decrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true);
}

int crypto4xx_decrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true);
}

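/*
 * Note on the setkey pattern used throughout this file: only the
 * inbound SA (ctx->sa_in) is built by hand; the outbound SA is a byte
 * copy of it with just the direction (and, for the AEADs, the opcode)
 * patched afterwards, roughly:
 *
 *	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
 *	sa = ctx->sa_out;
 *	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
 */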
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
		crypto_skcipher_set_flags(cipher,
					  CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_CBC ?
				 SA_SAVE_IV : SA_NOT_SAVE_IV),
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

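/*
 * For rfc3686(ctr(aes)) the key blob from the API is the AES key with a
 * 4-byte nonce appended (CTR_RFC3686_NONCE_SIZE), e.g. a 20-byte blob
 * is a 16-byte AES key plus nonce. The nonce is split off here and kept
 * in ctx->iv_nonce for assembling counter blocks later.
 */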
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
				  CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

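/*
 * The engine consumes a full 16-byte counter block. For RFC 3686 it is
 * assembled as { nonce, iv[0..3], iv[4..7], 1 }: the per-request 8-byte
 * IV sits between the key-derived nonce and an initial block counter of
 * one, matching the RFC 3686 counter block layout.
 */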
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32-bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fallback if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);
		int ret;

		skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			      : crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv(req)
		       : crypto4xx_decrypt_iv(req);
}

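/*
 * Keep the software fallback tfm in sync with this tfm: mirror the
 * request flags into the fallback before setkey and copy its result
 * flags (e.g. a bad-key-length indication) back to the caller.
 */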
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	int rc;

	crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_skcipher_set_flags(cipher,
		crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
				    CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

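/*
 * These checks mirror the engine's AEAD limits; any request outside
 * them (e.g. a 2-byte ICV, a 15-byte plaintext or 1021 bytes of
 * associated data) is routed to the software fallback instead.
 */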
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	int rc;

	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
	crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(cipher,
		crypto_aead_get_flags(ctx->sw_cipher.aead) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

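/*
 * In CCM the first IV byte encodes the counter field length as L - 1
 * (see RFC 3610), so the iv[0] == 1 and iv[0] == 3 cases accepted by
 * crypto4xx_aead_need_fallback() correspond to the 2- and 4-byte
 * counter fields the engine supports.
 */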
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	return 0;
}

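/*
 * The ICV length is a per-request property but lives in the SA, so the
 * request path works on a stack copy (tmp_sa) of the session SA and
 * patches digest_len there instead of touching the shared context.
 */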
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

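/*
 * GHASH needs the hash subkey H = AES-K(0^128), which the driver
 * computes in software here with a one-off "aes" cipher tfm and then
 * stores in the SA's inner digest field.
 */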
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_cipher *aes_tfm = NULL;
	uint8_t src[16] = { 0 };
	int rc;

	aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_ASYNC |
				      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(aes_tfm)) {
		rc = PTR_ERR(aes_tfm);
		pr_warn("could not load aes cipher driver: %d\n", rc);
		return rc;
	}

	rc = crypto_cipher_setkey(aes_tfm, key, keylen);
	if (rc) {
		pr_err("setkey() failed: %d\n", rc);
		goto out;
	}

	crypto_cipher_encrypt_one(aes_tfm, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
	crypto_free_cipher(aes_tfm);
	return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
		crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
					       key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

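/*
 * With the fixed 12-byte GCM nonce the initial counter block is simply
 * J0 = IV || 0x00000001; the iv[] array below encodes exactly that
 * before the request is handed to the engine.
 */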
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/**
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

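/*
 * The descriptor-based hash path is one-shot: update() and digest()
 * both submit all of req->nbytes in a single packet descriptor and
 * write the digest straight to req->result, so final() has nothing
 * left to do.
 */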
int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}