// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"
struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};
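/*
 * Translate an SE algorithm ID into the SE_SHA_CFG algorithm and mode bits.
 * HMAC variants additionally set the HMAC algorithm bit and then share the
 * mode encoding of the corresponding plain SHA variant.
 */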
static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}
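/*
 * The tegra_sha_fallback_*() helpers below simply mirror the request onto
 * ctx->fallback_tfm, the software ahash allocated in tegra_sha_init_fallback().
 * They are used whenever ctx->fallback is set, e.g. for HMAC keys the engine
 * cannot take into a keyslot.
 */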
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}
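/*
 * Build the host1x opcode stream for one SHA/HMAC operation: message length
 * and bytes left, the SE_SHA_CFG value, source and destination addresses,
 * the optional key slot, the operation trigger and the closing sync point
 * increment. Returns the number of words written to the command buffer.
 */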
static int tegra_sha_prep_cmd(struct tegra_se *se, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3] to the HASH engine,
	 * HW treats it as the last buffer and processes the data.
	 * Therefore, add an extra byte to msg_left if it is not the
	 * last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(6);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		cpuvaddr[i++] = 0;
	}

	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));
	cpuvaddr[i++] = rctx->digest.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
			      SE_ADDR_HI_SZ(rctx->digest.size));
	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL |
			SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu cfg %#x",
		msg_len, msg_left, rctx->config);

	return i;
}
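/*
 * Intermediate digests live in the SE hash result registers. The two helpers
 * below snapshot them into the request context after an update and write them
 * back before the next one, which is what makes import()/export() work.
 */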
static void tegra_sha_copy_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4));
}

static void tegra_sha_paste_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		writel(rctx->result[i],
		       se->base + se->hw->regs->result + (i * 4));
}
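/*
 * Process one update: data is gathered into the DMA-able datbuf, while
 * anything that does not fill a whole block (plus one reserved last block)
 * is kept in the residue buffer and handed to the engine only at final().
 */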
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	unsigned int nblks, nresidue, size, ret;
	u32 *cpuvaddr = ctx->se->cmdbuf->addr;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of block size and there is no residue,
	 * then reserve the last block as residue during final() to process.
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;
	rctx->total_len += rctx->datbuf.size;

	/*
	 * If nbytes is less than a block size, copy it to the residue and
	 * return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);

		rctx->residue.size += req->nbytes;
		return 0;
	}

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update residue value with the residue after current block */
	rctx->residue.size = nresidue;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_HASH_REG;

	/*
	 * If this is not the first 'update' call, paste the previously copied
	 * intermediate results to the registers so that it gets picked up.
	 * This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FIRST))
		tegra_sha_paste_hash_result(ctx->se, rctx);

	size = tegra_sha_prep_cmd(ctx->se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(ctx->se, size);

	/*
	 * If this is not the final update, copy the intermediate results
	 * from the registers so that they can be used in the next 'update'
	 * call. This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FINAL))
		tegra_sha_copy_hash_result(ctx->se, rctx);

	return ret;
}
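/* Hash the remaining residue, copy the digest out and release the DMA buffers. */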
static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(se, size);
	if (ret)
		goto out;

	/* Copy the result back to the request */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	dma_free_coherent(se->dev, SE_SHA_BUFLEN,
			  rctx->datbuf.buf, rctx->datbuf.addr);
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
	return ret;
}
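/* crypto_engine worker: run the pending UPDATE and/or FINAL steps for a request. */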
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		rctx->task &= ~SHA_FINAL;
	}

	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}
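/*
 * Allocate the software fallback transform and grow the request/state sizes
 * so that the fallback request and state fit inside our own request context.
 */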
static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}
static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}
static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}
static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task = SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	rctx->datbuf.buf = dma_alloc_coherent(se->dev, SE_SHA_BUFLEN,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		goto datbuf_fail;

	return 0;

datbuf_fail:
	dma_free_coherent(se->dev, rctx->blk_size, rctx->residue.buf,
			  rctx->residue.addr);
resbuf_fail:
	dma_free_coherent(se->dev, SE_SHA_BUFLEN, rctx->datbuf.buf,
			  rctx->datbuf.addr);
digbuf_fail:
	return -ENOMEM;
}
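/*
 * HMAC keys whose length the engine keyslot cannot take (i.e. not an AES key
 * length) are routed to the software fallback; tegra_hmac_setkey() makes that
 * decision per key.
 */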
static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}
static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
}
static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}
static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	tegra_sha_init(req);
	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}
static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}
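/* SHA-1/2/3 and HMAC-SHA2 algorithm definitions backed by the Security Engine. */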
static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};
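/*
 * Build the key access control (KAC) manifest used when an HMAC key is loaded
 * into a keyslot: non-secure user, HMAC purpose, and the key size.
 */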
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}
int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto err;
		}
	}

	return 0;

err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}
void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}