/*
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 */

/*
 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */
#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <sys/sha2.h>
#include <sha2/sha2_impl.h>
/*
 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
 * by KCF to one of the entry points.
 */

#define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
#define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
/* to extract the digest length passed as mechanism parameter */
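/*
 * The mechanism parameter may not be suitably aligned for a direct
 * ulong_t load on all platforms, so the unaligned case below goes
 * through memcpy() into a properly aligned temporary.
 */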
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);		\
	else {								\
		ulong_t tmp_ulong;					\
		memcpy(&tmp_ulong, (m)->cm_param, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
}
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
	SHA2Init(mech, ctx);					\
	SHA2Update(ctx, key, len);				\
	SHA2Final(digest, ctx);					\
}
/*
 * Mechanism info structure passed to KCF during registration.
 */
static const crypto_mech_info_t sha2_mech_info_tab[] = {
	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA256-HMAC GENERAL */
	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	{SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	{SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA384-HMAC GENERAL */
	{SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	{SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC},
	{SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
	/* SHA512-HMAC GENERAL */
	{SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
};
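/*
 * Note that the table lists three mechanisms (digest, HMAC, HMAC general)
 * per hash, in SHA256/SHA384/SHA512 order.  Several entry points below
 * rely on this ordering, using cm_type / 3 to select the hash and
 * cm_type % 3 to distinguish digest, HMAC and HMAC-general mechanisms.
 */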
static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_digest_atomic(crypto_mechanism_t *, crypto_data_t *,
    crypto_data_t *);
static const crypto_digest_ops_t sha2_digest_ops = {
	.digest_init = sha2_digest_init,
	.digest = sha2_digest,
	.digest_update = sha2_digest_update,
	.digest_final = sha2_digest_final,
	.digest_atomic = sha2_digest_atomic
};
static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *);
static int sha2_mac_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static int sha2_mac_verify_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static const crypto_mac_ops_t sha2_mac_ops = {
	.mac_init = sha2_mac_init,
	.mac_update = sha2_mac_update,
	.mac_final = sha2_mac_final,
	.mac_atomic = sha2_mac_atomic,
	.mac_verify_atomic = sha2_mac_verify_atomic
};
static int sha2_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int sha2_free_context(crypto_ctx_t *);
static const crypto_ctx_ops_t sha2_ctx_ops = {
	.create_ctx_template = sha2_create_ctx_template,
	.free_context = sha2_free_context
};
static const crypto_ops_t sha2_crypto_ops = {
	&sha2_digest_ops,
	NULL,
	&sha2_mac_ops,
	&sha2_ctx_ops,
};
static const crypto_provider_info_t sha2_prov_info = {
	"SHA2 Software Provider",
	&sha2_crypto_ops,
	sizeof (sha2_mech_info_tab) / sizeof (crypto_mech_info_t),
	sha2_mech_info_tab
};
static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
int
sha2_mod_init(void)
{
	int ret;

	/*
	 * Register with KCF. If the registration fails, log an
	 * error but do not uninstall the module, since the functionality
	 * provided by misc/sha2 should still be available.
	 */
	if ((ret = crypto_register_provider(&sha2_prov_info,
	    &sha2_prov_handle)) != CRYPTO_SUCCESS)
		cmn_err(CE_WARN, "sha2 _init: "
		    "crypto_register_provider() failed (0x%x)", ret);

	return (0);
}
int
sha2_mod_fini(void)
{
	int ret = 0;

	if (sha2_prov_handle != 0) {
		if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
		    CRYPTO_SUCCESS) {
			cmn_err(CE_WARN,
			    "sha2 _fini: crypto_unregister_provider() "
			    "failed (0x%x)", ret);
			return (EBUSY);
		}
		sha2_prov_handle = 0;
	}

	return (ret);
}
/*
 * KCF software provider digest entry points.
 */
static int
sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism)
{
	/*
	 * Allocate and initialize SHA2 context.
	 */
	ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t), KM_SLEEP);
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
	SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);

	return (CRYPTO_SUCCESS);
}
/*
 * Helper SHA2 digest update function for uio data.
 */
static int
sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
	off_t offset = data->cd_offset;
	size_t length = data->cd_length;
	uint_t vec_idx = 0;
	size_t cur_len;

	/* we support only kernel buffer */
	if (zfs_uio_segflg(data->cd_uio) != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec containing data to be
	 * digested.
	 */
	offset = zfs_uio_index_at_offset(data->cd_uio, offset, &vec_idx);
	if (vec_idx == zfs_uio_iovcnt(data->cd_uio)) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	/*
	 * Now do the digesting on the iovecs.
	 */
	while (vec_idx < zfs_uio_iovcnt(data->cd_uio) && length > 0) {
		cur_len = MIN(zfs_uio_iovlen(data->cd_uio, vec_idx) -
		    offset, length);

		SHA2Update(sha2_ctx, (uint8_t *)zfs_uio_iovbase(data->cd_uio,
		    vec_idx) + offset, cur_len);

		length -= cur_len;
		vec_idx++;
		offset = 0;
	}

	if (vec_idx == zfs_uio_iovcnt(data->cd_uio) && length > 0) {
		/*
		 * The end of the specified iovecs was reached but
		 * the length requested could not be processed, i.e.
		 * the caller requested to digest more data than it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	return (CRYPTO_SUCCESS);
}
/*
 * Helper SHA2 digest final function for uio data.
 * digest_len is the length of the desired digest. If digest_len
 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must
 * be at least the algorithm's digest length bytes.
 */
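/*
 * In this file the plain digest entry points always request the full
 * digest length and pass a NULL digest_scratch, while the HMAC final
 * paths pass their local digest[] buffer so that truncated
 * (HMAC-general) digests can be copied out safely.
 */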
static int
sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
    ulong_t digest_len, uchar_t *digest_scratch)
{
	off_t offset = digest->cd_offset;
	uint_t vec_idx = 0;

	/* we support only kernel buffer */
	if (zfs_uio_segflg(digest->cd_uio) != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec containing ptr to the digest to
	 * be returned.
	 */
	offset = zfs_uio_index_at_offset(digest->cd_uio, offset, &vec_idx);
	if (vec_idx == zfs_uio_iovcnt(digest->cd_uio)) {
		/*
		 * The caller specified an offset that is
		 * larger than the total size of the buffers
		 * it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	if (offset + digest_len <=
	    zfs_uio_iovlen(digest->cd_uio, vec_idx)) {
		/*
		 * The computed SHA2 digest will fit in the current
		 * iovec.
		 */
		if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA256_DIGEST_LENGTH)) ||
		    ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
		    (digest_len != SHA512_DIGEST_LENGTH))) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest_scratch, sha2_ctx);

			memcpy((uchar_t *)
			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
			    digest_scratch, digest_len);
		} else {
			SHA2Final((uchar_t *)zfs_uio_iovbase(digest->
			    cd_uio, vec_idx) + offset,
			    sha2_ctx);
		}
	} else {
		/*
		 * The computed digest will be crossing one or more iovecs.
		 * This is bad performance-wise but we need to support it.
		 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest iovecs.
		 */
		uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		SHA2Final(digest_tmp, sha2_ctx);

		while (vec_idx < zfs_uio_iovcnt(digest->cd_uio) &&
		    length > 0) {
			cur_len =
			    MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
			    offset, length);
			memcpy(
			    zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
			    digest_tmp + scratch_offset,
			    cur_len);

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}

		if (vec_idx == zfs_uio_iovcnt(digest->cd_uio) && length > 0) {
			/*
			 * The end of the specified iovecs was reached but
			 * the length requested could not be processed, i.e.
			 * the caller requested to digest more data than it
			 * provided.
			 */
			return (CRYPTO_DATA_LEN_RANGE);
		}
	}

	return (CRYPTO_SUCCESS);
}
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
static int
sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data)
{
	int ret = CRYPTO_SUCCESS;

	ASSERT(ctx->cc_provider_private != NULL);

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
}
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_MECH_INFO_TYPE:
		sha_digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
static int
sha2_digest_atomic(crypto_mechanism_t *mechanism, crypto_data_t *data,
    crypto_data_t *digest)
{
	int ret = CRYPTO_SUCCESS;
	SHA2_CTX sha2_ctx;
	uint32_t sha_digest_len;

	SHA2Init(mechanism->cm_type, &sha2_ctx);

	/*
	 * Do the SHA updates on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&sha2_ctx, (uint8_t *)data->
		    cd_raw.iov_base + data->cd_offset, data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&sha2_ctx, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, bail */
		digest->cd_length = 0;
		return (ret);
	}

	if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
		sha_digest_len = SHA256_DIGEST_LENGTH;
	else
		sha_digest_len = SHA512_DIGEST_LENGTH;

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_ctx, digest,
		    sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	return (ret);
}
/*
 * KCF software provider mac entry points.
 *
 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
 *
 * Init:
 * The initialization routine initializes what we denote
 * as the inner and outer contexts by doing
 * - for inner context: SHA2(key XOR ipad)
 * - for outer context: SHA2(key XOR opad)
 *
 * Update:
 * Each subsequent SHA2 HMAC update will result in an
 * update of the inner context with the specified data.
 *
 * Final:
 * The SHA2 HMAC final will do a SHA2 final operation on the
 * inner context, and the resulting digest will be used
 * as the data for an update on the outer context. Last
 * but not least, a SHA2 final on the outer context will
 * be performed to obtain the SHA2 HMAC digest to return
 * to the user.
 */
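/*
 * For reference, this is the standard HMAC construction (RFC 2104):
 * with B the hash block size in bytes and K0 the key zero-padded (or
 * first hashed, then zero-padded) to B bytes,
 *
 *	HMAC(K, text) = H((K0 ^ opad) || H((K0 ^ ipad) || text))
 *
 * where ipad is the byte 0x36 repeated B times and opad is the byte
 * 0x5c repeated B times, matching the constants used in
 * sha2_mac_init_ctx() below.
 */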
/*
 * Initialize a SHA2-HMAC context.
 */
static void
sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
{
	uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
	uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
	int i, block_size, blocks_per_int64;

	/* Determine the block size */
	if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
		block_size = SHA256_HMAC_BLOCK_SIZE;
		blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
	} else {
		block_size = SHA512_HMAC_BLOCK_SIZE;
		blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
	}

	(void) memset(ipad, 0, block_size);
	(void) memset(opad, 0, block_size);

	if (keyval != NULL) {
		(void) memcpy(ipad, keyval, length_in_bytes);
		(void) memcpy(opad, keyval, length_in_bytes);
	} else {
		ASSERT0(length_in_bytes);
	}

	/* XOR key with ipad (0x36) and opad (0x5c) */
	for (i = 0; i < blocks_per_int64; i++) {
		ipad[i] ^= 0x3636363636363636;
		opad[i] ^= 0x5c5c5c5c5c5c5c5c;
	}

	/* perform SHA2 on ipad */
	SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
	SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);

	/* perform SHA2 on opad */
	SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
	SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
}
static int
sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	ctx->cc_provider_private =
	    kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_HOST_MEMORY);

	PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(PROV_SHA2_HMAC_CTX(ctx), ctx_template,
		    sizeof (sha2_hmac_ctx_t));
	} else {
		/* no context template, compute context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			uchar_t digested_key[SHA512_DIGEST_LENGTH];
			sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;

			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &hmac_ctx->hc_icontext,
			    key->ck_data, keylen_in_bytes, digested_key);
			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
			    digested_key, sha_digest_len);
		} else {
			sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
			    key->ck_data, keylen_in_bytes);
		}
	}

	/*
	 * Get the mechanism parameters, if applicable.
	 */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
	}

	if (ret != CRYPTO_SUCCESS) {
		memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
		kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
		ctx->cc_provider_private = NULL;
	}

	return (ret);
}
static int
sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data)
{
	int ret = CRYPTO_SUCCESS;

	ASSERT(ctx->cc_provider_private != NULL);

	/*
	 * Do a SHA2 update of the inner context using the specified
	 * data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
}
static int
sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	uint32_t digest_len, sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Set the digest lengths to values appropriate to the mechanism */
	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
		break;
	case SHA512_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		break;
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	default:
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
		mac->cd_length = digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 */
	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
	    sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
			memcpy((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		mac->cd_length = digest_len;
	else
		mac->cd_length = 0;

	memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}
static int
sha2_mac_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/* get the mechanism parameters, if applicable */
	if ((mechanism->cm_type % 3) == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
	 * bytes of the inner hash value.
	 */
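	/*
	 * (SHA384 shares the SHA512 block and internal state sizes, so
	 * sha_digest_len is SHA512_DIGEST_LENGTH (64) for the SHA384
	 * mechanisms here, while SHA384_DIGEST_LENGTH is only 48.)
	 */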
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
			memcpy((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		mac->cd_length = digest_len;
		return (CRYPTO_SUCCESS);
	}
bail:
	memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
static int
sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t ctx_template)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA512_DIGEST_LENGTH];
	sha2_hmac_ctx_t sha2_hmac_ctx;
	uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	if (ctx_template != NULL) {
		/* reuse context template */
		memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
	} else {
		sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
		/* no context template, initialize context */
		if (keylen_in_bytes > sha_hmac_block_size) {
			/*
			 * Hash the passed-in key to get a smaller key.
			 * The inner context is used since it hasn't been
			 * initialized yet.
			 */
			PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
			    &sha2_hmac_ctx.hc_icontext,
			    key->ck_data, keylen_in_bytes, digest);
			sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
			    sha_digest_len);
		} else {
			sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
			    keylen_in_bytes);
		}
	}

	/* get the mechanism parameters, if applicable */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
		PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
		if (digest_len > sha_digest_len) {
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
			goto bail;
		}
	}

	if (mac->cd_length != digest_len) {
		ret = CRYPTO_INVALID_MAC;
		goto bail;
	}

	/* do a SHA2 update of the inner context using the specified data */
	SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
	if (ret != CRYPTO_SUCCESS)
		/* the update failed, free context and bail */
		goto bail;

	/* do a SHA2 final on the inner context */
	SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);

	/*
	 * Do an SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 *
	 * HMAC-SHA384 needs special handling as the outer hash needs only 48
	 * bytes of the inner hash value.
	 */
	if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
	    mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
	 */
	SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);

	/*
	 * Compare the computed digest against the expected digest passed
	 * as argument.
	 */
	switch (mac->cd_format) {

	case CRYPTO_DATA_RAW:
		if (memcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
		    mac->cd_offset, digest_len) != 0)
			ret = CRYPTO_INVALID_MAC;
		break;

	case CRYPTO_DATA_UIO: {
		off_t offset = mac->cd_offset;
		uint_t vec_idx = 0;
		off_t scratch_offset = 0;
		size_t length = digest_len;
		size_t cur_len;

		/* we support only kernel buffer */
		if (zfs_uio_segflg(mac->cd_uio) != UIO_SYSSPACE)
			return (CRYPTO_ARGUMENTS_BAD);

		/* jump to the first iovec containing the expected digest */
		offset = zfs_uio_index_at_offset(mac->cd_uio, offset,
		    &vec_idx);
		if (vec_idx == zfs_uio_iovcnt(mac->cd_uio)) {
			/*
			 * The caller specified an offset that is
			 * larger than the total size of the buffers
			 * it provided.
			 */
			ret = CRYPTO_DATA_LEN_RANGE;
			break;
		}

		/* do the comparison of computed digest vs specified one */
		while (vec_idx < zfs_uio_iovcnt(mac->cd_uio) && length > 0) {
			cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
			    offset, length);

			if (memcmp(digest + scratch_offset,
			    zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
			    cur_len) != 0) {
				ret = CRYPTO_INVALID_MAC;
				break;
			}

			length -= cur_len;
			vec_idx++;
			scratch_offset += cur_len;
			offset = 0;
		}
		break;
	}

	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	return (ret);
bail:
	memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
	mac->cd_length = 0;
	return (ret);
}
/*
 * KCF software provider context management entry points.
 */
static int
sha2_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size)
{
	sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
	uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
	uint32_t sha_digest_len, sha_hmac_block_size;

	/*
	 * Set the digest length and block size to values appropriate to the
	 * mechanism.
	 */
	switch (mechanism->cm_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
		break;
	case SHA384_HMAC_MECH_INFO_TYPE:
	case SHA384_HMAC_GEN_MECH_INFO_TYPE:
	case SHA512_HMAC_MECH_INFO_TYPE:
	case SHA512_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA512_DIGEST_LENGTH;
		sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * Allocate and initialize SHA2 context.
	 */
	sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t), KM_SLEEP);
	if (sha2_hmac_ctx_tmpl == NULL)
		return (CRYPTO_HOST_MEMORY);

	sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;

	if (keylen_in_bytes > sha_hmac_block_size) {
		uchar_t digested_key[SHA512_DIGEST_LENGTH];

		/*
		 * Hash the passed-in key to get a smaller key.
		 * The inner context is used since it hasn't been
		 * initialized yet.
		 */
		PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
		    &sha2_hmac_ctx_tmpl->hc_icontext,
		    key->ck_data, keylen_in_bytes, digested_key);
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
		    sha_digest_len);
	} else {
		sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
		    keylen_in_bytes);
	}

	*ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
	*ctx_template_size = sizeof (sha2_hmac_ctx_t);

	return (CRYPTO_SUCCESS);
}
static int
sha2_free_context(crypto_ctx_t *ctx)
{
	uint_t ctx_len;

	if (ctx->cc_provider_private == NULL)
		return (CRYPTO_SUCCESS);

	/*
	 * We have to free either SHA2 or SHA2-HMAC contexts, which
	 * have different lengths.
	 *
	 * Note: Below is dependent on the mechanism ordering.
	 */
	if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
		ctx_len = sizeof (sha2_ctx_t);
	else
		ctx_len = sizeof (sha2_hmac_ctx_t);

	memset(ctx->cc_provider_private, 0, ctx_len);
	kmem_free(ctx->cc_provider_private, ctx_len);
	ctx->cc_provider_private = NULL;

	return (CRYPTO_SUCCESS);
}