/*
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 */

/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */
#include <sys/errno.h>
#include <sys/types.h>
#include <sys/sysmacros.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/api.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/sched_impl.h>
#define	CRYPTO_OPS_OFFSET(f)		offsetof(crypto_ops_t, co_##f)
#define	CRYPTO_CIPHER_MAC_OFFSET(f)	offsetof(crypto_dual_cipher_mac_ops_t, f)

static int crypto_mac_decrypt_common(crypto_mechanism_t *,
    crypto_mechanism_t *, crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
    crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
    crypto_data_t *, crypto_call_req_t *, boolean_t);

static int crypto_mac_decrypt_common_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *, crypto_mechanism_t *,
    crypto_dual_data_t *, crypto_key_t *, crypto_key_t *,
    crypto_ctx_template_t, crypto_ctx_template_t, crypto_data_t *,
    crypto_data_t *, crypto_call_req_t *, boolean_t);
int
crypto_encrypt_mac_prov(crypto_provider_t provider, crypto_session_id_t sid,
    crypto_mechanism_t *encr_mech, crypto_mechanism_t *mac_mech,
    crypto_data_t *pt, crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
    crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(encr_mech->cm_type, encr_key,
		    mac_mech->cm_type, mac_key, pd, &real_provider,
		    CRYPTO_FG_ENCRYPT_MAC_ATOMIC);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 *
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (encr_tmpl != NULL) {
			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, real_provider)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &lencr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_ENCRYPT_MAC_ATOMIC(real_provider, sid,
		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
		    sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
		    spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &cmops->em_encr_mech);
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;

		cmops->em_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->em_mac_mech);
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, NULL, crq, &params,
		    B_FALSE);
	}

out:
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);

	return (rv);
}
/*
 * Performs a dual encrypt/mac atomic operation. The provider and session
 * to use are determined by the KCF dispatcher.
 */
int
crypto_encrypt_mac(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_data_t *pt,
    crypto_key_t *encr_key, crypto_key_t *mac_key,
    crypto_ctx_template_t encr_tmpl, crypto_ctx_template_t mac_tmpl,
    crypto_dual_data_t *ct, crypto_data_t *mac, crypto_call_req_t *crq)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;
	boolean_t encr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	kcf_dual_req_t *next_req = NULL;

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(encr_mech, encr_key, mac_mech, mac_key,
	    &me, &prov_encr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,
	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_ENCRYPT_MAC_ATOMIC,

	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		if (next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		return (error);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 *
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (encr_tmpl != NULL) {
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {

				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));

				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		encr_tmpl_checked = B_TRUE;
	}
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		crypto_call_req_t encr_req;

		/* Need to emulate with 2 internal calls */
		/* Allocate and initialize the MAC req for the callback */

		if (next_req == NULL) {
			next_req = kcf_alloc_req(crq);

			if (next_req == NULL) {
				KCF_PROV_REFRELE(pd);
				if (list != NULL)
					kcf_free_triedlist(list);
				return (CRYPTO_HOST_MEMORY);
			}

			/*
			 * Careful! we're wrapping-in mac_tmpl instead
			 * of an spi_mac_tmpl. The callback routine will
			 * have to validate mac_tmpl, and use the
			 * mac_ctx_tmpl, once it picks a MAC provider.
			 */
			KCF_WRAP_MAC_OPS_PARAMS(&(next_req->kr_params),
			    KCF_OP_ATOMIC, NULL, mac_mech, mac_key,
			    (crypto_data_t *)ct, mac, mac_tmpl);
		}

		encr_req.cr_flag = crq->cr_flag;
		encr_req.cr_callback_func = kcf_next_req;
		encr_req.cr_callback_arg = next_req;

		if (pt == NULL) {
			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
			    pd->pd_sid, encr_mech, encr_key,
			    (crypto_data_t *)ct, NULL, spi_encr_tmpl);
		} else {
			KCF_WRAP_ENCRYPT_OPS_PARAMS(&params, KCF_OP_ATOMIC,
			    pd->pd_sid, encr_mech, encr_key, pt,
			    (crypto_data_t *)ct, spi_encr_tmpl);
		}
		error = kcf_submit_request(pd, NULL, (crq == NULL) ? NULL :
		    &encr_req, &params, B_TRUE);

		switch (error) {
		case CRYPTO_SUCCESS: {

			/*
			 * The encryption step is done. Reuse the encr_req
			 * for submitting the MAC step.
			 */
			if (next_req == NULL) {
				saveoffset = ct->dd_offset1;
				savelen = ct->dd_len1;
			} else {
				saveoffset = next_req->kr_saveoffset =
				    ct->dd_offset1;
				savelen = next_req->kr_savelen = ct->dd_len1;
				encr_req.cr_callback_func = kcf_last_req;
			}

			ct->dd_offset1 = ct->dd_offset2;
			ct->dd_len1 = ct->dd_len2;

			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac, (crq == NULL) ? NULL :
			    &encr_req);

			if (error != CRYPTO_QUEUED) {
				ct->dd_offset1 = saveoffset;
				ct->dd_len1 = savelen;
			}
			break;
		}

		case CRYPTO_QUEUED:
			if ((crq != NULL) &&
			    !(crq->cr_flag & CRYPTO_SKIP_REQID))
				crq->cr_reqid = encr_req.cr_reqid;
			break;
		/* Add pd to the linked list of providers tried. */
		if (IS_RECOVERABLE(error)) {
			if (kcf_insert_triedlist(&list, pd,
			    KCF_KMFLAG(crq)) != NULL)
				goto retry;
		}

		if (error != CRYPTO_QUEUED && next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		if (list != NULL)
			kcf_free_triedlist(list);
		KCF_PROV_REFRELE(pd);
		return (error);
	}
	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));

				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}

		mac_tmpl_checked = B_TRUE;
	}
	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		lencr_mech.cm_type = prov_encr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_ENCRYPT_MAC_ATOMIC(pd, pd->pd_sid,
		    &lencr_mech, encr_key, &lmac_mech, mac_key, pt, ct,
		    mac, spi_encr_tmpl, spi_mac_tmpl, KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_ATOMIC,
		    pd->pd_sid, encr_key, mac_key, pt, ct, mac, spi_encr_tmpl,
		    spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		cmops->em_encr_mech.cm_type = prov_encr_mechid;
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;
		cmops->em_mac_mech.cm_type = prov_mac_mechid;
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
	}
	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
	    IS_RECOVERABLE(error)) {
		/* Add pd to the linked list of providers tried. */
		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
			goto retry;
	}

	if (next_req != NULL)
		kmem_free(next_req, sizeof (kcf_dual_req_t));

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}
int
crypto_encrypt_mac_init_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
    crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;

	kcf_context_t *encr_kcf_context = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(encr_mech->cm_type, encr_key,
		    mac_mech->cm_type, mac_key, pd, &real_provider,
		    CRYPTO_FG_ENCRYPT_MAC);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 *
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (encr_tmpl != NULL) {
			if (kcf_get_mech_entry(encr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	ctx = kcf_new_ctx(cr, real_provider, sid);
	if (ctx == NULL) {
		rv = CRYPTO_HOST_MEMORY;
		goto out;
	}
	encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, real_provider)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &lencr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_ENCRYPT_MAC_INIT(real_provider, ctx, &lencr_mech,
		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		KCF_SET_PROVIDER_MECHNUM(encr_mech->cm_type, real_provider,
		    &cmops->em_encr_mech);
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;

		cmops->em_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->em_mac_mech);
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, ctx, cr, &params,
		    B_FALSE);
	}

	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
		KCF_CONTEXT_REFRELE(encr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

out:
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);

	return (rv);
}
/*
 * Starts a multi-part dual encrypt/mac operation. The provider and session
 * to use are determined by the KCF dispatcher.
 */
int
crypto_encrypt_mac_init(crypto_mechanism_t *encr_mech,
    crypto_mechanism_t *mac_mech, crypto_key_t *encr_key,
    crypto_key_t *mac_key, crypto_ctx_template_t encr_tmpl,
    crypto_ctx_template_t mac_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the encryption mechanism, that
	 * is also capable of the MAC mechanism.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_encr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_encrypt_mac_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_encr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_encr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;
	boolean_t encr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	crypto_ctx_t *ctx = NULL;
	kcf_context_t *encr_kcf_context = NULL, *mac_kcf_context;
	crypto_call_flag_t save_flag;
retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(encr_mech, encr_key, mac_mech, mac_key,
	    &me, &prov_encr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_MAC, CRYPTO_FG_MAC, 0);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		return (error);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 *
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
	if ((!encr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (encr_tmpl != NULL) {
			ctx_encr_tmpl = (kcf_ctx_template_t *)encr_tmpl;
			if (ctx_encr_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_encr_tmpl = ctx_encr_tmpl->ct_prov_tmpl;
		}

		encr_tmpl_checked = B_TRUE;
	}
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		/* Need to emulate with 2 internal calls */

		/*
		 * We avoid code complexity by limiting the pure async.
		 * case to be done using only a SW provider.
		 * XXX - Redo the emulation code below so that we can
		 * remove this limitation.
		 */
		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))

				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);
				KCF_PROV_REFRELE(pd);
				return (CRYPTO_HOST_MEMORY);
			}
			goto retry;
		}

		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
			if (ctx == NULL) {
				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);
				KCF_PROV_REFRELE(pd);
				return (CRYPTO_HOST_MEMORY);
			}
			encr_kcf_context = (kcf_context_t *)
			    ctx->cc_framework_private;
		}
		/*
		 * Trade-off speed vs avoidance of code complexity and
		 * duplication:
		 * Could do all the combinations of fastpath / synch / asynch
		 * for the encryption and the mac steps. Early attempts
		 * showed the code grew wild and bug-prone, for little gain.
		 * Therefore, the adaptive asynch case is not implemented.
		 * It's either pure synchronous, or pure asynchronous.
		 * We still preserve a fastpath for the pure synchronous
		 * requests to SW providers.
		 */
		if (cr == NULL) {
			crypto_context_t mac_context;
			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
				crypto_mechanism_t lmech = *encr_mech;

				lmech.cm_type = prov_encr_mechid;

				error = KCF_PROV_ENCRYPT_INIT(pd, ctx, &lmech,
				    encr_key, spi_encr_tmpl,
				    KCF_RHNDL(KM_SLEEP));
			} else {
				/*
				 * If we did the 'goto retry' then ctx may not
				 * be NULL. In general, we can't reuse another
				 * provider's context, so we free it now so
				 * we don't leak it.
				 */
				if (ctx != NULL) {
					KCF_CONTEXT_REFRELE((kcf_context_t *)
					    ctx->cc_framework_private);
					encr_kcf_context = NULL;
				}

				error = crypto_encrypt_init_prov(pd, pd->pd_sid,
				    encr_mech, encr_key, &encr_tmpl,
				    (crypto_context_t *)&ctx, NULL);

				if (error == CRYPTO_SUCCESS) {
					encr_kcf_context = (kcf_context_t *)
					    ctx->cc_framework_private;
				}
			}

			KCF_PROV_INCRSTATS(pd, error);

			KCF_PROV_REFRELE(pd);

			if (error != CRYPTO_SUCCESS) {
				/* Can't be CRYPTO_QUEUED. return the failure */
				if (list != NULL)
					kcf_free_triedlist(list);
				if (encr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(encr_kcf_context);

				return (error);
			}

			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
			    &mac_context, NULL);

			if (list != NULL)
				kcf_free_triedlist(list);

			if (error != CRYPTO_SUCCESS) {
				/* Should this be an ASSERT() ? */

				KCF_CONTEXT_REFRELE(encr_kcf_context);
			} else {
				encr_kcf_context = (kcf_context_t *)
				    ctx->cc_framework_private;
				mac_kcf_context = (kcf_context_t *)
				    ((crypto_ctx_t *)mac_context)->
				    cc_framework_private;

				encr_kcf_context->kc_secondctx =
				    mac_kcf_context;
				KCF_CONTEXT_REFHOLD(mac_kcf_context);

				*ctxp = (crypto_context_t)ctx;
			}
		} else {
			/* submit a pure asynchronous request. */
			save_flag = cr->cr_flag;
			cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

			KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
			    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
			    spi_encr_tmpl, spi_mac_tmpl);

			cmops = &(params.rp_u.encrypt_mac_params);

			/* careful! structs assignments */
			cmops->em_encr_mech = *encr_mech;
			/*
			 * cmops->em_encr_mech.cm_type will be set when we get
			 * to the kcf_emulate_dual() routine.
			 */
			cmops->em_framework_encr_mechtype = encr_mech->cm_type;
			cmops->em_mac_mech = *mac_mech;
			/*
			 * cmops->em_mac_mech.cm_type will be set when we know
			 * the MAC provider.
			 */
			cmops->em_framework_mac_mechtype = mac_mech->cm_type;

			/*
			 * A non-NULL ctx->kc_secondctx tells
			 * common_submit_request that this request uses
			 * separate cipher and MAC contexts. That function
			 * will set ctx->kc_secondctx to the new MAC context,
			 * once it gets one.
			 */
			encr_kcf_context->kc_secondctx = encr_kcf_context;

			error = kcf_submit_request(pd, ctx, cr, &params,
			    B_FALSE);

			cr->cr_flag = save_flag;

			if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
				KCF_CONTEXT_REFRELE(encr_kcf_context);
			}
			if (list != NULL)
				kcf_free_triedlist(list);
			*ctxp = (crypto_context_t)ctx;
			KCF_PROV_REFRELE(pd);
			return (error);
		}
	}
	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}

		mac_tmpl_checked = B_TRUE;
	}
	if (ctx == NULL) {
		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
		if (ctx == NULL) {
			if (list != NULL)
				kcf_free_triedlist(list);

			KCF_PROV_REFRELE(pd);
			return (CRYPTO_HOST_MEMORY);
		}
		encr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t lencr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		lencr_mech = *encr_mech;
		lencr_mech.cm_type = prov_encr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_ENCRYPT_MAC_INIT(pd, ctx, &lencr_mech,
		    encr_key, &lmac_mech, mac_key, spi_encr_tmpl, spi_mac_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_INIT,
		    pd->pd_sid, encr_key, mac_key, NULL, NULL, NULL,
		    spi_encr_tmpl, spi_mac_tmpl);

		cmops = &(params.rp_u.encrypt_mac_params);

		/* careful! structs assignments */
		cmops->em_encr_mech = *encr_mech;
		cmops->em_encr_mech.cm_type = prov_encr_mechid;
		cmops->em_framework_encr_mechtype = encr_mech->cm_type;
		cmops->em_mac_mech = *mac_mech;
		cmops->em_mac_mech.cm_type = prov_mac_mechid;
		cmops->em_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}
	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
		if ((IS_RECOVERABLE(error)) &&
		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
			goto retry;

		KCF_CONTEXT_REFRELE(encr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}
/*
 * Continues a multi-part dual encrypt/mac operation.
 */
int
crypto_encrypt_mac_update(crypto_context_t context,
    crypto_data_t *pt, crypto_dual_data_t *ct, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	kcf_req_params_t params;

	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			error = CRYPTO_INVALID_CONTEXT;
			goto out;
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;

		/* First we submit the encryption request */
		if (cr == NULL) {
			/*
			 * 'ct' is always not NULL.
			 * A NULL 'pt' means in-place.
			 */
			if (pt == NULL)
				error = crypto_encrypt_update(context,
				    (crypto_data_t *)ct, NULL, NULL);
			else
				error = crypto_encrypt_update(context, pt,
				    (crypto_data_t *)ct, NULL);

			if (error != CRYPTO_SUCCESS)
				goto out;

			/*
			 * call mac_update when there is data to throw in
			 * the mix. Either an explicitly non-zero ct->dd_len2,
			 * or the last ciphertext portion.
			 */
			save_offset = ct->dd_offset1;
			save_len = ct->dd_len1;
			if (ct->dd_len2 == 0) {
				/*
				 * The previous encrypt step was an
				 * accumulation only and didn't produce any
				 * partial output.
				 */
				if (ct->dd_len1 == 0)
					goto out;
			} else {
				ct->dd_offset1 = ct->dd_offset2;
				ct->dd_len1 = ct->dd_len2;
			}

			error = crypto_mac_update((crypto_context_t)mac_ctx,
			    (crypto_data_t *)ct, NULL);

			ct->dd_offset1 = save_offset;
			ct->dd_len1 = save_len;
		} else {
			/* submit a pure asynchronous request. */
			save_flag = cr->cr_flag;
			cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

			KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
			    pd->pd_sid, NULL, NULL, pt, ct, NULL, NULL, NULL);

			error = kcf_submit_request(pd, ctx, cr, &params,
			    B_FALSE);

			cr->cr_flag = save_flag;
		}
		goto out;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_ENCRYPT_MAC_UPDATE(pd, ctx, pt, ct, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_UPDATE,
		    ctx->cc_session, NULL, NULL, pt, ct, NULL, NULL, NULL);

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}

out:
	return (error);
}
/*
 * Terminates a multi-part dual encrypt/mac operation.
 */
int crypto_encrypt_mac_final(crypto_context_t context, crypto_dual_data_t *ct,
    crypto_data_t *mac, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	kcf_req_params_t params;

	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		crypto_context_t mac_context;
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			return (CRYPTO_INVALID_CONTEXT);
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;
		mac_context = (crypto_context_t)mac_ctx;

		if (cr == NULL) {
			/* Get the last chunk of ciphertext */
			error = crypto_encrypt_final(context,
			    (crypto_data_t *)ct, NULL);

			if (error != CRYPTO_SUCCESS) {
				/*
				 * Needed here, because the caller of
				 * crypto_encrypt_mac_final() lost all
				 * refs to the mac_ctx.
				 */
				crypto_cancel_ctx(mac_context);
				return (error);
			}

			if (ct->dd_len2 > 0) {
				save_offset = ct->dd_offset1;
				save_len = ct->dd_len1;
				ct->dd_offset1 = ct->dd_offset2;
				ct->dd_len1 = ct->dd_len2;

				error = crypto_mac_update(mac_context,
				    (crypto_data_t *)ct, NULL);

				ct->dd_offset1 = save_offset;
				ct->dd_len1 = save_len;

				if (error != CRYPTO_SUCCESS) {
					crypto_cancel_ctx(mac_context);
					return (error);
				}
			}

			/* and finally, collect the MAC */
			error = crypto_mac_final(mac_context, mac, NULL);
		} else {
			/* submit a pure asynchronous request. */
			save_flag = cr->cr_flag;
			cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

			KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
			    pd->pd_sid, NULL, NULL, NULL, ct, mac, NULL, NULL);

			error = kcf_submit_request(pd, ctx, cr, &params,
			    B_FALSE);

			cr->cr_flag = save_flag;
		}
		goto out;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_ENCRYPT_MAC_FINAL(pd, ctx, ct, mac, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_ENCRYPT_MAC_OPS_PARAMS(&params, KCF_OP_FINAL,
		    ctx->cc_session, NULL, NULL, NULL, ct, mac, NULL, NULL);

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}

out:
	/* Release the hold done in kcf_new_ctx() during init step. */
	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
	return (error);
}
/*
 * Performs an atomic dual mac/decrypt operation. The provider and session
 * to use are determined by the KCF dispatcher.
 */
int
crypto_mac_decrypt(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
{
	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_FALSE));
}

int
crypto_mac_decrypt_prov(crypto_provider_t provider, crypto_session_id_t sid,
    crypto_mechanism_t *mac_mech, crypto_mechanism_t *decr_mech,
    crypto_dual_data_t *ct, crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
{
	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
	    crq, B_FALSE));
}

/*
 * Performs an atomic dual mac/decrypt operation. The provider to use
 * is determined by the KCF dispatcher. 'mac' specifies the expected
 * value for the MAC. The decryption is not performed if the computed
 * MAC does not match the expected MAC.
 */
int
crypto_mac_verify_decrypt(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
{
	return (crypto_mac_decrypt_common(mac_mech, decr_mech, ct, mac_key,
	    decr_key, mac_tmpl, decr_tmpl, mac, pt, crq, B_TRUE));
}

int
crypto_mac_verify_decrypt_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq)
{
	return (crypto_mac_decrypt_common_prov(provider, sid, mac_mech,
	    decr_mech, ct, mac_key, decr_key, mac_tmpl, decr_tmpl, mac, pt,
	    crq, B_TRUE));
}
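
/*
 * Illustrative sketch (assumed usage, not from the original source): the
 * verify variant above differs from crypto_mac_decrypt() only in the
 * do_verify flag passed to the common routine, e.g.
 *
 *	rv = crypto_mac_verify_decrypt(&mac_mech, &decr_mech, &ct, mac_key,
 *	    decr_key, NULL, NULL, &mac, &pt, NULL);
 *
 * asks the provider to check the computed MAC against 'mac' before
 * decrypting, so a consumer that must not touch unauthenticated plaintext
 * would use this variant.
 */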
/*
 * Called by both crypto_mac_decrypt() and crypto_mac_verify_decrypt().
 * The MAC is optionally verified to match before the decryption step
 * is called.
 */
static int
crypto_mac_decrypt_common(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
    boolean_t do_verify)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;
	boolean_t decr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	kcf_dual_req_t *next_req = NULL;
	crypto_call_req_t mac_req, *mac_reqp = NULL;

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(decr_mech, decr_key, mac_mech, mac_key,
	    &me, &prov_decr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_DECRYPT_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC,
	    CRYPTO_FG_MAC_ATOMIC | CRYPTO_FG_MAC_DECRYPT_ATOMIC, ct->dd_len2);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		if (next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		return (CRYPTO_MECH_NOT_SUPPORTED);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 */
	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (decr_tmpl != NULL) {
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);
				KCF_PROV_REFRELE(pd);

				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}
		decr_tmpl_checked = B_TRUE;
	}
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		/* Need to emulate with 2 internal calls */

		/* Prepare the call_req to be submitted for the MAC step */

		if (next_req == NULL) {
			/*
			 * allocate, initialize and prepare the
			 * params for the next step only in the
			 * first pass (not on every retry).
			 */
			next_req = kcf_alloc_req(crq);

			if (next_req == NULL) {
				KCF_PROV_REFRELE(pd);
				if (list != NULL)
					kcf_free_triedlist(list);
				return (CRYPTO_HOST_MEMORY);
			}
			KCF_WRAP_DECRYPT_OPS_PARAMS(
			    &(next_req->kr_params), KCF_OP_ATOMIC,
			    NULL, decr_mech, decr_key,
			    (crypto_data_t *)ct, pt, spi_decr_tmpl);
		}

		mac_req.cr_flag = (crq != NULL) ? crq->cr_flag : 0;
		mac_req.cr_flag |= CRYPTO_SETDUAL;
		mac_req.cr_callback_func = kcf_next_req;
		mac_req.cr_callback_arg = next_req;
		mac_reqp = &mac_req;

		/* 'pd' is the decryption provider. */

		if (do_verify)
			error = crypto_mac_verify(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac,
			    (crq == NULL) ? NULL : mac_reqp);
		else
			error = crypto_mac(mac_mech, (crypto_data_t *)ct,
			    mac_key, mac_tmpl, mac,
			    (crq == NULL) ? NULL : mac_reqp);
		switch (error) {
		case CRYPTO_SUCCESS: {

			if (next_req == NULL) {
				saveoffset = ct->dd_offset1;
				savelen = ct->dd_len1;
			} else {
				saveoffset = next_req->kr_saveoffset =
				    ct->dd_offset1;
				savelen = next_req->kr_savelen = ct->dd_len1;

				ASSERT(mac_reqp != NULL);
				mac_req.cr_flag &= ~CRYPTO_SETDUAL;
				mac_req.cr_callback_func = kcf_last_req;
			}

			ct->dd_offset1 = ct->dd_offset2;
			ct->dd_len1 = ct->dd_len2;

			if (CHECK_FASTPATH(crq, pd)) {
				crypto_mechanism_t lmech;

				lmech = *decr_mech;
				KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type,
				    pd, &lmech);

				error = KCF_PROV_DECRYPT_ATOMIC(pd, pd->pd_sid,
				    &lmech, decr_key, (crypto_data_t *)ct,
				    (crypto_data_t *)pt, spi_decr_tmpl,
				    KCF_SWFP_RHNDL(mac_reqp));

				KCF_PROV_INCRSTATS(pd, error);
			} else {
				KCF_WRAP_DECRYPT_OPS_PARAMS(&params,
				    KCF_OP_ATOMIC, pd->pd_sid, decr_mech,
				    decr_key, (crypto_data_t *)ct, pt,
				    spi_decr_tmpl);

				error = kcf_submit_request(pd, NULL,
				    (crq == NULL) ? NULL : mac_reqp,
				    &params, B_FALSE);
			}

			if (error != CRYPTO_QUEUED) {
				KCF_PROV_INCRSTATS(pd, error);
				ct->dd_offset1 = saveoffset;
				ct->dd_len1 = savelen;
			}
			break;
		}

		case CRYPTO_QUEUED:
			if ((crq != NULL) && (crq->cr_flag & CRYPTO_SKIP_REQID))
				crq->cr_reqid = mac_req.cr_reqid;
			break;
		if (IS_RECOVERABLE(error)) {
			if (kcf_insert_triedlist(&list, pd,
			    KCF_KMFLAG(crq)) != NULL)
				goto retry;
		}

		if (error != CRYPTO_QUEUED && next_req != NULL)
			kmem_free(next_req, sizeof (kcf_dual_req_t));
		if (list != NULL)
			kcf_free_triedlist(list);
		KCF_PROV_REFRELE(pd);
		return (error);
	}
	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				if (next_req != NULL)
					kmem_free(next_req,
					    sizeof (kcf_dual_req_t));
				if (list != NULL)
					kcf_free_triedlist(list);
				KCF_PROV_REFRELE(pd);

				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}
	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lmac_mech;
		crypto_mechanism_t ldecr_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		ldecr_mech.cm_type = prov_decr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		if (do_verify)
			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(pd,
			    pd->pd_sid, &lmac_mech, mac_key, &ldecr_mech,
			    decr_key, ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));
		else
			error = KCF_PROV_MAC_DECRYPT_ATOMIC(pd, pd->pd_sid,
			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
		    KCF_OP_ATOMIC, pd->pd_sid, mac_key, decr_key, ct, mac, pt,
		    spi_mac_tmpl, spi_decr_tmpl);

		cmops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		cmops->md_decr_mech = *decr_mech;
		cmops->md_decr_mech.cm_type = prov_decr_mechid;
		cmops->md_framework_decr_mechtype = decr_mech->cm_type;
		cmops->md_mac_mech = *mac_mech;
		cmops->md_mac_mech.cm_type = prov_mac_mechid;
		cmops->md_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, NULL, crq, &params, B_FALSE);
	}
	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED &&
	    IS_RECOVERABLE(error)) {
		/* Add pd to the linked list of providers tried. */
		if (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(crq)) != NULL)
			goto retry;
	}

	if (list != NULL)
		kcf_free_triedlist(list);

	if (next_req != NULL)
		kmem_free(next_req, sizeof (kcf_dual_req_t));
	KCF_PROV_REFRELE(pd);
	return (error);
}
static int
crypto_mac_decrypt_common_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_dual_data_t *ct,
    crypto_key_t *mac_key, crypto_key_t *decr_key,
    crypto_ctx_template_t mac_tmpl, crypto_ctx_template_t decr_tmpl,
    crypto_data_t *mac, crypto_data_t *pt, crypto_call_req_t *crq,
    boolean_t do_verify)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *cmops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		error = kcf_get_hardware_provider(decr_mech->cm_type, decr_key,
		    mac_mech->cm_type, mac_key, pd, &real_provider,
		    CRYPTO_FG_MAC_DECRYPT_ATOMIC);

		if (error != CRYPTO_SUCCESS)
			return (error);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here, and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 */
	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (decr_tmpl != NULL) {
			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				error = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				error = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				error = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				error = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}
	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(crq, pd)) {
		crypto_mechanism_t lmac_mech;
		crypto_mechanism_t ldecr_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &ldecr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		if (do_verify)
			error = KCF_PROV_MAC_VERIFY_DECRYPT_ATOMIC(
			    real_provider, sid, &lmac_mech, mac_key,
			    &ldecr_mech, decr_key, ct, mac, pt, spi_mac_tmpl,
			    spi_decr_tmpl, KCF_SWFP_RHNDL(crq));
		else
			error = KCF_PROV_MAC_DECRYPT_ATOMIC(real_provider, sid,
			    &lmac_mech, mac_key, &ldecr_mech, decr_key,
			    ct, mac, pt, spi_mac_tmpl, spi_decr_tmpl,
			    KCF_SWFP_RHNDL(crq));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params,
		    (do_verify) ? KCF_OP_MAC_VERIFY_DECRYPT_ATOMIC :
		    KCF_OP_ATOMIC, sid, mac_key, decr_key, ct, mac, pt,
		    spi_mac_tmpl, spi_decr_tmpl);

		cmops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		cmops->md_decr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &cmops->md_decr_mech);
		cmops->md_framework_decr_mechtype = decr_mech->cm_type;

		cmops->md_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &cmops->md_mac_mech);
		cmops->md_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(real_provider, NULL, crq, &params,
		    B_FALSE);
	}

out:
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);

	return (error);
}
/*
 * Starts a multi-part dual mac/decrypt operation. The provider to
 * use is determined by the KCF dispatcher.
 */
int
crypto_mac_decrypt_init(crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
    crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
    crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *mdops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;
	crypto_mech_type_t prov_decr_mechid, prov_mac_mechid;
	kcf_prov_tried_t *list = NULL;
	boolean_t decr_tmpl_checked = B_FALSE;
	boolean_t mac_tmpl_checked = B_FALSE;
	crypto_ctx_t *ctx = NULL;
	kcf_context_t *decr_kcf_context = NULL, *mac_kcf_context = NULL;
	crypto_call_flag_t save_flag;

retry:
	/* pd is returned held on success */
	pd = kcf_get_dual_provider(decr_mech, decr_key, mac_mech, mac_key,
	    &me, &prov_decr_mechid,
	    &prov_mac_mechid, &error, list,
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_MAC_DECRYPT, CRYPTO_FG_MAC, 0);
	if (pd == NULL) {
		if (list != NULL)
			kcf_free_triedlist(list);
		return (error);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 *
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
	if ((!decr_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if (decr_tmpl != NULL) {
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);
				if (decr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(decr_kcf_context);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}
		decr_tmpl_checked = B_TRUE;
	}
	if (prov_mac_mechid == CRYPTO_MECH_INVALID) {
		/* Need to emulate with 2 internal calls */

		/*
		 * We avoid code complexity by limiting the pure async.
		 * case to be done using only a SW provider.
		 * XXX - Redo the emulation code below so that we can
		 * remove this limitation.
		 */
		if (cr != NULL && pd->pd_prov_type == CRYPTO_HW_PROVIDER) {
			if ((kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr))

				if (list != NULL)
					kcf_free_triedlist(list);
				if (decr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(decr_kcf_context);
				KCF_PROV_REFRELE(pd);
				return (CRYPTO_HOST_MEMORY);
			}
			goto retry;
		}

		if (ctx == NULL && pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
			ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
			if (ctx == NULL) {
				if (list != NULL)
					kcf_free_triedlist(list);
				if (decr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(decr_kcf_context);
				KCF_PROV_REFRELE(pd);
				return (CRYPTO_HOST_MEMORY);
			}
			decr_kcf_context = (kcf_context_t *)
			    ctx->cc_framework_private;
		}
		/*
		 * Trade-off speed vs avoidance of code complexity and
		 * duplication:
		 * Could do all the combinations of fastpath / synch / asynch
		 * for the decryption and the mac steps. Early attempts
		 * showed the code grew wild and bug-prone, for little gain.
		 * Therefore, the adaptive asynch case is not implemented.
		 * It's either pure synchronous, or pure asynchronous.
		 * We still preserve a fastpath for the pure synchronous
		 * requests to SW providers.
		 */
		if (cr == NULL) {
			crypto_context_t mac_context;

			error = crypto_mac_init(mac_mech, mac_key, mac_tmpl,
			    &mac_context, NULL);
			if (error != CRYPTO_SUCCESS) {
				/* Can't be CRYPTO_QUEUED. return the failure */
				if (list != NULL)
					kcf_free_triedlist(list);
				if (decr_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(decr_kcf_context);

				KCF_PROV_REFRELE(pd);
				return (error);
			}

			if (pd->pd_prov_type == CRYPTO_SW_PROVIDER) {
				crypto_mechanism_t lmech = *decr_mech;

				lmech.cm_type = prov_decr_mechid;

				error = KCF_PROV_DECRYPT_INIT(pd, ctx, &lmech,
				    decr_key, spi_decr_tmpl,
				    KCF_RHNDL(KM_SLEEP));
			} else {
				/*
				 * If we did the 'goto retry' then ctx may not
				 * be NULL. In general, we can't reuse another
				 * provider's context, so we free it now so
				 * we don't leak it.
				 */
				if (ctx != NULL) {
					KCF_CONTEXT_REFRELE((kcf_context_t *)
					    ctx->cc_framework_private);
					decr_kcf_context = NULL;
				}

				error = crypto_decrypt_init_prov(pd, pd->pd_sid,
				    decr_mech, decr_key, &decr_tmpl,
				    (crypto_context_t *)&ctx, NULL);

				if (error == CRYPTO_SUCCESS) {
					decr_kcf_context = (kcf_context_t *)
					    ctx->cc_framework_private;
				}
			}

			KCF_PROV_INCRSTATS(pd, error);

			KCF_PROV_REFRELE(pd);

			if (error != CRYPTO_SUCCESS) {
				/* Can't be CRYPTO_QUEUED. return the failure */
				if (list != NULL)
					kcf_free_triedlist(list);
				if (mac_kcf_context != NULL)
					KCF_CONTEXT_REFRELE(mac_kcf_context);

				return (error);
			}

			mac_kcf_context = (kcf_context_t *)
			    ((crypto_ctx_t *)mac_context)->
			    cc_framework_private;

			decr_kcf_context = (kcf_context_t *)
			    ctx->cc_framework_private;

			/*
			 * Here also, the mac context is second. The callback
			 * case can't overwrite the context returned to
			 * the caller.
			 */
			decr_kcf_context->kc_secondctx = mac_kcf_context;
			KCF_CONTEXT_REFHOLD(mac_kcf_context);

			*ctxp = (crypto_context_t)ctx;
		} else {
			/* submit a pure asynchronous request. */
			save_flag = cr->cr_flag;
			cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

			KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
			    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
			    spi_mac_tmpl, spi_decr_tmpl);

			mdops = &(params.rp_u.mac_decrypt_params);

			/* careful! structs assignments */
			mdops->md_decr_mech = *decr_mech;
			/*
			 * mdops->md_decr_mech.cm_type will be set when we get
			 * to the kcf_emulate_dual() routine.
			 */
			mdops->md_framework_decr_mechtype = decr_mech->cm_type;
			mdops->md_mac_mech = *mac_mech;
			/*
			 * mdops->md_mac_mech.cm_type will be set when we know
			 * the MAC provider.
			 */
			mdops->md_framework_mac_mechtype = mac_mech->cm_type;

			/*
			 * A non-NULL ctx->kc_secondctx tells
			 * common_submit_request that this request uses
			 * separate cipher and MAC contexts. That function
			 * will set the MAC context's kc_secondctx to
			 * this decrypt context.
			 */
			decr_kcf_context->kc_secondctx = decr_kcf_context;

			error = kcf_submit_request(pd, ctx, cr, &params,
			    B_FALSE);

			cr->cr_flag = save_flag;

			if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
				KCF_CONTEXT_REFRELE(decr_kcf_context);
			}
			if (list != NULL)
				kcf_free_triedlist(list);
			*ctxp = (crypto_context_t)ctx;
			KCF_PROV_REFRELE(pd);
			return (error);
		}
	}
	if ((!mac_tmpl_checked) && (pd->pd_prov_type == CRYPTO_SW_PROVIDER)) {
		if ((mac_tmpl != NULL) &&
		    (prov_mac_mechid != CRYPTO_MECH_INVALID)) {
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {

				if (list != NULL)
					kcf_free_triedlist(list);

				KCF_PROV_REFRELE(pd);
				/* Which one is the old one? */
				return (CRYPTO_OLD_CTX_TEMPLATE);
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
		mac_tmpl_checked = B_TRUE;
	}
	if (ctx == NULL) {
		ctx = kcf_new_ctx(cr, pd, pd->pd_sid);
		if (ctx == NULL) {
			error = CRYPTO_HOST_MEMORY;
			if (list != NULL)
				kcf_free_triedlist(list);
			return (CRYPTO_HOST_MEMORY);
		}
		decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t ldecr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		ldecr_mech.cm_type = prov_decr_mechid;
		lmac_mech = *mac_mech;
		lmac_mech.cm_type = prov_mac_mechid;

		error = KCF_PROV_MAC_DECRYPT_INIT(pd, ctx, &lmac_mech,
		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
		    pd->pd_sid, mac_key, decr_key, NULL, NULL, NULL,
		    spi_mac_tmpl, spi_decr_tmpl);

		mdops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		mdops->md_decr_mech = *decr_mech;
		mdops->md_decr_mech.cm_type = prov_decr_mechid;
		mdops->md_framework_decr_mechtype = decr_mech->cm_type;
		mdops->md_mac_mech = *mac_mech;
		mdops->md_mac_mech.cm_type = prov_mac_mechid;
		mdops->md_framework_mac_mechtype = mac_mech->cm_type;

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}
	if (error != CRYPTO_SUCCESS && error != CRYPTO_QUEUED) {
		if ((IS_RECOVERABLE(error)) &&
		    (kcf_insert_triedlist(&list, pd, KCF_KMFLAG(cr)) != NULL))
			goto retry;

		KCF_CONTEXT_REFRELE(decr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

	if (list != NULL)
		kcf_free_triedlist(list);

	KCF_PROV_REFRELE(pd);
	return (error);
}

int
crypto_mac_decrypt_init_prov(crypto_provider_t provider,
    crypto_session_id_t sid, crypto_mechanism_t *mac_mech,
    crypto_mechanism_t *decr_mech, crypto_key_t *mac_key,
    crypto_key_t *decr_key, crypto_ctx_template_t mac_tmpl,
    crypto_ctx_template_t decr_tmpl, crypto_context_t *ctxp,
    crypto_call_req_t *cr)
{
	/*
	 * First try to find a provider for the decryption mechanism, that
	 * is also capable of the MAC mechanism.
	 * We still favor optimizing the costlier decryption.
	 */
	kcf_mech_entry_t *me;
	kcf_provider_desc_t *pd = provider;
	kcf_provider_desc_t *real_provider = pd;
	kcf_ctx_template_t *ctx_decr_tmpl, *ctx_mac_tmpl;
	kcf_req_params_t params;
	kcf_mac_decrypt_ops_params_t *mdops;
	crypto_spi_ctx_template_t spi_decr_tmpl = NULL, spi_mac_tmpl = NULL;

	kcf_context_t *decr_kcf_context = NULL;

	ASSERT(KCF_PROV_REFHELD(pd));

	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER) {
		rv = kcf_get_hardware_provider(decr_mech->cm_type, decr_key,
		    mac_mech->cm_type, mac_key, pd, &real_provider,
		    CRYPTO_FG_MAC_DECRYPT);

		if (rv != CRYPTO_SUCCESS)
			return (rv);
	}
	/*
	 * For SW providers, check the validity of the context template.
	 * It is very rare that the generation number mis-matches, so it
	 * is acceptable to fail here and let the consumer recover by
	 * freeing this tmpl and creating a new one for the key and the
	 * new SW provider.
	 *
	 * Warning! This will need to change when multiple software
	 * providers per mechanism are supported.
	 */
	if (real_provider->pd_prov_type == CRYPTO_SW_PROVIDER) {
		if (decr_tmpl != NULL) {
			if (kcf_get_mech_entry(decr_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_decr_tmpl = (kcf_ctx_template_t *)decr_tmpl;
			if (ctx_decr_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_decr_tmpl = ctx_decr_tmpl->ct_prov_tmpl;
		}

		if (mac_tmpl != NULL) {
			if (kcf_get_mech_entry(mac_mech->cm_type, &me) !=
			    KCF_SUCCESS) {
				rv = CRYPTO_MECHANISM_INVALID;
				goto out;
			}
			ctx_mac_tmpl = (kcf_ctx_template_t *)mac_tmpl;
			if (ctx_mac_tmpl->ct_generation != me->me_gen_swprov) {
				rv = CRYPTO_OLD_CTX_TEMPLATE;
				goto out;
			}
			spi_mac_tmpl = ctx_mac_tmpl->ct_prov_tmpl;
		}
	}

	ctx = kcf_new_ctx(cr, real_provider, sid);
	if (ctx == NULL) {
		rv = CRYPTO_HOST_MEMORY;
		goto out;
	}
	decr_kcf_context = (kcf_context_t *)ctx->cc_framework_private;

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		crypto_mechanism_t ldecr_mech;
		crypto_mechanism_t lmac_mech;

		/* careful! structs assignments */
		ldecr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &ldecr_mech);

		lmac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &lmac_mech);

		rv = KCF_PROV_MAC_DECRYPT_INIT(real_provider, ctx, &lmac_mech,
		    mac_key, &ldecr_mech, decr_key, spi_mac_tmpl, spi_decr_tmpl,
		    KCF_SWFP_RHNDL(cr));

		KCF_PROV_INCRSTATS(pd, rv);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_INIT,
		    sid, mac_key, decr_key, NULL, NULL, NULL,
		    spi_mac_tmpl, spi_decr_tmpl);

		mdops = &(params.rp_u.mac_decrypt_params);

		/* careful! structs assignments */
		mdops->md_decr_mech = *decr_mech;
		KCF_SET_PROVIDER_MECHNUM(decr_mech->cm_type, real_provider,
		    &mdops->md_decr_mech);
		mdops->md_framework_decr_mechtype = decr_mech->cm_type;

		mdops->md_mac_mech = *mac_mech;
		KCF_SET_PROVIDER_MECHNUM(mac_mech->cm_type, real_provider,
		    &mdops->md_mac_mech);
		mdops->md_framework_mac_mechtype = mac_mech->cm_type;

		rv = kcf_submit_request(real_provider, ctx, cr, &params,
		    B_FALSE);
	}

	if (rv != CRYPTO_SUCCESS && rv != CRYPTO_QUEUED) {
		KCF_CONTEXT_REFRELE(decr_kcf_context);
	} else
		*ctxp = (crypto_context_t)ctx;

out:
	if (pd->pd_prov_type == CRYPTO_LOGICAL_PROVIDER)
		KCF_PROV_REFRELE(real_provider);

	return (rv);
}
/*
 * Continues a multi-part dual mac/decrypt operation.
 */
int
crypto_mac_decrypt_update(crypto_context_t context,
    crypto_dual_data_t *ct, crypto_data_t *pt, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	kcf_req_params_t params;

	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			error = CRYPTO_INVALID_CONTEXT;
			goto out;
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;

		/* First we submit the MAC request */
		if (cr == NULL) {
			/*
			 * 'ct' is always not NULL.
			 */
			error = crypto_mac_update((crypto_context_t)mac_ctx,
			    (crypto_data_t *)ct, NULL);

			if (error != CRYPTO_SUCCESS)
				goto out;

			/* Decrypt a different length only when told so */

			save_offset = ct->dd_offset1;
			save_len = ct->dd_len1;

			if (ct->dd_len2 > 0) {
				ct->dd_offset1 = ct->dd_offset2;
				ct->dd_len1 = ct->dd_len2;
			}

			error = crypto_decrypt_update(context,
			    (crypto_data_t *)ct, pt, NULL);

			ct->dd_offset1 = save_offset;
			ct->dd_len1 = save_len;
		} else {
			/* submit a pure asynchronous request. */
			save_flag = cr->cr_flag;
			cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

			KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
			    pd->pd_sid, NULL, NULL, ct, NULL, pt, NULL, NULL);

			error = kcf_submit_request(pd, ctx, cr, &params,
			    B_FALSE);

			cr->cr_flag = save_flag;
		}
		goto out;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_MAC_DECRYPT_UPDATE(pd, ctx, ct, pt, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_UPDATE,
		    ctx->cc_session, NULL, NULL, ct, NULL, pt, NULL, NULL);

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}

out:
	return (error);
}
/*
 * Terminates a multi-part dual mac/decrypt operation.
 */
int
crypto_mac_decrypt_final(crypto_context_t context, crypto_data_t *mac,
    crypto_data_t *pt, crypto_call_req_t *cr)
{
	crypto_ctx_t *ctx = (crypto_ctx_t *)context, *mac_ctx;
	kcf_context_t *kcf_ctx, *kcf_mac_ctx;
	kcf_provider_desc_t *pd;
	kcf_req_params_t params;

	if ((ctx == NULL) ||
	    ((kcf_ctx = (kcf_context_t *)ctx->cc_framework_private) == NULL) ||
	    ((pd = kcf_ctx->kc_prov_desc) == NULL)) {
		return (CRYPTO_INVALID_CONTEXT);
	}

	ASSERT(pd->pd_prov_type != CRYPTO_LOGICAL_PROVIDER);
	if ((kcf_mac_ctx = kcf_ctx->kc_secondctx) != NULL) {
		crypto_call_flag_t save_flag;

		if (kcf_mac_ctx->kc_prov_desc == NULL) {
			error = CRYPTO_INVALID_CONTEXT;
			goto out;
		}
		mac_ctx = &kcf_mac_ctx->kc_glbl_ctx;

		/* First we collect the MAC */
		if (cr == NULL) {

			error = crypto_mac_final((crypto_context_t)mac_ctx,
			    mac, NULL);

			if (error != CRYPTO_SUCCESS) {
				crypto_cancel_ctx(ctx);
			} else {
				/* Get the last chunk of plaintext */
				error = crypto_decrypt_final(context, pt, NULL);
			}
		} else {
			/* submit a pure asynchronous request. */
			save_flag = cr->cr_flag;
			cr->cr_flag |= CRYPTO_ALWAYS_QUEUE;

			KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
			    pd->pd_sid, NULL, NULL, NULL, mac, pt, NULL, NULL);

			error = kcf_submit_request(pd, ctx, cr, &params,
			    B_FALSE);

			cr->cr_flag = save_flag;
		}
		goto out;
	}

	/* The fast path for SW providers. */
	if (CHECK_FASTPATH(cr, pd)) {
		error = KCF_PROV_MAC_DECRYPT_FINAL(pd, ctx, mac, pt, NULL);
		KCF_PROV_INCRSTATS(pd, error);
	} else {
		KCF_WRAP_MAC_DECRYPT_OPS_PARAMS(&params, KCF_OP_FINAL,
		    ctx->cc_session, NULL, NULL, NULL, mac, pt, NULL, NULL);

		error = kcf_submit_request(pd, ctx, cr, &params, B_FALSE);
	}

out:
	/* Release the hold done in kcf_new_ctx() during init step. */
	KCF_CONTEXT_COND_RELEASE(error, kcf_ctx);
	return (error);
}
/*
 * Digest/Encrypt dual operation. Project-private entry point, not part of
 * the public API.
 */
int
crypto_digest_encrypt_update(crypto_context_t digest_ctx,
    crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_call_req_t *crq)
{
	/*
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}

/*
 * Decrypt/Digest dual operation. Project-private entry point, not part of
 * the public API.
 */
int
crypto_decrypt_digest_update(crypto_context_t decryptctx,
    crypto_context_t encrypt_ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_call_req_t *crq)
{
	/*
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}

/*
 * Sign/Encrypt dual operation. Project-private entry point, not part of
 * the public API.
 */
int
crypto_sign_encrypt_update(crypto_context_t sign_ctx,
    crypto_context_t encrypt_ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_call_req_t *crq)
{
	/*
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}

/*
 * Decrypt/Verify dual operation. Project-private entry point, not part of
 * the public API.
 */
int
crypto_decrypt_verify_update(crypto_context_t decrypt_ctx,
    crypto_context_t verify_ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_call_req_t *crq)
{
	/*
	 * core functions needed by ioctl interface missing from impl.h
	 */
	return (CRYPTO_NOT_SUPPORTED);
}