4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or https://opensource.org/licenses/CDDL-1.0.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */
29 #include <sys/zfs_context.h>
30 #include <sys/crypto/common.h>
31 #include <sys/crypto/impl.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/crypto/icp.h>
34 #include <modes/modes.h>
36 #include <aes/aes_impl.h>
37 #include <modes/gcm_impl.h>
40 * Mechanism info structure passed to KCF during registration.
42 static const crypto_mech_info_t aes_mech_info_tab
[] = {
44 {SUN_CKM_AES_CCM
, AES_CCM_MECH_INFO_TYPE
,
45 CRYPTO_FG_ENCRYPT_ATOMIC
| CRYPTO_FG_DECRYPT_ATOMIC
},
47 {SUN_CKM_AES_GCM
, AES_GCM_MECH_INFO_TYPE
,
48 CRYPTO_FG_ENCRYPT_ATOMIC
| CRYPTO_FG_DECRYPT_ATOMIC
},
/* Forward declarations of the local cipher entry points. */
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
static int aes_decrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
60 static const crypto_cipher_ops_t aes_cipher_ops
= {
61 .encrypt_atomic
= aes_encrypt_atomic
,
62 .decrypt_atomic
= aes_decrypt_atomic
/* Forward declarations of the context management entry points. */
static int aes_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int aes_free_context(crypto_ctx_t *);
69 static const crypto_ctx_ops_t aes_ctx_ops
= {
70 .create_ctx_template
= aes_create_ctx_template
,
71 .free_context
= aes_free_context
74 static const crypto_ops_t aes_crypto_ops
= {
80 static const crypto_provider_info_t aes_prov_info
= {
81 "AES Software Provider",
83 sizeof (aes_mech_info_tab
) / sizeof (crypto_mech_info_t
),
/* Handle returned by KCF at registration; 0 while unregistered. */
static crypto_kcf_provider_handle_t aes_prov_handle = 0;
92 /* Determine the fastest available implementation. */
96 /* Register with KCF. If the registration fails, remove the module. */
97 if (crypto_register_provider(&aes_prov_info
, &aes_prov_handle
))
106 /* Unregister from KCF if module is registered */
107 if (aes_prov_handle
!= 0) {
108 if (crypto_unregister_provider(aes_prov_handle
))
118 aes_check_mech_param(crypto_mechanism_t
*mechanism
, aes_ctx_t
**ctx
)
121 boolean_t param_required
= B_TRUE
;
123 void *(*alloc_fun
)(int);
124 int rv
= CRYPTO_SUCCESS
;
126 switch (mechanism
->cm_type
) {
127 case AES_CCM_MECH_INFO_TYPE
:
128 param_len
= sizeof (CK_AES_CCM_PARAMS
);
129 alloc_fun
= ccm_alloc_ctx
;
131 case AES_GCM_MECH_INFO_TYPE
:
132 param_len
= sizeof (CK_AES_GCM_PARAMS
);
133 alloc_fun
= gcm_alloc_ctx
;
136 __builtin_unreachable();
138 if (param_required
&& mechanism
->cm_param
!= NULL
&&
139 mechanism
->cm_param_len
!= param_len
) {
140 rv
= CRYPTO_MECHANISM_PARAM_INVALID
;
143 p
= (alloc_fun
)(KM_SLEEP
);
150 * Initialize key schedules for AES
153 init_keysched(crypto_key_t
*key
, void *newbie
)
155 if (key
->ck_length
< AES_MINBITS
||
156 key
->ck_length
> AES_MAXBITS
) {
157 return (CRYPTO_KEY_SIZE_RANGE
);
160 /* key length must be either 128, 192, or 256 */
161 if ((key
->ck_length
& 63) != 0)
162 return (CRYPTO_KEY_SIZE_RANGE
);
164 aes_init_keysched(key
->ck_data
, key
->ck_length
, newbie
);
165 return (CRYPTO_SUCCESS
);
169 * KCF software provider encrypt entry points.
172 aes_encrypt_atomic(crypto_mechanism_t
*mechanism
,
173 crypto_key_t
*key
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
174 crypto_spi_ctx_template_t
template)
179 size_t length_needed
;
182 memset(&aes_ctx
, 0, sizeof (aes_ctx_t
));
184 ASSERT(ciphertext
!= NULL
);
186 if ((ret
= aes_check_mech_param(mechanism
, NULL
)) != CRYPTO_SUCCESS
)
189 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
191 if (ret
!= CRYPTO_SUCCESS
)
194 switch (mechanism
->cm_type
) {
195 case AES_CCM_MECH_INFO_TYPE
:
196 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_mac_len
;
198 case AES_GCM_MECH_INFO_TYPE
:
199 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_tag_len
;
202 __builtin_unreachable();
205 /* return size of buffer needed to store output */
206 if (ciphertext
->cd_length
< length_needed
) {
207 ciphertext
->cd_length
= length_needed
;
208 ret
= CRYPTO_BUFFER_TOO_SMALL
;
212 saved_offset
= ciphertext
->cd_offset
;
213 saved_length
= ciphertext
->cd_length
;
216 * Do an update on the specified input data.
218 switch (plaintext
->cd_format
) {
219 case CRYPTO_DATA_RAW
:
220 ret
= crypto_update_iov(&aes_ctx
, plaintext
, ciphertext
,
221 aes_encrypt_contiguous_blocks
);
223 case CRYPTO_DATA_UIO
:
224 ret
= crypto_update_uio(&aes_ctx
, plaintext
, ciphertext
,
225 aes_encrypt_contiguous_blocks
);
228 ret
= CRYPTO_ARGUMENTS_BAD
;
231 if (ret
== CRYPTO_SUCCESS
) {
232 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
233 ret
= ccm_encrypt_final((ccm_ctx_t
*)&aes_ctx
,
234 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
236 if (ret
!= CRYPTO_SUCCESS
)
238 ASSERT(aes_ctx
.ac_remainder_len
== 0);
239 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
) {
240 ret
= gcm_encrypt_final((gcm_ctx_t
*)&aes_ctx
,
241 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
242 aes_copy_block
, aes_xor_block
);
243 if (ret
!= CRYPTO_SUCCESS
)
245 ASSERT(aes_ctx
.ac_remainder_len
== 0);
247 ASSERT(aes_ctx
.ac_remainder_len
== 0);
250 if (plaintext
!= ciphertext
) {
251 ciphertext
->cd_length
=
252 ciphertext
->cd_offset
- saved_offset
;
255 ciphertext
->cd_length
= saved_length
;
257 ciphertext
->cd_offset
= saved_offset
;
260 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
261 memset(aes_ctx
.ac_keysched
, 0, aes_ctx
.ac_keysched_len
);
262 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
264 if (aes_ctx
.ac_flags
& GCM_MODE
) {
265 gcm_clear_ctx((gcm_ctx_t
*)&aes_ctx
);
271 aes_decrypt_atomic(crypto_mechanism_t
*mechanism
,
272 crypto_key_t
*key
, crypto_data_t
*ciphertext
, crypto_data_t
*plaintext
,
273 crypto_spi_ctx_template_t
template)
278 size_t length_needed
;
281 memset(&aes_ctx
, 0, sizeof (aes_ctx_t
));
283 ASSERT(plaintext
!= NULL
);
285 if ((ret
= aes_check_mech_param(mechanism
, NULL
)) != CRYPTO_SUCCESS
)
288 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
290 if (ret
!= CRYPTO_SUCCESS
)
293 switch (mechanism
->cm_type
) {
294 case AES_CCM_MECH_INFO_TYPE
:
295 length_needed
= aes_ctx
.ac_data_len
;
297 case AES_GCM_MECH_INFO_TYPE
:
298 length_needed
= ciphertext
->cd_length
- aes_ctx
.ac_tag_len
;
301 __builtin_unreachable();
304 /* return size of buffer needed to store output */
305 if (plaintext
->cd_length
< length_needed
) {
306 plaintext
->cd_length
= length_needed
;
307 ret
= CRYPTO_BUFFER_TOO_SMALL
;
311 saved_offset
= plaintext
->cd_offset
;
312 saved_length
= plaintext
->cd_length
;
315 * Do an update on the specified input data.
317 switch (ciphertext
->cd_format
) {
318 case CRYPTO_DATA_RAW
:
319 ret
= crypto_update_iov(&aes_ctx
, ciphertext
, plaintext
,
320 aes_decrypt_contiguous_blocks
);
322 case CRYPTO_DATA_UIO
:
323 ret
= crypto_update_uio(&aes_ctx
, ciphertext
, plaintext
,
324 aes_decrypt_contiguous_blocks
);
327 ret
= CRYPTO_ARGUMENTS_BAD
;
330 if (ret
== CRYPTO_SUCCESS
) {
331 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
332 ASSERT(aes_ctx
.ac_processed_data_len
333 == aes_ctx
.ac_data_len
);
334 ASSERT(aes_ctx
.ac_processed_mac_len
335 == aes_ctx
.ac_mac_len
);
336 ret
= ccm_decrypt_final((ccm_ctx_t
*)&aes_ctx
,
337 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
338 aes_copy_block
, aes_xor_block
);
339 ASSERT(aes_ctx
.ac_remainder_len
== 0);
340 if ((ret
== CRYPTO_SUCCESS
) &&
341 (ciphertext
!= plaintext
)) {
342 plaintext
->cd_length
=
343 plaintext
->cd_offset
- saved_offset
;
345 plaintext
->cd_length
= saved_length
;
347 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
) {
348 ret
= gcm_decrypt_final((gcm_ctx_t
*)&aes_ctx
,
349 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
351 ASSERT(aes_ctx
.ac_remainder_len
== 0);
352 if ((ret
== CRYPTO_SUCCESS
) &&
353 (ciphertext
!= plaintext
)) {
354 plaintext
->cd_length
=
355 plaintext
->cd_offset
- saved_offset
;
357 plaintext
->cd_length
= saved_length
;
360 __builtin_unreachable();
362 plaintext
->cd_length
= saved_length
;
364 plaintext
->cd_offset
= saved_offset
;
367 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
368 memset(aes_ctx
.ac_keysched
, 0, aes_ctx
.ac_keysched_len
);
369 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
372 if (aes_ctx
.ac_flags
& CCM_MODE
) {
373 if (aes_ctx
.ac_pt_buf
!= NULL
) {
374 vmem_free(aes_ctx
.ac_pt_buf
, aes_ctx
.ac_data_len
);
376 } else if (aes_ctx
.ac_flags
& GCM_MODE
) {
377 gcm_clear_ctx((gcm_ctx_t
*)&aes_ctx
);
384 * KCF software provider context template entry points.
387 aes_create_ctx_template(crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
388 crypto_spi_ctx_template_t
*tmpl
, size_t *tmpl_size
)
394 if (mechanism
->cm_type
!= AES_CCM_MECH_INFO_TYPE
&&
395 mechanism
->cm_type
!= AES_GCM_MECH_INFO_TYPE
)
396 return (CRYPTO_MECHANISM_INVALID
);
398 if ((keysched
= aes_alloc_keysched(&size
, KM_SLEEP
)) == NULL
) {
399 return (CRYPTO_HOST_MEMORY
);
403 * Initialize key schedule. Key length information is stored
406 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
407 memset(keysched
, 0, size
);
408 kmem_free(keysched
, size
);
415 return (CRYPTO_SUCCESS
);
420 aes_free_context(crypto_ctx_t
*ctx
)
422 aes_ctx_t
*aes_ctx
= ctx
->cc_provider_private
;
424 if (aes_ctx
!= NULL
) {
425 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
426 ASSERT(aes_ctx
->ac_keysched_len
!= 0);
427 memset(aes_ctx
->ac_keysched
, 0,
428 aes_ctx
->ac_keysched_len
);
429 kmem_free(aes_ctx
->ac_keysched
,
430 aes_ctx
->ac_keysched_len
);
432 crypto_free_mode_ctx(aes_ctx
);
433 ctx
->cc_provider_private
= NULL
;
436 return (CRYPTO_SUCCESS
);
441 aes_common_init_ctx(aes_ctx_t
*aes_ctx
, crypto_spi_ctx_template_t
*template,
442 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
, int kmflag
,
443 boolean_t is_encrypt_init
)
445 int rv
= CRYPTO_SUCCESS
;
449 if (template == NULL
) {
450 if ((keysched
= aes_alloc_keysched(&size
, kmflag
)) == NULL
)
451 return (CRYPTO_HOST_MEMORY
);
453 * Initialize key schedule.
454 * Key length is stored in the key.
456 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
457 kmem_free(keysched
, size
);
461 aes_ctx
->ac_flags
|= PROVIDER_OWNS_KEY_SCHEDULE
;
462 aes_ctx
->ac_keysched_len
= size
;
466 aes_ctx
->ac_keysched
= keysched
;
468 switch (mechanism
->cm_type
) {
469 case AES_CCM_MECH_INFO_TYPE
:
470 if (mechanism
->cm_param
== NULL
||
471 mechanism
->cm_param_len
!= sizeof (CK_AES_CCM_PARAMS
)) {
472 return (CRYPTO_MECHANISM_PARAM_INVALID
);
474 rv
= ccm_init_ctx((ccm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
475 kmflag
, is_encrypt_init
, AES_BLOCK_LEN
, aes_encrypt_block
,
478 case AES_GCM_MECH_INFO_TYPE
:
479 if (mechanism
->cm_param
== NULL
||
480 mechanism
->cm_param_len
!= sizeof (CK_AES_GCM_PARAMS
)) {
481 return (CRYPTO_MECHANISM_PARAM_INVALID
);
483 rv
= gcm_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
484 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
489 if (rv
!= CRYPTO_SUCCESS
) {
490 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
491 memset(keysched
, 0, size
);
492 kmem_free(keysched
, size
);