4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
26 * AES provider for the Kernel Cryptographic Framework (KCF)
29 #include <sys/zfs_context.h>
30 #include <sys/crypto/common.h>
31 #include <sys/crypto/impl.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/crypto/icp.h>
34 #include <modes/modes.h>
36 #include <aes/aes_impl.h>
37 #include <modes/gcm_impl.h>
40 * Mechanism info structure passed to KCF during registration.
42 static const crypto_mech_info_t aes_mech_info_tab
[] = {
44 {SUN_CKM_AES_ECB
, AES_ECB_MECH_INFO_TYPE
,
45 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
46 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
47 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
49 {SUN_CKM_AES_CBC
, AES_CBC_MECH_INFO_TYPE
,
50 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
51 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
52 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
54 {SUN_CKM_AES_CTR
, AES_CTR_MECH_INFO_TYPE
,
55 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
56 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
57 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
59 {SUN_CKM_AES_CCM
, AES_CCM_MECH_INFO_TYPE
,
60 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
61 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
62 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
64 {SUN_CKM_AES_GCM
, AES_GCM_MECH_INFO_TYPE
,
65 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
66 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
67 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
69 {SUN_CKM_AES_GMAC
, AES_GMAC_MECH_INFO_TYPE
,
70 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
71 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
|
72 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
|
73 CRYPTO_FG_SIGN
| CRYPTO_FG_SIGN_ATOMIC
|
74 CRYPTO_FG_VERIFY
| CRYPTO_FG_VERIFY_ATOMIC
,
75 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
78 static void aes_provider_status(crypto_provider_handle_t
, uint_t
*);
80 static const crypto_control_ops_t aes_control_ops
= {
84 static int aes_encrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
85 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
86 static int aes_decrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
87 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
88 static int aes_common_init(crypto_ctx_t
*, crypto_mechanism_t
*,
89 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
, boolean_t
);
90 static int aes_common_init_ctx(aes_ctx_t
*, crypto_spi_ctx_template_t
*,
91 crypto_mechanism_t
*, crypto_key_t
*, int, boolean_t
);
92 static int aes_encrypt_final(crypto_ctx_t
*, crypto_data_t
*,
94 static int aes_decrypt_final(crypto_ctx_t
*, crypto_data_t
*,
97 static int aes_encrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
99 static int aes_encrypt_update(crypto_ctx_t
*, crypto_data_t
*,
100 crypto_data_t
*, crypto_req_handle_t
);
101 static int aes_encrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
102 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
103 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
105 static int aes_decrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
106 crypto_req_handle_t
);
107 static int aes_decrypt_update(crypto_ctx_t
*, crypto_data_t
*,
108 crypto_data_t
*, crypto_req_handle_t
);
109 static int aes_decrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
110 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
111 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
113 static const crypto_cipher_ops_t aes_cipher_ops
= {
114 .encrypt_init
= aes_encrypt_init
,
115 .encrypt
= aes_encrypt
,
116 .encrypt_update
= aes_encrypt_update
,
117 .encrypt_final
= aes_encrypt_final
,
118 .encrypt_atomic
= aes_encrypt_atomic
,
119 .decrypt_init
= aes_decrypt_init
,
120 .decrypt
= aes_decrypt
,
121 .decrypt_update
= aes_decrypt_update
,
122 .decrypt_final
= aes_decrypt_final
,
123 .decrypt_atomic
= aes_decrypt_atomic
126 static int aes_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
127 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
128 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
129 static int aes_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
130 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
131 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
133 static const crypto_mac_ops_t aes_mac_ops
= {
138 .mac_atomic
= aes_mac_atomic
,
139 .mac_verify_atomic
= aes_mac_verify_atomic
142 static int aes_create_ctx_template(crypto_provider_handle_t
,
143 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
144 size_t *, crypto_req_handle_t
);
145 static int aes_free_context(crypto_ctx_t
*);
147 static const crypto_ctx_ops_t aes_ctx_ops
= {
148 .create_ctx_template
= aes_create_ctx_template
,
149 .free_context
= aes_free_context
/*
 * NOTE(review): the initializers of aes_crypto_ops and aes_prov_info
 * below are truncated in this extract -- most member lines are
 * missing.  Do not edit in place; recover the complete definitions
 * from revision control before changing them.
 */
152 static const crypto_ops_t aes_crypto_ops
= {{{{{
169 static const crypto_provider_info_t aes_prov_info
= {{{{
170 CRYPTO_SPI_VERSION_1
,
171 "AES Software Provider",
175 sizeof (aes_mech_info_tab
) / sizeof (crypto_mech_info_t
),
179 static crypto_kcf_provider_handle_t aes_prov_handle
= 0;
180 static crypto_data_t null_crypto_data
= { CRYPTO_DATA_RAW
};
/*
 * NOTE(review): the following lines are fragments of the module
 * init/fini routines; the function signatures and most of their
 * bodies are missing from this extract.  Recover the complete
 * definitions from revision control before editing.
 */
185 /* Determine the fastest available implementation. */
189 /* Register with KCF. If the registration fails, remove the module. */
190 if (crypto_register_provider(&aes_prov_info
, &aes_prov_handle
))
199 /* Unregister from KCF if module is registered */
200 if (aes_prov_handle
!= 0) {
201 if (crypto_unregister_provider(aes_prov_handle
))
211 aes_check_mech_param(crypto_mechanism_t
*mechanism
, aes_ctx_t
**ctx
, int kmflag
)
214 boolean_t param_required
= B_TRUE
;
216 void *(*alloc_fun
)(int);
217 int rv
= CRYPTO_SUCCESS
;
219 switch (mechanism
->cm_type
) {
220 case AES_ECB_MECH_INFO_TYPE
:
221 param_required
= B_FALSE
;
222 alloc_fun
= ecb_alloc_ctx
;
224 case AES_CBC_MECH_INFO_TYPE
:
225 param_len
= AES_BLOCK_LEN
;
226 alloc_fun
= cbc_alloc_ctx
;
228 case AES_CTR_MECH_INFO_TYPE
:
229 param_len
= sizeof (CK_AES_CTR_PARAMS
);
230 alloc_fun
= ctr_alloc_ctx
;
232 case AES_CCM_MECH_INFO_TYPE
:
233 param_len
= sizeof (CK_AES_CCM_PARAMS
);
234 alloc_fun
= ccm_alloc_ctx
;
236 case AES_GCM_MECH_INFO_TYPE
:
237 param_len
= sizeof (CK_AES_GCM_PARAMS
);
238 alloc_fun
= gcm_alloc_ctx
;
240 case AES_GMAC_MECH_INFO_TYPE
:
241 param_len
= sizeof (CK_AES_GMAC_PARAMS
);
242 alloc_fun
= gmac_alloc_ctx
;
245 rv
= CRYPTO_MECHANISM_INVALID
;
248 if (param_required
&& mechanism
->cm_param
!= NULL
&&
249 mechanism
->cm_param_len
!= param_len
) {
250 rv
= CRYPTO_MECHANISM_PARAM_INVALID
;
253 p
= (alloc_fun
)(kmflag
);
260 * Initialize key schedules for AES
263 init_keysched(crypto_key_t
*key
, void *newbie
)
266 * Only keys by value are supported by this module.
268 switch (key
->ck_format
) {
270 if (key
->ck_length
< AES_MINBITS
||
271 key
->ck_length
> AES_MAXBITS
) {
272 return (CRYPTO_KEY_SIZE_RANGE
);
275 /* key length must be either 128, 192, or 256 */
276 if ((key
->ck_length
& 63) != 0)
277 return (CRYPTO_KEY_SIZE_RANGE
);
280 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
283 aes_init_keysched(key
->ck_data
, key
->ck_length
, newbie
);
284 return (CRYPTO_SUCCESS
);
288 * KCF software provider control entry points.
291 aes_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
294 *status
= CRYPTO_PROVIDER_READY
;
298 aes_encrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
299 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
300 crypto_req_handle_t req
)
302 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_TRUE
));
306 aes_decrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
307 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
308 crypto_req_handle_t req
)
310 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_FALSE
));
316 * KCF software provider encrypt entry points.
319 aes_common_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
320 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
321 crypto_req_handle_t req
, boolean_t is_encrypt_init
)
328 * Only keys by value are supported by this module.
330 if (key
->ck_format
!= CRYPTO_KEY_RAW
) {
331 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
334 kmflag
= crypto_kmflag(req
);
335 if ((rv
= aes_check_mech_param(mechanism
, &aes_ctx
, kmflag
))
339 rv
= aes_common_init_ctx(aes_ctx
, template, mechanism
, key
, kmflag
,
341 if (rv
!= CRYPTO_SUCCESS
) {
342 crypto_free_mode_ctx(aes_ctx
);
346 ctx
->cc_provider_private
= aes_ctx
;
348 return (CRYPTO_SUCCESS
);
352 aes_copy_block64(uint8_t *in
, uint64_t *out
)
354 if (IS_P2ALIGNED(in
, sizeof (uint64_t))) {
355 /* LINTED: pointer alignment */
356 out
[0] = *(uint64_t *)&in
[0];
357 /* LINTED: pointer alignment */
358 out
[1] = *(uint64_t *)&in
[8];
360 uint8_t *iv8
= (uint8_t *)&out
[0];
362 AES_COPY_BLOCK(in
, iv8
);
368 aes_encrypt(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
369 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
371 int ret
= CRYPTO_FAILED
;
374 size_t saved_length
, saved_offset
, length_needed
;
376 ASSERT(ctx
->cc_provider_private
!= NULL
);
377 aes_ctx
= ctx
->cc_provider_private
;
380 * For block ciphers, plaintext must be a multiple of AES block size.
381 * This test is only valid for ciphers whose blocksize is a power of 2.
383 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
384 == 0) && (plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
385 return (CRYPTO_DATA_LEN_RANGE
);
387 ASSERT(ciphertext
!= NULL
);
390 * We need to just return the length needed to store the output.
391 * We should not destroy the context for the following case.
393 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
395 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_mac_len
;
398 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_tag_len
;
401 if (plaintext
->cd_length
!= 0)
402 return (CRYPTO_ARGUMENTS_BAD
);
404 length_needed
= aes_ctx
->ac_tag_len
;
407 length_needed
= plaintext
->cd_length
;
410 if (ciphertext
->cd_length
< length_needed
) {
411 ciphertext
->cd_length
= length_needed
;
412 return (CRYPTO_BUFFER_TOO_SMALL
);
415 saved_length
= ciphertext
->cd_length
;
416 saved_offset
= ciphertext
->cd_offset
;
419 * Do an update on the specified input data.
421 ret
= aes_encrypt_update(ctx
, plaintext
, ciphertext
, req
);
422 if (ret
!= CRYPTO_SUCCESS
) {
427 * For CCM mode, aes_ccm_encrypt_final() will take care of any
428 * left-over unprocessed data, and compute the MAC
430 if (aes_ctx
->ac_flags
& CCM_MODE
) {
432 * ccm_encrypt_final() will compute the MAC and append
433 * it to existing ciphertext. So, need to adjust the left over
434 * length value accordingly
437 /* order of following 2 lines MUST not be reversed */
438 ciphertext
->cd_offset
= ciphertext
->cd_length
;
439 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
440 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, ciphertext
,
441 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
442 if (ret
!= CRYPTO_SUCCESS
) {
446 if (plaintext
!= ciphertext
) {
447 ciphertext
->cd_length
=
448 ciphertext
->cd_offset
- saved_offset
;
450 ciphertext
->cd_offset
= saved_offset
;
451 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
453 * gcm_encrypt_final() will compute the MAC and append
454 * it to existing ciphertext. So, need to adjust the left over
455 * length value accordingly
458 /* order of following 2 lines MUST not be reversed */
459 ciphertext
->cd_offset
= ciphertext
->cd_length
;
460 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
461 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, ciphertext
,
462 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
464 if (ret
!= CRYPTO_SUCCESS
) {
468 if (plaintext
!= ciphertext
) {
469 ciphertext
->cd_length
=
470 ciphertext
->cd_offset
- saved_offset
;
472 ciphertext
->cd_offset
= saved_offset
;
475 ASSERT(aes_ctx
->ac_remainder_len
== 0);
476 (void) aes_free_context(ctx
);
483 aes_decrypt(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
484 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
486 int ret
= CRYPTO_FAILED
;
490 size_t saved_length
, length_needed
;
492 ASSERT(ctx
->cc_provider_private
!= NULL
);
493 aes_ctx
= ctx
->cc_provider_private
;
496 * For block ciphers, plaintext must be a multiple of AES block size.
497 * This test is only valid for ciphers whose blocksize is a power of 2.
499 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
500 == 0) && (ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0) {
501 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
504 ASSERT(plaintext
!= NULL
);
507 * Return length needed to store the output.
508 * Do not destroy context when plaintext buffer is too small.
510 * CCM: plaintext is MAC len smaller than cipher text
511 * GCM: plaintext is TAG len smaller than cipher text
512 * GMAC: plaintext length must be zero
514 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
516 length_needed
= aes_ctx
->ac_processed_data_len
;
519 length_needed
= ciphertext
->cd_length
- aes_ctx
->ac_tag_len
;
522 if (plaintext
->cd_length
!= 0)
523 return (CRYPTO_ARGUMENTS_BAD
);
528 length_needed
= ciphertext
->cd_length
;
531 if (plaintext
->cd_length
< length_needed
) {
532 plaintext
->cd_length
= length_needed
;
533 return (CRYPTO_BUFFER_TOO_SMALL
);
536 saved_offset
= plaintext
->cd_offset
;
537 saved_length
= plaintext
->cd_length
;
540 * Do an update on the specified input data.
542 ret
= aes_decrypt_update(ctx
, ciphertext
, plaintext
, req
);
543 if (ret
!= CRYPTO_SUCCESS
) {
547 if (aes_ctx
->ac_flags
& CCM_MODE
) {
548 ASSERT(aes_ctx
->ac_processed_data_len
== aes_ctx
->ac_data_len
);
549 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
551 /* order of following 2 lines MUST not be reversed */
552 plaintext
->cd_offset
= plaintext
->cd_length
;
553 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
555 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, plaintext
,
556 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
558 if (ret
== CRYPTO_SUCCESS
) {
559 if (plaintext
!= ciphertext
) {
560 plaintext
->cd_length
=
561 plaintext
->cd_offset
- saved_offset
;
564 plaintext
->cd_length
= saved_length
;
567 plaintext
->cd_offset
= saved_offset
;
568 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
569 /* order of following 2 lines MUST not be reversed */
570 plaintext
->cd_offset
= plaintext
->cd_length
;
571 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
573 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, plaintext
,
574 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
575 if (ret
== CRYPTO_SUCCESS
) {
576 if (plaintext
!= ciphertext
) {
577 plaintext
->cd_length
=
578 plaintext
->cd_offset
- saved_offset
;
581 plaintext
->cd_length
= saved_length
;
584 plaintext
->cd_offset
= saved_offset
;
587 ASSERT(aes_ctx
->ac_remainder_len
== 0);
590 (void) aes_free_context(ctx
);
597 aes_encrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
598 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
602 size_t saved_length
, out_len
;
603 int ret
= CRYPTO_SUCCESS
;
606 ASSERT(ctx
->cc_provider_private
!= NULL
);
607 aes_ctx
= ctx
->cc_provider_private
;
609 ASSERT(ciphertext
!= NULL
);
611 /* compute number of bytes that will hold the ciphertext */
612 out_len
= aes_ctx
->ac_remainder_len
;
613 out_len
+= plaintext
->cd_length
;
614 out_len
&= ~(AES_BLOCK_LEN
- 1);
616 /* return length needed to store the output */
617 if (ciphertext
->cd_length
< out_len
) {
618 ciphertext
->cd_length
= out_len
;
619 return (CRYPTO_BUFFER_TOO_SMALL
);
622 saved_offset
= ciphertext
->cd_offset
;
623 saved_length
= ciphertext
->cd_length
;
626 * Do the AES update on the specified input data.
628 switch (plaintext
->cd_format
) {
629 case CRYPTO_DATA_RAW
:
630 ret
= crypto_update_iov(ctx
->cc_provider_private
,
631 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
634 case CRYPTO_DATA_UIO
:
635 ret
= crypto_update_uio(ctx
->cc_provider_private
,
636 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
640 ret
= CRYPTO_ARGUMENTS_BAD
;
644 * Since AES counter mode is a stream cipher, we call
645 * ctr_mode_final() to pick up any remaining bytes.
646 * It is an internal function that does not destroy
647 * the context like *normal* final routines.
649 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
650 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
,
651 ciphertext
, aes_encrypt_block
);
654 if (ret
== CRYPTO_SUCCESS
) {
655 if (plaintext
!= ciphertext
)
656 ciphertext
->cd_length
=
657 ciphertext
->cd_offset
- saved_offset
;
659 ciphertext
->cd_length
= saved_length
;
661 ciphertext
->cd_offset
= saved_offset
;
668 aes_decrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
669 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
672 size_t saved_length
, out_len
;
673 int ret
= CRYPTO_SUCCESS
;
676 ASSERT(ctx
->cc_provider_private
!= NULL
);
677 aes_ctx
= ctx
->cc_provider_private
;
679 ASSERT(plaintext
!= NULL
);
682 * Compute number of bytes that will hold the plaintext.
683 * This is not necessary for CCM, GCM, and GMAC since these
684 * mechanisms never return plaintext for update operations.
686 if ((aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
687 out_len
= aes_ctx
->ac_remainder_len
;
688 out_len
+= ciphertext
->cd_length
;
689 out_len
&= ~(AES_BLOCK_LEN
- 1);
691 /* return length needed to store the output */
692 if (plaintext
->cd_length
< out_len
) {
693 plaintext
->cd_length
= out_len
;
694 return (CRYPTO_BUFFER_TOO_SMALL
);
698 saved_offset
= plaintext
->cd_offset
;
699 saved_length
= plaintext
->cd_length
;
701 if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
))
702 gcm_set_kmflag((gcm_ctx_t
*)aes_ctx
, crypto_kmflag(req
));
705 * Do the AES update on the specified input data.
707 switch (ciphertext
->cd_format
) {
708 case CRYPTO_DATA_RAW
:
709 ret
= crypto_update_iov(ctx
->cc_provider_private
,
710 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
713 case CRYPTO_DATA_UIO
:
714 ret
= crypto_update_uio(ctx
->cc_provider_private
,
715 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
719 ret
= CRYPTO_ARGUMENTS_BAD
;
723 * Since AES counter mode is a stream cipher, we call
724 * ctr_mode_final() to pick up any remaining bytes.
725 * It is an internal function that does not destroy
726 * the context like *normal* final routines.
728 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
729 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, plaintext
,
731 if (ret
== CRYPTO_DATA_LEN_RANGE
)
732 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
735 if (ret
== CRYPTO_SUCCESS
) {
736 if (ciphertext
!= plaintext
)
737 plaintext
->cd_length
=
738 plaintext
->cd_offset
- saved_offset
;
740 plaintext
->cd_length
= saved_length
;
742 plaintext
->cd_offset
= saved_offset
;
749 aes_encrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
750 crypto_req_handle_t req
)
756 ASSERT(ctx
->cc_provider_private
!= NULL
);
757 aes_ctx
= ctx
->cc_provider_private
;
759 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
760 data
->cd_format
!= CRYPTO_DATA_UIO
) {
761 return (CRYPTO_ARGUMENTS_BAD
);
764 if (aes_ctx
->ac_flags
& CTR_MODE
) {
765 if (aes_ctx
->ac_remainder_len
> 0) {
766 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
768 if (ret
!= CRYPTO_SUCCESS
)
771 } else if (aes_ctx
->ac_flags
& CCM_MODE
) {
772 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
773 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
774 if (ret
!= CRYPTO_SUCCESS
) {
777 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
778 size_t saved_offset
= data
->cd_offset
;
780 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
781 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
783 if (ret
!= CRYPTO_SUCCESS
) {
786 data
->cd_length
= data
->cd_offset
- saved_offset
;
787 data
->cd_offset
= saved_offset
;
790 * There must be no unprocessed plaintext.
791 * This happens if the length of the last data is
792 * not a multiple of the AES block length.
794 if (aes_ctx
->ac_remainder_len
> 0) {
795 return (CRYPTO_DATA_LEN_RANGE
);
800 (void) aes_free_context(ctx
);
802 return (CRYPTO_SUCCESS
);
806 aes_decrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
807 crypto_req_handle_t req
)
815 ASSERT(ctx
->cc_provider_private
!= NULL
);
816 aes_ctx
= ctx
->cc_provider_private
;
818 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
819 data
->cd_format
!= CRYPTO_DATA_UIO
) {
820 return (CRYPTO_ARGUMENTS_BAD
);
824 * There must be no unprocessed ciphertext.
825 * This happens if the length of the last ciphertext is
826 * not a multiple of the AES block length.
828 if (aes_ctx
->ac_remainder_len
> 0) {
829 if ((aes_ctx
->ac_flags
& CTR_MODE
) == 0)
830 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
832 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
834 if (ret
== CRYPTO_DATA_LEN_RANGE
)
835 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
836 if (ret
!= CRYPTO_SUCCESS
)
841 if (aes_ctx
->ac_flags
& CCM_MODE
) {
843 * This is where all the plaintext is returned, make sure
844 * the plaintext buffer is big enough
846 size_t pt_len
= aes_ctx
->ac_data_len
;
847 if (data
->cd_length
< pt_len
) {
848 data
->cd_length
= pt_len
;
849 return (CRYPTO_BUFFER_TOO_SMALL
);
852 ASSERT(aes_ctx
->ac_processed_data_len
== pt_len
);
853 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
854 saved_offset
= data
->cd_offset
;
855 saved_length
= data
->cd_length
;
856 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
857 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
859 if (ret
== CRYPTO_SUCCESS
) {
860 data
->cd_length
= data
->cd_offset
- saved_offset
;
862 data
->cd_length
= saved_length
;
865 data
->cd_offset
= saved_offset
;
866 if (ret
!= CRYPTO_SUCCESS
) {
869 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
871 * This is where all the plaintext is returned, make sure
872 * the plaintext buffer is big enough
874 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)aes_ctx
;
875 size_t pt_len
= ctx
->gcm_processed_data_len
- ctx
->gcm_tag_len
;
877 if (data
->cd_length
< pt_len
) {
878 data
->cd_length
= pt_len
;
879 return (CRYPTO_BUFFER_TOO_SMALL
);
882 saved_offset
= data
->cd_offset
;
883 saved_length
= data
->cd_length
;
884 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
885 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
886 if (ret
== CRYPTO_SUCCESS
) {
887 data
->cd_length
= data
->cd_offset
- saved_offset
;
889 data
->cd_length
= saved_length
;
892 data
->cd_offset
= saved_offset
;
893 if (ret
!= CRYPTO_SUCCESS
) {
899 if ((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
903 (void) aes_free_context(ctx
);
905 return (CRYPTO_SUCCESS
);
909 aes_encrypt_atomic(crypto_provider_handle_t provider
,
910 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
911 crypto_key_t
*key
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
912 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
914 (void) provider
, (void) session_id
;
915 aes_ctx_t aes_ctx
; /* on the stack */
918 size_t length_needed
;
921 ASSERT(ciphertext
!= NULL
);
924 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
925 * be a multiple of AES block size.
927 switch (mechanism
->cm_type
) {
928 case AES_CTR_MECH_INFO_TYPE
:
929 case AES_CCM_MECH_INFO_TYPE
:
930 case AES_GCM_MECH_INFO_TYPE
:
931 case AES_GMAC_MECH_INFO_TYPE
:
934 if ((plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
935 return (CRYPTO_DATA_LEN_RANGE
);
938 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
941 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
943 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
944 crypto_kmflag(req
), B_TRUE
);
945 if (ret
!= CRYPTO_SUCCESS
)
948 switch (mechanism
->cm_type
) {
949 case AES_CCM_MECH_INFO_TYPE
:
950 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_mac_len
;
952 case AES_GMAC_MECH_INFO_TYPE
:
953 if (plaintext
->cd_length
!= 0)
954 return (CRYPTO_ARGUMENTS_BAD
);
956 case AES_GCM_MECH_INFO_TYPE
:
957 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_tag_len
;
960 length_needed
= plaintext
->cd_length
;
963 /* return size of buffer needed to store output */
964 if (ciphertext
->cd_length
< length_needed
) {
965 ciphertext
->cd_length
= length_needed
;
966 ret
= CRYPTO_BUFFER_TOO_SMALL
;
970 saved_offset
= ciphertext
->cd_offset
;
971 saved_length
= ciphertext
->cd_length
;
974 * Do an update on the specified input data.
976 switch (plaintext
->cd_format
) {
977 case CRYPTO_DATA_RAW
:
978 ret
= crypto_update_iov(&aes_ctx
, plaintext
, ciphertext
,
979 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
981 case CRYPTO_DATA_UIO
:
982 ret
= crypto_update_uio(&aes_ctx
, plaintext
, ciphertext
,
983 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
986 ret
= CRYPTO_ARGUMENTS_BAD
;
989 if (ret
== CRYPTO_SUCCESS
) {
990 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
991 ret
= ccm_encrypt_final((ccm_ctx_t
*)&aes_ctx
,
992 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
994 if (ret
!= CRYPTO_SUCCESS
)
996 ASSERT(aes_ctx
.ac_remainder_len
== 0);
997 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
998 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
999 ret
= gcm_encrypt_final((gcm_ctx_t
*)&aes_ctx
,
1000 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1001 aes_copy_block
, aes_xor_block
);
1002 if (ret
!= CRYPTO_SUCCESS
)
1004 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1005 } else if (mechanism
->cm_type
== AES_CTR_MECH_INFO_TYPE
) {
1006 if (aes_ctx
.ac_remainder_len
> 0) {
1007 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1008 ciphertext
, aes_encrypt_block
);
1009 if (ret
!= CRYPTO_SUCCESS
)
1013 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1016 if (plaintext
!= ciphertext
) {
1017 ciphertext
->cd_length
=
1018 ciphertext
->cd_offset
- saved_offset
;
1021 ciphertext
->cd_length
= saved_length
;
1023 ciphertext
->cd_offset
= saved_offset
;
1026 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1027 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1028 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1030 #ifdef CAN_USE_GCM_ASM
1031 if (aes_ctx
.ac_flags
& (GCM_MODE
|GMAC_MODE
) &&
1032 ((gcm_ctx_t
*)&aes_ctx
)->gcm_Htable
!= NULL
) {
1034 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)&aes_ctx
;
1036 bzero(ctx
->gcm_Htable
, ctx
->gcm_htab_len
);
1037 kmem_free(ctx
->gcm_Htable
, ctx
->gcm_htab_len
);
1045 aes_decrypt_atomic(crypto_provider_handle_t provider
,
1046 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1047 crypto_key_t
*key
, crypto_data_t
*ciphertext
, crypto_data_t
*plaintext
,
1048 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1050 (void) provider
, (void) session_id
;
1051 aes_ctx_t aes_ctx
; /* on the stack */
1053 size_t saved_length
;
1054 size_t length_needed
;
1057 ASSERT(plaintext
!= NULL
);
1060 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1061 * be a multiple of AES block size.
1063 switch (mechanism
->cm_type
) {
1064 case AES_CTR_MECH_INFO_TYPE
:
1065 case AES_CCM_MECH_INFO_TYPE
:
1066 case AES_GCM_MECH_INFO_TYPE
:
1067 case AES_GMAC_MECH_INFO_TYPE
:
1070 if ((ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
1071 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
1074 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
1077 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
1079 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
1080 crypto_kmflag(req
), B_FALSE
);
1081 if (ret
!= CRYPTO_SUCCESS
)
1084 switch (mechanism
->cm_type
) {
1085 case AES_CCM_MECH_INFO_TYPE
:
1086 length_needed
= aes_ctx
.ac_data_len
;
1088 case AES_GCM_MECH_INFO_TYPE
:
1089 length_needed
= ciphertext
->cd_length
- aes_ctx
.ac_tag_len
;
1091 case AES_GMAC_MECH_INFO_TYPE
:
1092 if (plaintext
->cd_length
!= 0)
1093 return (CRYPTO_ARGUMENTS_BAD
);
1097 length_needed
= ciphertext
->cd_length
;
1100 /* return size of buffer needed to store output */
1101 if (plaintext
->cd_length
< length_needed
) {
1102 plaintext
->cd_length
= length_needed
;
1103 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1107 saved_offset
= plaintext
->cd_offset
;
1108 saved_length
= plaintext
->cd_length
;
1110 if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1111 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
)
1112 gcm_set_kmflag((gcm_ctx_t
*)&aes_ctx
, crypto_kmflag(req
));
1115 * Do an update on the specified input data.
1117 switch (ciphertext
->cd_format
) {
1118 case CRYPTO_DATA_RAW
:
1119 ret
= crypto_update_iov(&aes_ctx
, ciphertext
, plaintext
,
1120 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1122 case CRYPTO_DATA_UIO
:
1123 ret
= crypto_update_uio(&aes_ctx
, ciphertext
, plaintext
,
1124 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1127 ret
= CRYPTO_ARGUMENTS_BAD
;
1130 if (ret
== CRYPTO_SUCCESS
) {
1131 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1132 ASSERT(aes_ctx
.ac_processed_data_len
1133 == aes_ctx
.ac_data_len
);
1134 ASSERT(aes_ctx
.ac_processed_mac_len
1135 == aes_ctx
.ac_mac_len
);
1136 ret
= ccm_decrypt_final((ccm_ctx_t
*)&aes_ctx
,
1137 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1138 aes_copy_block
, aes_xor_block
);
1139 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1140 if ((ret
== CRYPTO_SUCCESS
) &&
1141 (ciphertext
!= plaintext
)) {
1142 plaintext
->cd_length
=
1143 plaintext
->cd_offset
- saved_offset
;
1145 plaintext
->cd_length
= saved_length
;
1147 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1148 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1149 ret
= gcm_decrypt_final((gcm_ctx_t
*)&aes_ctx
,
1150 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1152 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1153 if ((ret
== CRYPTO_SUCCESS
) &&
1154 (ciphertext
!= plaintext
)) {
1155 plaintext
->cd_length
=
1156 plaintext
->cd_offset
- saved_offset
;
1158 plaintext
->cd_length
= saved_length
;
1160 } else if (mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
) {
1161 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1162 if (ciphertext
!= plaintext
)
1163 plaintext
->cd_length
=
1164 plaintext
->cd_offset
- saved_offset
;
1166 if (aes_ctx
.ac_remainder_len
> 0) {
1167 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1168 plaintext
, aes_encrypt_block
);
1169 if (ret
== CRYPTO_DATA_LEN_RANGE
)
1170 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
1171 if (ret
!= CRYPTO_SUCCESS
)
1174 if (ciphertext
!= plaintext
)
1175 plaintext
->cd_length
=
1176 plaintext
->cd_offset
- saved_offset
;
1179 plaintext
->cd_length
= saved_length
;
1181 plaintext
->cd_offset
= saved_offset
;
1184 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1185 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1186 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1189 if (aes_ctx
.ac_flags
& CCM_MODE
) {
1190 if (aes_ctx
.ac_pt_buf
!= NULL
) {
1191 vmem_free(aes_ctx
.ac_pt_buf
, aes_ctx
.ac_data_len
);
1193 } else if (aes_ctx
.ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
1194 if (((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
!= NULL
) {
1195 vmem_free(((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
,
1196 ((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf_len
);
1198 #ifdef CAN_USE_GCM_ASM
1199 if (((gcm_ctx_t
*)&aes_ctx
)->gcm_Htable
!= NULL
) {
1200 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)&aes_ctx
;
1202 bzero(ctx
->gcm_Htable
, ctx
->gcm_htab_len
);
1203 kmem_free(ctx
->gcm_Htable
, ctx
->gcm_htab_len
);
1212 * KCF software provider context template entry points.
1215 aes_create_ctx_template(crypto_provider_handle_t provider
,
1216 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1217 crypto_spi_ctx_template_t
*tmpl
, size_t *tmpl_size
, crypto_req_handle_t req
)
1224 if (mechanism
->cm_type
!= AES_ECB_MECH_INFO_TYPE
&&
1225 mechanism
->cm_type
!= AES_CBC_MECH_INFO_TYPE
&&
1226 mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
&&
1227 mechanism
->cm_type
!= AES_CCM_MECH_INFO_TYPE
&&
1228 mechanism
->cm_type
!= AES_GCM_MECH_INFO_TYPE
&&
1229 mechanism
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1230 return (CRYPTO_MECHANISM_INVALID
);
1232 if ((keysched
= aes_alloc_keysched(&size
,
1233 crypto_kmflag(req
))) == NULL
) {
1234 return (CRYPTO_HOST_MEMORY
);
1238 * Initialize key schedule. Key length information is stored
1241 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1242 bzero(keysched
, size
);
1243 kmem_free(keysched
, size
);
1250 return (CRYPTO_SUCCESS
);
1255 aes_free_context(crypto_ctx_t
*ctx
)
1257 aes_ctx_t
*aes_ctx
= ctx
->cc_provider_private
;
1259 if (aes_ctx
!= NULL
) {
1260 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1261 ASSERT(aes_ctx
->ac_keysched_len
!= 0);
1262 bzero(aes_ctx
->ac_keysched
, aes_ctx
->ac_keysched_len
);
1263 kmem_free(aes_ctx
->ac_keysched
,
1264 aes_ctx
->ac_keysched_len
);
1266 crypto_free_mode_ctx(aes_ctx
);
1267 ctx
->cc_provider_private
= NULL
;
1270 return (CRYPTO_SUCCESS
);
1275 aes_common_init_ctx(aes_ctx_t
*aes_ctx
, crypto_spi_ctx_template_t
*template,
1276 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
, int kmflag
,
1277 boolean_t is_encrypt_init
)
1279 int rv
= CRYPTO_SUCCESS
;
1283 if (template == NULL
) {
1284 if ((keysched
= aes_alloc_keysched(&size
, kmflag
)) == NULL
)
1285 return (CRYPTO_HOST_MEMORY
);
1287 * Initialize key schedule.
1288 * Key length is stored in the key.
1290 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1291 kmem_free(keysched
, size
);
1295 aes_ctx
->ac_flags
|= PROVIDER_OWNS_KEY_SCHEDULE
;
1296 aes_ctx
->ac_keysched_len
= size
;
1298 keysched
= template;
1300 aes_ctx
->ac_keysched
= keysched
;
1302 switch (mechanism
->cm_type
) {
1303 case AES_CBC_MECH_INFO_TYPE
:
1304 rv
= cbc_init_ctx((cbc_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1305 mechanism
->cm_param_len
, AES_BLOCK_LEN
, aes_copy_block64
);
1307 case AES_CTR_MECH_INFO_TYPE
: {
1308 CK_AES_CTR_PARAMS
*pp
;
1310 if (mechanism
->cm_param
== NULL
||
1311 mechanism
->cm_param_len
!= sizeof (CK_AES_CTR_PARAMS
)) {
1312 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1314 pp
= (CK_AES_CTR_PARAMS
*)(void *)mechanism
->cm_param
;
1315 rv
= ctr_init_ctx((ctr_ctx_t
*)aes_ctx
, pp
->ulCounterBits
,
1316 pp
->cb
, aes_copy_block
);
1319 case AES_CCM_MECH_INFO_TYPE
:
1320 if (mechanism
->cm_param
== NULL
||
1321 mechanism
->cm_param_len
!= sizeof (CK_AES_CCM_PARAMS
)) {
1322 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1324 rv
= ccm_init_ctx((ccm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1325 kmflag
, is_encrypt_init
, AES_BLOCK_LEN
, aes_encrypt_block
,
1328 case AES_GCM_MECH_INFO_TYPE
:
1329 if (mechanism
->cm_param
== NULL
||
1330 mechanism
->cm_param_len
!= sizeof (CK_AES_GCM_PARAMS
)) {
1331 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1333 rv
= gcm_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1334 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1337 case AES_GMAC_MECH_INFO_TYPE
:
1338 if (mechanism
->cm_param
== NULL
||
1339 mechanism
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
)) {
1340 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1342 rv
= gmac_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1343 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1346 case AES_ECB_MECH_INFO_TYPE
:
1347 aes_ctx
->ac_flags
|= ECB_MODE
;
1350 if (rv
!= CRYPTO_SUCCESS
) {
1351 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1352 bzero(keysched
, size
);
1353 kmem_free(keysched
, size
);
1361 process_gmac_mech(crypto_mechanism_t
*mech
, crypto_data_t
*data
,
1362 CK_AES_GCM_PARAMS
*gcm_params
)
1364 /* LINTED: pointer alignment */
1365 CK_AES_GMAC_PARAMS
*params
= (CK_AES_GMAC_PARAMS
*)mech
->cm_param
;
1367 if (mech
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1368 return (CRYPTO_MECHANISM_INVALID
);
1370 if (mech
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
))
1371 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1373 if (params
->pIv
== NULL
)
1374 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1376 gcm_params
->pIv
= params
->pIv
;
1377 gcm_params
->ulIvLen
= AES_GMAC_IV_LEN
;
1378 gcm_params
->ulTagBits
= AES_GMAC_TAG_BITS
;
1381 return (CRYPTO_SUCCESS
);
1383 if (data
->cd_format
!= CRYPTO_DATA_RAW
)
1384 return (CRYPTO_ARGUMENTS_BAD
);
1386 gcm_params
->pAAD
= (uchar_t
*)data
->cd_raw
.iov_base
;
1387 gcm_params
->ulAADLen
= data
->cd_length
;
1388 return (CRYPTO_SUCCESS
);
1392 aes_mac_atomic(crypto_provider_handle_t provider
,
1393 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1394 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1395 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1397 CK_AES_GCM_PARAMS gcm_params
;
1398 crypto_mechanism_t gcm_mech
;
1401 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1405 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1406 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1407 gcm_mech
.cm_param
= (char *)&gcm_params
;
1409 return (aes_encrypt_atomic(provider
, session_id
, &gcm_mech
,
1410 key
, &null_crypto_data
, mac
, template, req
));
1414 aes_mac_verify_atomic(crypto_provider_handle_t provider
,
1415 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1416 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1417 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1419 CK_AES_GCM_PARAMS gcm_params
;
1420 crypto_mechanism_t gcm_mech
;
1423 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1427 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1428 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1429 gcm_mech
.cm_param
= (char *)&gcm_params
;
1431 return (aes_decrypt_atomic(provider
, session_id
, &gcm_mech
,
1432 key
, mac
, &null_crypto_data
, template, req
));