4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
26 * AES provider for the Kernel Cryptographic Framework (KCF)
29 #include <sys/zfs_context.h>
30 #include <sys/crypto/common.h>
31 #include <sys/crypto/impl.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/crypto/icp.h>
34 #include <modes/modes.h>
35 #include <sys/modctl.h>
37 #include <aes/aes_impl.h>
39 #define CRYPTO_PROVIDER_NAME "aes"
41 extern struct mod_ops mod_cryptoops
;
44 * Module linkage information for the kernel.
46 static struct modlcrypto modlcrypto
= {
48 "AES Kernel SW Provider"
51 static struct modlinkage modlinkage
= {
52 MODREV_1
, { (void *)&modlcrypto
, NULL
}
56 * Mechanism info structure passed to KCF during registration.
58 static crypto_mech_info_t aes_mech_info_tab
[] = {
60 {SUN_CKM_AES_ECB
, AES_ECB_MECH_INFO_TYPE
,
61 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
62 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
63 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
65 {SUN_CKM_AES_CBC
, AES_CBC_MECH_INFO_TYPE
,
66 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
67 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
68 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
70 {SUN_CKM_AES_CTR
, AES_CTR_MECH_INFO_TYPE
,
71 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
72 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
73 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
75 {SUN_CKM_AES_CCM
, AES_CCM_MECH_INFO_TYPE
,
76 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
77 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
78 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
80 {SUN_CKM_AES_GCM
, AES_GCM_MECH_INFO_TYPE
,
81 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
82 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
83 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
85 {SUN_CKM_AES_GMAC
, AES_GMAC_MECH_INFO_TYPE
,
86 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
87 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
|
88 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
|
89 CRYPTO_FG_SIGN
| CRYPTO_FG_SIGN_ATOMIC
|
90 CRYPTO_FG_VERIFY
| CRYPTO_FG_VERIFY_ATOMIC
,
91 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
/* operations are in-place if the output buffer is NULL */
#define	AES_ARG_INPLACE(input, output)				\
	if ((output) == NULL)					\
		(output) = (input);
99 static void aes_provider_status(crypto_provider_handle_t
, uint_t
*);
101 static crypto_control_ops_t aes_control_ops
= {
105 static int aes_encrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
106 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
107 static int aes_decrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
108 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
109 static int aes_common_init(crypto_ctx_t
*, crypto_mechanism_t
*,
110 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
, boolean_t
);
111 static int aes_common_init_ctx(aes_ctx_t
*, crypto_spi_ctx_template_t
*,
112 crypto_mechanism_t
*, crypto_key_t
*, int, boolean_t
);
113 static int aes_encrypt_final(crypto_ctx_t
*, crypto_data_t
*,
114 crypto_req_handle_t
);
115 static int aes_decrypt_final(crypto_ctx_t
*, crypto_data_t
*,
116 crypto_req_handle_t
);
118 static int aes_encrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
119 crypto_req_handle_t
);
120 static int aes_encrypt_update(crypto_ctx_t
*, crypto_data_t
*,
121 crypto_data_t
*, crypto_req_handle_t
);
122 static int aes_encrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
123 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
124 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
126 static int aes_decrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
127 crypto_req_handle_t
);
128 static int aes_decrypt_update(crypto_ctx_t
*, crypto_data_t
*,
129 crypto_data_t
*, crypto_req_handle_t
);
130 static int aes_decrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
131 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
132 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
134 static crypto_cipher_ops_t aes_cipher_ops
= {
147 static int aes_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
148 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
149 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
150 static int aes_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
151 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
152 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
154 static crypto_mac_ops_t aes_mac_ops
= {
160 aes_mac_verify_atomic
163 static int aes_create_ctx_template(crypto_provider_handle_t
,
164 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
165 size_t *, crypto_req_handle_t
);
166 static int aes_free_context(crypto_ctx_t
*);
168 static crypto_ctx_ops_t aes_ctx_ops
= {
169 aes_create_ctx_template
,
173 static crypto_ops_t aes_crypto_ops
= {{{{{
190 static crypto_provider_info_t aes_prov_info
= {{{{
191 CRYPTO_SPI_VERSION_1
,
192 "AES Software Provider",
196 sizeof (aes_mech_info_tab
)/sizeof (crypto_mech_info_t
),
200 static crypto_kcf_provider_handle_t aes_prov_handle
= 0;
201 static crypto_data_t null_crypto_data
= { CRYPTO_DATA_RAW
};
208 if ((ret
= mod_install(&modlinkage
)) != 0)
211 /* Register with KCF. If the registration fails, remove the module. */
212 if (crypto_register_provider(&aes_prov_info
, &aes_prov_handle
)) {
213 (void) mod_remove(&modlinkage
);
223 /* Unregister from KCF if module is registered */
224 if (aes_prov_handle
!= 0) {
225 if (crypto_unregister_provider(aes_prov_handle
))
231 return (mod_remove(&modlinkage
));
235 aes_check_mech_param(crypto_mechanism_t
*mechanism
, aes_ctx_t
**ctx
, int kmflag
)
238 boolean_t param_required
= B_TRUE
;
240 void *(*alloc_fun
)(int);
241 int rv
= CRYPTO_SUCCESS
;
243 switch (mechanism
->cm_type
) {
244 case AES_ECB_MECH_INFO_TYPE
:
245 param_required
= B_FALSE
;
246 alloc_fun
= ecb_alloc_ctx
;
248 case AES_CBC_MECH_INFO_TYPE
:
249 param_len
= AES_BLOCK_LEN
;
250 alloc_fun
= cbc_alloc_ctx
;
252 case AES_CTR_MECH_INFO_TYPE
:
253 param_len
= sizeof (CK_AES_CTR_PARAMS
);
254 alloc_fun
= ctr_alloc_ctx
;
256 case AES_CCM_MECH_INFO_TYPE
:
257 param_len
= sizeof (CK_AES_CCM_PARAMS
);
258 alloc_fun
= ccm_alloc_ctx
;
260 case AES_GCM_MECH_INFO_TYPE
:
261 param_len
= sizeof (CK_AES_GCM_PARAMS
);
262 alloc_fun
= gcm_alloc_ctx
;
264 case AES_GMAC_MECH_INFO_TYPE
:
265 param_len
= sizeof (CK_AES_GMAC_PARAMS
);
266 alloc_fun
= gmac_alloc_ctx
;
269 rv
= CRYPTO_MECHANISM_INVALID
;
272 if (param_required
&& mechanism
->cm_param
!= NULL
&&
273 mechanism
->cm_param_len
!= param_len
) {
274 rv
= CRYPTO_MECHANISM_PARAM_INVALID
;
277 p
= (alloc_fun
)(kmflag
);
284 * Initialize key schedules for AES
287 init_keysched(crypto_key_t
*key
, void *newbie
)
290 * Only keys by value are supported by this module.
292 switch (key
->ck_format
) {
294 if (key
->ck_length
< AES_MINBITS
||
295 key
->ck_length
> AES_MAXBITS
) {
296 return (CRYPTO_KEY_SIZE_RANGE
);
299 /* key length must be either 128, 192, or 256 */
300 if ((key
->ck_length
& 63) != 0)
301 return (CRYPTO_KEY_SIZE_RANGE
);
304 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
307 aes_init_keysched(key
->ck_data
, key
->ck_length
, newbie
);
308 return (CRYPTO_SUCCESS
);
312 * KCF software provider control entry points.
316 aes_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
318 *status
= CRYPTO_PROVIDER_READY
;
322 aes_encrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
323 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
324 crypto_req_handle_t req
) {
325 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_TRUE
));
329 aes_decrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
330 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
331 crypto_req_handle_t req
) {
332 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_FALSE
));
338 * KCF software provider encrypt entry points.
341 aes_common_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
342 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
343 crypto_req_handle_t req
, boolean_t is_encrypt_init
)
350 * Only keys by value are supported by this module.
352 if (key
->ck_format
!= CRYPTO_KEY_RAW
) {
353 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
356 kmflag
= crypto_kmflag(req
);
357 if ((rv
= aes_check_mech_param(mechanism
, &aes_ctx
, kmflag
))
361 rv
= aes_common_init_ctx(aes_ctx
, template, mechanism
, key
, kmflag
,
363 if (rv
!= CRYPTO_SUCCESS
) {
364 crypto_free_mode_ctx(aes_ctx
);
368 ctx
->cc_provider_private
= aes_ctx
;
370 return (CRYPTO_SUCCESS
);
374 aes_copy_block64(uint8_t *in
, uint64_t *out
)
376 if (IS_P2ALIGNED(in
, sizeof (uint64_t))) {
377 /* LINTED: pointer alignment */
378 out
[0] = *(uint64_t *)&in
[0];
379 /* LINTED: pointer alignment */
380 out
[1] = *(uint64_t *)&in
[8];
382 uint8_t *iv8
= (uint8_t *)&out
[0];
384 AES_COPY_BLOCK(in
, iv8
);
390 aes_encrypt(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
391 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
393 int ret
= CRYPTO_FAILED
;
396 size_t saved_length
, saved_offset
, length_needed
;
398 ASSERT(ctx
->cc_provider_private
!= NULL
);
399 aes_ctx
= ctx
->cc_provider_private
;
402 * For block ciphers, plaintext must be a multiple of AES block size.
403 * This test is only valid for ciphers whose blocksize is a power of 2.
405 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
406 == 0) && (plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
407 return (CRYPTO_DATA_LEN_RANGE
);
409 AES_ARG_INPLACE(plaintext
, ciphertext
);
412 * We need to just return the length needed to store the output.
413 * We should not destroy the context for the following case.
415 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
417 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_mac_len
;
420 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_tag_len
;
423 if (plaintext
->cd_length
!= 0)
424 return (CRYPTO_ARGUMENTS_BAD
);
426 length_needed
= aes_ctx
->ac_tag_len
;
429 length_needed
= plaintext
->cd_length
;
432 if (ciphertext
->cd_length
< length_needed
) {
433 ciphertext
->cd_length
= length_needed
;
434 return (CRYPTO_BUFFER_TOO_SMALL
);
437 saved_length
= ciphertext
->cd_length
;
438 saved_offset
= ciphertext
->cd_offset
;
441 * Do an update on the specified input data.
443 ret
= aes_encrypt_update(ctx
, plaintext
, ciphertext
, req
);
444 if (ret
!= CRYPTO_SUCCESS
) {
449 * For CCM mode, aes_ccm_encrypt_final() will take care of any
450 * left-over unprocessed data, and compute the MAC
452 if (aes_ctx
->ac_flags
& CCM_MODE
) {
454 * ccm_encrypt_final() will compute the MAC and append
455 * it to existing ciphertext. So, need to adjust the left over
456 * length value accordingly
459 /* order of following 2 lines MUST not be reversed */
460 ciphertext
->cd_offset
= ciphertext
->cd_length
;
461 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
462 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, ciphertext
,
463 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
464 if (ret
!= CRYPTO_SUCCESS
) {
468 if (plaintext
!= ciphertext
) {
469 ciphertext
->cd_length
=
470 ciphertext
->cd_offset
- saved_offset
;
472 ciphertext
->cd_offset
= saved_offset
;
473 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
475 * gcm_encrypt_final() will compute the MAC and append
476 * it to existing ciphertext. So, need to adjust the left over
477 * length value accordingly
480 /* order of following 2 lines MUST not be reversed */
481 ciphertext
->cd_offset
= ciphertext
->cd_length
;
482 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
483 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, ciphertext
,
484 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
486 if (ret
!= CRYPTO_SUCCESS
) {
490 if (plaintext
!= ciphertext
) {
491 ciphertext
->cd_length
=
492 ciphertext
->cd_offset
- saved_offset
;
494 ciphertext
->cd_offset
= saved_offset
;
497 ASSERT(aes_ctx
->ac_remainder_len
== 0);
498 (void) aes_free_context(ctx
);
505 aes_decrypt(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
506 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
508 int ret
= CRYPTO_FAILED
;
512 size_t saved_length
, length_needed
;
514 ASSERT(ctx
->cc_provider_private
!= NULL
);
515 aes_ctx
= ctx
->cc_provider_private
;
518 * For block ciphers, plaintext must be a multiple of AES block size.
519 * This test is only valid for ciphers whose blocksize is a power of 2.
521 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
522 == 0) && (ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0) {
523 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
526 AES_ARG_INPLACE(ciphertext
, plaintext
);
529 * Return length needed to store the output.
530 * Do not destroy context when plaintext buffer is too small.
532 * CCM: plaintext is MAC len smaller than cipher text
533 * GCM: plaintext is TAG len smaller than cipher text
534 * GMAC: plaintext length must be zero
536 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
538 length_needed
= aes_ctx
->ac_processed_data_len
;
541 length_needed
= ciphertext
->cd_length
- aes_ctx
->ac_tag_len
;
544 if (plaintext
->cd_length
!= 0)
545 return (CRYPTO_ARGUMENTS_BAD
);
550 length_needed
= ciphertext
->cd_length
;
553 if (plaintext
->cd_length
< length_needed
) {
554 plaintext
->cd_length
= length_needed
;
555 return (CRYPTO_BUFFER_TOO_SMALL
);
558 saved_offset
= plaintext
->cd_offset
;
559 saved_length
= plaintext
->cd_length
;
562 * Do an update on the specified input data.
564 ret
= aes_decrypt_update(ctx
, ciphertext
, plaintext
, req
);
565 if (ret
!= CRYPTO_SUCCESS
) {
569 if (aes_ctx
->ac_flags
& CCM_MODE
) {
570 ASSERT(aes_ctx
->ac_processed_data_len
== aes_ctx
->ac_data_len
);
571 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
573 /* order of following 2 lines MUST not be reversed */
574 plaintext
->cd_offset
= plaintext
->cd_length
;
575 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
577 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, plaintext
,
578 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
580 if (ret
== CRYPTO_SUCCESS
) {
581 if (plaintext
!= ciphertext
) {
582 plaintext
->cd_length
=
583 plaintext
->cd_offset
- saved_offset
;
586 plaintext
->cd_length
= saved_length
;
589 plaintext
->cd_offset
= saved_offset
;
590 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
591 /* order of following 2 lines MUST not be reversed */
592 plaintext
->cd_offset
= plaintext
->cd_length
;
593 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
595 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, plaintext
,
596 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
597 if (ret
== CRYPTO_SUCCESS
) {
598 if (plaintext
!= ciphertext
) {
599 plaintext
->cd_length
=
600 plaintext
->cd_offset
- saved_offset
;
603 plaintext
->cd_length
= saved_length
;
606 plaintext
->cd_offset
= saved_offset
;
609 ASSERT(aes_ctx
->ac_remainder_len
== 0);
612 (void) aes_free_context(ctx
);
620 aes_encrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
621 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
624 size_t saved_length
, out_len
;
625 int ret
= CRYPTO_SUCCESS
;
628 ASSERT(ctx
->cc_provider_private
!= NULL
);
629 aes_ctx
= ctx
->cc_provider_private
;
631 AES_ARG_INPLACE(plaintext
, ciphertext
);
633 /* compute number of bytes that will hold the ciphertext */
634 out_len
= aes_ctx
->ac_remainder_len
;
635 out_len
+= plaintext
->cd_length
;
636 out_len
&= ~(AES_BLOCK_LEN
- 1);
638 /* return length needed to store the output */
639 if (ciphertext
->cd_length
< out_len
) {
640 ciphertext
->cd_length
= out_len
;
641 return (CRYPTO_BUFFER_TOO_SMALL
);
644 saved_offset
= ciphertext
->cd_offset
;
645 saved_length
= ciphertext
->cd_length
;
648 * Do the AES update on the specified input data.
650 switch (plaintext
->cd_format
) {
651 case CRYPTO_DATA_RAW
:
652 ret
= crypto_update_iov(ctx
->cc_provider_private
,
653 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
656 case CRYPTO_DATA_UIO
:
657 ret
= crypto_update_uio(ctx
->cc_provider_private
,
658 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
662 ret
= CRYPTO_ARGUMENTS_BAD
;
666 * Since AES counter mode is a stream cipher, we call
667 * ctr_mode_final() to pick up any remaining bytes.
668 * It is an internal function that does not destroy
669 * the context like *normal* final routines.
671 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
672 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
,
673 ciphertext
, aes_encrypt_block
);
676 if (ret
== CRYPTO_SUCCESS
) {
677 if (plaintext
!= ciphertext
)
678 ciphertext
->cd_length
=
679 ciphertext
->cd_offset
- saved_offset
;
681 ciphertext
->cd_length
= saved_length
;
683 ciphertext
->cd_offset
= saved_offset
;
690 aes_decrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
691 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
694 size_t saved_length
, out_len
;
695 int ret
= CRYPTO_SUCCESS
;
698 ASSERT(ctx
->cc_provider_private
!= NULL
);
699 aes_ctx
= ctx
->cc_provider_private
;
701 AES_ARG_INPLACE(ciphertext
, plaintext
);
704 * Compute number of bytes that will hold the plaintext.
705 * This is not necessary for CCM, GCM, and GMAC since these
706 * mechanisms never return plaintext for update operations.
708 if ((aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
709 out_len
= aes_ctx
->ac_remainder_len
;
710 out_len
+= ciphertext
->cd_length
;
711 out_len
&= ~(AES_BLOCK_LEN
- 1);
713 /* return length needed to store the output */
714 if (plaintext
->cd_length
< out_len
) {
715 plaintext
->cd_length
= out_len
;
716 return (CRYPTO_BUFFER_TOO_SMALL
);
720 saved_offset
= plaintext
->cd_offset
;
721 saved_length
= plaintext
->cd_length
;
723 if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
))
724 gcm_set_kmflag((gcm_ctx_t
*)aes_ctx
, crypto_kmflag(req
));
727 * Do the AES update on the specified input data.
729 switch (ciphertext
->cd_format
) {
730 case CRYPTO_DATA_RAW
:
731 ret
= crypto_update_iov(ctx
->cc_provider_private
,
732 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
735 case CRYPTO_DATA_UIO
:
736 ret
= crypto_update_uio(ctx
->cc_provider_private
,
737 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
741 ret
= CRYPTO_ARGUMENTS_BAD
;
745 * Since AES counter mode is a stream cipher, we call
746 * ctr_mode_final() to pick up any remaining bytes.
747 * It is an internal function that does not destroy
748 * the context like *normal* final routines.
750 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
751 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, plaintext
,
753 if (ret
== CRYPTO_DATA_LEN_RANGE
)
754 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
757 if (ret
== CRYPTO_SUCCESS
) {
758 if (ciphertext
!= plaintext
)
759 plaintext
->cd_length
=
760 plaintext
->cd_offset
- saved_offset
;
762 plaintext
->cd_length
= saved_length
;
764 plaintext
->cd_offset
= saved_offset
;
772 aes_encrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
773 crypto_req_handle_t req
)
778 ASSERT(ctx
->cc_provider_private
!= NULL
);
779 aes_ctx
= ctx
->cc_provider_private
;
781 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
782 data
->cd_format
!= CRYPTO_DATA_UIO
) {
783 return (CRYPTO_ARGUMENTS_BAD
);
786 if (aes_ctx
->ac_flags
& CTR_MODE
) {
787 if (aes_ctx
->ac_remainder_len
> 0) {
788 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
790 if (ret
!= CRYPTO_SUCCESS
)
793 } else if (aes_ctx
->ac_flags
& CCM_MODE
) {
794 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
795 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
796 if (ret
!= CRYPTO_SUCCESS
) {
799 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
800 size_t saved_offset
= data
->cd_offset
;
802 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
803 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
805 if (ret
!= CRYPTO_SUCCESS
) {
808 data
->cd_length
= data
->cd_offset
- saved_offset
;
809 data
->cd_offset
= saved_offset
;
812 * There must be no unprocessed plaintext.
813 * This happens if the length of the last data is
814 * not a multiple of the AES block length.
816 if (aes_ctx
->ac_remainder_len
> 0) {
817 return (CRYPTO_DATA_LEN_RANGE
);
822 (void) aes_free_context(ctx
);
824 return (CRYPTO_SUCCESS
);
829 aes_decrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
830 crypto_req_handle_t req
)
837 ASSERT(ctx
->cc_provider_private
!= NULL
);
838 aes_ctx
= ctx
->cc_provider_private
;
840 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
841 data
->cd_format
!= CRYPTO_DATA_UIO
) {
842 return (CRYPTO_ARGUMENTS_BAD
);
846 * There must be no unprocessed ciphertext.
847 * This happens if the length of the last ciphertext is
848 * not a multiple of the AES block length.
850 if (aes_ctx
->ac_remainder_len
> 0) {
851 if ((aes_ctx
->ac_flags
& CTR_MODE
) == 0)
852 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
854 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
856 if (ret
== CRYPTO_DATA_LEN_RANGE
)
857 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
858 if (ret
!= CRYPTO_SUCCESS
)
863 if (aes_ctx
->ac_flags
& CCM_MODE
) {
865 * This is where all the plaintext is returned, make sure
866 * the plaintext buffer is big enough
868 size_t pt_len
= aes_ctx
->ac_data_len
;
869 if (data
->cd_length
< pt_len
) {
870 data
->cd_length
= pt_len
;
871 return (CRYPTO_BUFFER_TOO_SMALL
);
874 ASSERT(aes_ctx
->ac_processed_data_len
== pt_len
);
875 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
876 saved_offset
= data
->cd_offset
;
877 saved_length
= data
->cd_length
;
878 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
879 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
881 if (ret
== CRYPTO_SUCCESS
) {
882 data
->cd_length
= data
->cd_offset
- saved_offset
;
884 data
->cd_length
= saved_length
;
887 data
->cd_offset
= saved_offset
;
888 if (ret
!= CRYPTO_SUCCESS
) {
891 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
893 * This is where all the plaintext is returned, make sure
894 * the plaintext buffer is big enough
896 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)aes_ctx
;
897 size_t pt_len
= ctx
->gcm_processed_data_len
- ctx
->gcm_tag_len
;
899 if (data
->cd_length
< pt_len
) {
900 data
->cd_length
= pt_len
;
901 return (CRYPTO_BUFFER_TOO_SMALL
);
904 saved_offset
= data
->cd_offset
;
905 saved_length
= data
->cd_length
;
906 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
907 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
908 if (ret
== CRYPTO_SUCCESS
) {
909 data
->cd_length
= data
->cd_offset
- saved_offset
;
911 data
->cd_length
= saved_length
;
914 data
->cd_offset
= saved_offset
;
915 if (ret
!= CRYPTO_SUCCESS
) {
921 if ((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
925 (void) aes_free_context(ctx
);
927 return (CRYPTO_SUCCESS
);
932 aes_encrypt_atomic(crypto_provider_handle_t provider
,
933 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
934 crypto_key_t
*key
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
935 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
937 aes_ctx_t aes_ctx
; /* on the stack */
940 size_t length_needed
;
943 AES_ARG_INPLACE(plaintext
, ciphertext
);
946 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
947 * be a multiple of AES block size.
949 switch (mechanism
->cm_type
) {
950 case AES_CTR_MECH_INFO_TYPE
:
951 case AES_CCM_MECH_INFO_TYPE
:
952 case AES_GCM_MECH_INFO_TYPE
:
953 case AES_GMAC_MECH_INFO_TYPE
:
956 if ((plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
957 return (CRYPTO_DATA_LEN_RANGE
);
960 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
963 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
965 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
966 crypto_kmflag(req
), B_TRUE
);
967 if (ret
!= CRYPTO_SUCCESS
)
970 switch (mechanism
->cm_type
) {
971 case AES_CCM_MECH_INFO_TYPE
:
972 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_mac_len
;
974 case AES_GMAC_MECH_INFO_TYPE
:
975 if (plaintext
->cd_length
!= 0)
976 return (CRYPTO_ARGUMENTS_BAD
);
978 case AES_GCM_MECH_INFO_TYPE
:
979 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_tag_len
;
982 length_needed
= plaintext
->cd_length
;
985 /* return size of buffer needed to store output */
986 if (ciphertext
->cd_length
< length_needed
) {
987 ciphertext
->cd_length
= length_needed
;
988 ret
= CRYPTO_BUFFER_TOO_SMALL
;
992 saved_offset
= ciphertext
->cd_offset
;
993 saved_length
= ciphertext
->cd_length
;
996 * Do an update on the specified input data.
998 switch (plaintext
->cd_format
) {
999 case CRYPTO_DATA_RAW
:
1000 ret
= crypto_update_iov(&aes_ctx
, plaintext
, ciphertext
,
1001 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1003 case CRYPTO_DATA_UIO
:
1004 ret
= crypto_update_uio(&aes_ctx
, plaintext
, ciphertext
,
1005 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1008 ret
= CRYPTO_ARGUMENTS_BAD
;
1011 if (ret
== CRYPTO_SUCCESS
) {
1012 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1013 ret
= ccm_encrypt_final((ccm_ctx_t
*)&aes_ctx
,
1014 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1016 if (ret
!= CRYPTO_SUCCESS
)
1018 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1019 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1020 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1021 ret
= gcm_encrypt_final((gcm_ctx_t
*)&aes_ctx
,
1022 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1023 aes_copy_block
, aes_xor_block
);
1024 if (ret
!= CRYPTO_SUCCESS
)
1026 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1027 } else if (mechanism
->cm_type
== AES_CTR_MECH_INFO_TYPE
) {
1028 if (aes_ctx
.ac_remainder_len
> 0) {
1029 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1030 ciphertext
, aes_encrypt_block
);
1031 if (ret
!= CRYPTO_SUCCESS
)
1035 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1038 if (plaintext
!= ciphertext
) {
1039 ciphertext
->cd_length
=
1040 ciphertext
->cd_offset
- saved_offset
;
1043 ciphertext
->cd_length
= saved_length
;
1045 ciphertext
->cd_offset
= saved_offset
;
1048 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1049 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1050 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1058 aes_decrypt_atomic(crypto_provider_handle_t provider
,
1059 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1060 crypto_key_t
*key
, crypto_data_t
*ciphertext
, crypto_data_t
*plaintext
,
1061 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1063 aes_ctx_t aes_ctx
; /* on the stack */
1065 size_t saved_length
;
1066 size_t length_needed
;
1069 AES_ARG_INPLACE(ciphertext
, plaintext
);
1072 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1073 * be a multiple of AES block size.
1075 switch (mechanism
->cm_type
) {
1076 case AES_CTR_MECH_INFO_TYPE
:
1077 case AES_CCM_MECH_INFO_TYPE
:
1078 case AES_GCM_MECH_INFO_TYPE
:
1079 case AES_GMAC_MECH_INFO_TYPE
:
1082 if ((ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
1083 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
1086 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
1089 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
1091 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
1092 crypto_kmflag(req
), B_FALSE
);
1093 if (ret
!= CRYPTO_SUCCESS
)
1096 switch (mechanism
->cm_type
) {
1097 case AES_CCM_MECH_INFO_TYPE
:
1098 length_needed
= aes_ctx
.ac_data_len
;
1100 case AES_GCM_MECH_INFO_TYPE
:
1101 length_needed
= ciphertext
->cd_length
- aes_ctx
.ac_tag_len
;
1103 case AES_GMAC_MECH_INFO_TYPE
:
1104 if (plaintext
->cd_length
!= 0)
1105 return (CRYPTO_ARGUMENTS_BAD
);
1109 length_needed
= ciphertext
->cd_length
;
1112 /* return size of buffer needed to store output */
1113 if (plaintext
->cd_length
< length_needed
) {
1114 plaintext
->cd_length
= length_needed
;
1115 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1119 saved_offset
= plaintext
->cd_offset
;
1120 saved_length
= plaintext
->cd_length
;
1122 if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1123 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
)
1124 gcm_set_kmflag((gcm_ctx_t
*)&aes_ctx
, crypto_kmflag(req
));
1127 * Do an update on the specified input data.
1129 switch (ciphertext
->cd_format
) {
1130 case CRYPTO_DATA_RAW
:
1131 ret
= crypto_update_iov(&aes_ctx
, ciphertext
, plaintext
,
1132 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1134 case CRYPTO_DATA_UIO
:
1135 ret
= crypto_update_uio(&aes_ctx
, ciphertext
, plaintext
,
1136 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1139 ret
= CRYPTO_ARGUMENTS_BAD
;
1142 if (ret
== CRYPTO_SUCCESS
) {
1143 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1144 ASSERT(aes_ctx
.ac_processed_data_len
1145 == aes_ctx
.ac_data_len
);
1146 ASSERT(aes_ctx
.ac_processed_mac_len
1147 == aes_ctx
.ac_mac_len
);
1148 ret
= ccm_decrypt_final((ccm_ctx_t
*)&aes_ctx
,
1149 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1150 aes_copy_block
, aes_xor_block
);
1151 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1152 if ((ret
== CRYPTO_SUCCESS
) &&
1153 (ciphertext
!= plaintext
)) {
1154 plaintext
->cd_length
=
1155 plaintext
->cd_offset
- saved_offset
;
1157 plaintext
->cd_length
= saved_length
;
1159 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1160 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1161 ret
= gcm_decrypt_final((gcm_ctx_t
*)&aes_ctx
,
1162 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1164 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1165 if ((ret
== CRYPTO_SUCCESS
) &&
1166 (ciphertext
!= plaintext
)) {
1167 plaintext
->cd_length
=
1168 plaintext
->cd_offset
- saved_offset
;
1170 plaintext
->cd_length
= saved_length
;
1172 } else if (mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
) {
1173 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1174 if (ciphertext
!= plaintext
)
1175 plaintext
->cd_length
=
1176 plaintext
->cd_offset
- saved_offset
;
1178 if (aes_ctx
.ac_remainder_len
> 0) {
1179 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1180 plaintext
, aes_encrypt_block
);
1181 if (ret
== CRYPTO_DATA_LEN_RANGE
)
1182 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
1183 if (ret
!= CRYPTO_SUCCESS
)
1186 if (ciphertext
!= plaintext
)
1187 plaintext
->cd_length
=
1188 plaintext
->cd_offset
- saved_offset
;
1191 plaintext
->cd_length
= saved_length
;
1193 plaintext
->cd_offset
= saved_offset
;
1196 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1197 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1198 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1201 if (aes_ctx
.ac_flags
& CCM_MODE
) {
1202 if (aes_ctx
.ac_pt_buf
!= NULL
) {
1203 vmem_free(aes_ctx
.ac_pt_buf
, aes_ctx
.ac_data_len
);
1205 } else if (aes_ctx
.ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
1206 if (((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
!= NULL
) {
1207 vmem_free(((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
,
1208 ((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf_len
);
/*
 * KCF software provider context template entry points.
 */
1220 aes_create_ctx_template(crypto_provider_handle_t provider
,
1221 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1222 crypto_spi_ctx_template_t
*tmpl
, size_t *tmpl_size
, crypto_req_handle_t req
)
1228 if (mechanism
->cm_type
!= AES_ECB_MECH_INFO_TYPE
&&
1229 mechanism
->cm_type
!= AES_CBC_MECH_INFO_TYPE
&&
1230 mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
&&
1231 mechanism
->cm_type
!= AES_CCM_MECH_INFO_TYPE
&&
1232 mechanism
->cm_type
!= AES_GCM_MECH_INFO_TYPE
&&
1233 mechanism
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1234 return (CRYPTO_MECHANISM_INVALID
);
1236 if ((keysched
= aes_alloc_keysched(&size
,
1237 crypto_kmflag(req
))) == NULL
) {
1238 return (CRYPTO_HOST_MEMORY
);
1242 * Initialize key schedule. Key length information is stored
1245 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1246 bzero(keysched
, size
);
1247 kmem_free(keysched
, size
);
1254 return (CRYPTO_SUCCESS
);
1259 aes_free_context(crypto_ctx_t
*ctx
)
1261 aes_ctx_t
*aes_ctx
= ctx
->cc_provider_private
;
1263 if (aes_ctx
!= NULL
) {
1264 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1265 ASSERT(aes_ctx
->ac_keysched_len
!= 0);
1266 bzero(aes_ctx
->ac_keysched
, aes_ctx
->ac_keysched_len
);
1267 kmem_free(aes_ctx
->ac_keysched
,
1268 aes_ctx
->ac_keysched_len
);
1270 crypto_free_mode_ctx(aes_ctx
);
1271 ctx
->cc_provider_private
= NULL
;
1274 return (CRYPTO_SUCCESS
);
1279 aes_common_init_ctx(aes_ctx_t
*aes_ctx
, crypto_spi_ctx_template_t
*template,
1280 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
, int kmflag
,
1281 boolean_t is_encrypt_init
)
1283 int rv
= CRYPTO_SUCCESS
;
1287 if (template == NULL
) {
1288 if ((keysched
= aes_alloc_keysched(&size
, kmflag
)) == NULL
)
1289 return (CRYPTO_HOST_MEMORY
);
1291 * Initialize key schedule.
1292 * Key length is stored in the key.
1294 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1295 kmem_free(keysched
, size
);
1299 aes_ctx
->ac_flags
|= PROVIDER_OWNS_KEY_SCHEDULE
;
1300 aes_ctx
->ac_keysched_len
= size
;
1302 keysched
= template;
1304 aes_ctx
->ac_keysched
= keysched
;
1306 switch (mechanism
->cm_type
) {
1307 case AES_CBC_MECH_INFO_TYPE
:
1308 rv
= cbc_init_ctx((cbc_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1309 mechanism
->cm_param_len
, AES_BLOCK_LEN
, aes_copy_block64
);
1311 case AES_CTR_MECH_INFO_TYPE
: {
1312 CK_AES_CTR_PARAMS
*pp
;
1314 if (mechanism
->cm_param
== NULL
||
1315 mechanism
->cm_param_len
!= sizeof (CK_AES_CTR_PARAMS
)) {
1316 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1318 pp
= (CK_AES_CTR_PARAMS
*)(void *)mechanism
->cm_param
;
1319 rv
= ctr_init_ctx((ctr_ctx_t
*)aes_ctx
, pp
->ulCounterBits
,
1320 pp
->cb
, aes_copy_block
);
1323 case AES_CCM_MECH_INFO_TYPE
:
1324 if (mechanism
->cm_param
== NULL
||
1325 mechanism
->cm_param_len
!= sizeof (CK_AES_CCM_PARAMS
)) {
1326 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1328 rv
= ccm_init_ctx((ccm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1329 kmflag
, is_encrypt_init
, AES_BLOCK_LEN
, aes_encrypt_block
,
1332 case AES_GCM_MECH_INFO_TYPE
:
1333 if (mechanism
->cm_param
== NULL
||
1334 mechanism
->cm_param_len
!= sizeof (CK_AES_GCM_PARAMS
)) {
1335 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1337 rv
= gcm_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1338 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1341 case AES_GMAC_MECH_INFO_TYPE
:
1342 if (mechanism
->cm_param
== NULL
||
1343 mechanism
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
)) {
1344 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1346 rv
= gmac_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1347 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1350 case AES_ECB_MECH_INFO_TYPE
:
1351 aes_ctx
->ac_flags
|= ECB_MODE
;
1354 if (rv
!= CRYPTO_SUCCESS
) {
1355 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1356 bzero(keysched
, size
);
1357 kmem_free(keysched
, size
);
1365 process_gmac_mech(crypto_mechanism_t
*mech
, crypto_data_t
*data
,
1366 CK_AES_GCM_PARAMS
*gcm_params
)
1368 /* LINTED: pointer alignment */
1369 CK_AES_GMAC_PARAMS
*params
= (CK_AES_GMAC_PARAMS
*)mech
->cm_param
;
1371 if (mech
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1372 return (CRYPTO_MECHANISM_INVALID
);
1374 if (mech
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
))
1375 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1377 if (params
->pIv
== NULL
)
1378 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1380 gcm_params
->pIv
= params
->pIv
;
1381 gcm_params
->ulIvLen
= AES_GMAC_IV_LEN
;
1382 gcm_params
->ulTagBits
= AES_GMAC_TAG_BITS
;
1385 return (CRYPTO_SUCCESS
);
1387 if (data
->cd_format
!= CRYPTO_DATA_RAW
)
1388 return (CRYPTO_ARGUMENTS_BAD
);
1390 gcm_params
->pAAD
= (uchar_t
*)data
->cd_raw
.iov_base
;
1391 gcm_params
->ulAADLen
= data
->cd_length
;
1392 return (CRYPTO_SUCCESS
);
1396 aes_mac_atomic(crypto_provider_handle_t provider
,
1397 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1398 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1399 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1401 CK_AES_GCM_PARAMS gcm_params
;
1402 crypto_mechanism_t gcm_mech
;
1405 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1409 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1410 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1411 gcm_mech
.cm_param
= (char *)&gcm_params
;
1413 return (aes_encrypt_atomic(provider
, session_id
, &gcm_mech
,
1414 key
, &null_crypto_data
, mac
, template, req
));
1418 aes_mac_verify_atomic(crypto_provider_handle_t provider
,
1419 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1420 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1421 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1423 CK_AES_GCM_PARAMS gcm_params
;
1424 crypto_mechanism_t gcm_mech
;
1427 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1431 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1432 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1433 gcm_mech
.cm_param
= (char *)&gcm_params
;
1435 return (aes_decrypt_atomic(provider
, session_id
, &gcm_mech
,
1436 key
, mac
, &null_crypto_data
, template, req
));