4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
26 * AES provider for the Kernel Cryptographic Framework (KCF)
29 #include <sys/zfs_context.h>
30 #include <sys/crypto/common.h>
31 #include <sys/crypto/impl.h>
32 #include <sys/crypto/spi.h>
33 #include <sys/crypto/icp.h>
34 #include <modes/modes.h>
35 #include <sys/modctl.h>
37 #include <aes/aes_impl.h>
38 #include <modes/gcm_impl.h>
40 #define CRYPTO_PROVIDER_NAME "aes"
42 extern struct mod_ops mod_cryptoops
;
45 * Module linkage information for the kernel.
47 static struct modlcrypto modlcrypto
= {
49 "AES Kernel SW Provider"
52 static struct modlinkage modlinkage
= {
53 MODREV_1
, { (void *)&modlcrypto
, NULL
}
57 * Mechanism info structure passed to KCF during registration.
59 static crypto_mech_info_t aes_mech_info_tab
[] = {
61 {SUN_CKM_AES_ECB
, AES_ECB_MECH_INFO_TYPE
,
62 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
63 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
64 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
66 {SUN_CKM_AES_CBC
, AES_CBC_MECH_INFO_TYPE
,
67 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
68 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
69 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
71 {SUN_CKM_AES_CTR
, AES_CTR_MECH_INFO_TYPE
,
72 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
73 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
74 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
76 {SUN_CKM_AES_CCM
, AES_CCM_MECH_INFO_TYPE
,
77 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
78 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
79 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
81 {SUN_CKM_AES_GCM
, AES_GCM_MECH_INFO_TYPE
,
82 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
83 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
,
84 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
86 {SUN_CKM_AES_GMAC
, AES_GMAC_MECH_INFO_TYPE
,
87 CRYPTO_FG_ENCRYPT
| CRYPTO_FG_ENCRYPT_ATOMIC
|
88 CRYPTO_FG_DECRYPT
| CRYPTO_FG_DECRYPT_ATOMIC
|
89 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
|
90 CRYPTO_FG_SIGN
| CRYPTO_FG_SIGN_ATOMIC
|
91 CRYPTO_FG_VERIFY
| CRYPTO_FG_VERIFY_ATOMIC
,
92 AES_MIN_KEY_BYTES
, AES_MAX_KEY_BYTES
, CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
/*
 * Operations are in-place if the output buffer is NULL: in that case the
 * input buffer doubles as the output buffer.
 *
 * Fix: the previous expansion was a bare `if' statement; used before an
 * `else', it would silently capture that `else' (dangling-else hazard).
 * Wrapping in do { } while (0) makes the macro a single safe statement.
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
100 static void aes_provider_status(crypto_provider_handle_t
, uint_t
*);
102 static crypto_control_ops_t aes_control_ops
= {
106 static int aes_encrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
107 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
108 static int aes_decrypt_init(crypto_ctx_t
*, crypto_mechanism_t
*,
109 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
110 static int aes_common_init(crypto_ctx_t
*, crypto_mechanism_t
*,
111 crypto_key_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
, boolean_t
);
112 static int aes_common_init_ctx(aes_ctx_t
*, crypto_spi_ctx_template_t
*,
113 crypto_mechanism_t
*, crypto_key_t
*, int, boolean_t
);
114 static int aes_encrypt_final(crypto_ctx_t
*, crypto_data_t
*,
115 crypto_req_handle_t
);
116 static int aes_decrypt_final(crypto_ctx_t
*, crypto_data_t
*,
117 crypto_req_handle_t
);
119 static int aes_encrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
120 crypto_req_handle_t
);
121 static int aes_encrypt_update(crypto_ctx_t
*, crypto_data_t
*,
122 crypto_data_t
*, crypto_req_handle_t
);
123 static int aes_encrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
124 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
125 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
127 static int aes_decrypt(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
128 crypto_req_handle_t
);
129 static int aes_decrypt_update(crypto_ctx_t
*, crypto_data_t
*,
130 crypto_data_t
*, crypto_req_handle_t
);
131 static int aes_decrypt_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
132 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*,
133 crypto_data_t
*, crypto_spi_ctx_template_t
, crypto_req_handle_t
);
135 static crypto_cipher_ops_t aes_cipher_ops
= {
136 .encrypt_init
= aes_encrypt_init
,
137 .encrypt
= aes_encrypt
,
138 .encrypt_update
= aes_encrypt_update
,
139 .encrypt_final
= aes_encrypt_final
,
140 .encrypt_atomic
= aes_encrypt_atomic
,
141 .decrypt_init
= aes_decrypt_init
,
142 .decrypt
= aes_decrypt
,
143 .decrypt_update
= aes_decrypt_update
,
144 .decrypt_final
= aes_decrypt_final
,
145 .decrypt_atomic
= aes_decrypt_atomic
148 static int aes_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
149 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
150 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
151 static int aes_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
152 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
153 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
155 static crypto_mac_ops_t aes_mac_ops
= {
160 .mac_atomic
= aes_mac_atomic
,
161 .mac_verify_atomic
= aes_mac_verify_atomic
164 static int aes_create_ctx_template(crypto_provider_handle_t
,
165 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
166 size_t *, crypto_req_handle_t
);
167 static int aes_free_context(crypto_ctx_t
*);
169 static crypto_ctx_ops_t aes_ctx_ops
= {
170 .create_ctx_template
= aes_create_ctx_template
,
171 .free_context
= aes_free_context
/*
 * Master operations vector aggregating the op tables above.
 * NOTE(review): the initializer body appears truncated in this view —
 * verify the full member list against the upstream source before editing.
 */
174 static crypto_ops_t aes_crypto_ops
= {{{{{
/*
 * Provider descriptor passed to crypto_register_provider() in module init.
 * The mechanism count is derived from aes_mech_info_tab so the two stay
 * in sync automatically.
 * NOTE(review): several initializer members appear missing in this view —
 * confirm against the upstream source before editing.
 */
191 static crypto_provider_info_t aes_prov_info
= {{{{
192 CRYPTO_SPI_VERSION_1
,
193 "AES Software Provider",
197 sizeof (aes_mech_info_tab
)/sizeof (crypto_mech_info_t
),
201 static crypto_kcf_provider_handle_t aes_prov_handle
= 0;
202 static crypto_data_t null_crypto_data
= { CRYPTO_DATA_RAW
};
/*
 * Module init/fini fragments.  NOTE(review): the function signatures are
 * not visible in this view (presumably aes_mod_init()/aes_mod_fini() —
 * confirm against upstream); only their bodies survive below.
 * Init: install the module, then register with KCF, backing the install
 * out if registration fails.  Fini: unregister from KCF first (failure
 * means the provider is still busy), then remove the module.
 */
209 /* find fastest implementations and set any requested implementations */
213 if ((ret
= mod_install(&modlinkage
)) != 0)
216 /* Register with KCF. If the registration fails, remove the module. */
217 if (crypto_register_provider(&aes_prov_info
, &aes_prov_handle
)) {
218 (void) mod_remove(&modlinkage
);
228 /* Unregister from KCF if module is registered */
229 if (aes_prov_handle
!= 0) {
230 if (crypto_unregister_provider(aes_prov_handle
))
236 return (mod_remove(&modlinkage
));
240 aes_check_mech_param(crypto_mechanism_t
*mechanism
, aes_ctx_t
**ctx
, int kmflag
)
243 boolean_t param_required
= B_TRUE
;
245 void *(*alloc_fun
)(int);
246 int rv
= CRYPTO_SUCCESS
;
248 switch (mechanism
->cm_type
) {
249 case AES_ECB_MECH_INFO_TYPE
:
250 param_required
= B_FALSE
;
251 alloc_fun
= ecb_alloc_ctx
;
253 case AES_CBC_MECH_INFO_TYPE
:
254 param_len
= AES_BLOCK_LEN
;
255 alloc_fun
= cbc_alloc_ctx
;
257 case AES_CTR_MECH_INFO_TYPE
:
258 param_len
= sizeof (CK_AES_CTR_PARAMS
);
259 alloc_fun
= ctr_alloc_ctx
;
261 case AES_CCM_MECH_INFO_TYPE
:
262 param_len
= sizeof (CK_AES_CCM_PARAMS
);
263 alloc_fun
= ccm_alloc_ctx
;
265 case AES_GCM_MECH_INFO_TYPE
:
266 param_len
= sizeof (CK_AES_GCM_PARAMS
);
267 alloc_fun
= gcm_alloc_ctx
;
269 case AES_GMAC_MECH_INFO_TYPE
:
270 param_len
= sizeof (CK_AES_GMAC_PARAMS
);
271 alloc_fun
= gmac_alloc_ctx
;
274 rv
= CRYPTO_MECHANISM_INVALID
;
277 if (param_required
&& mechanism
->cm_param
!= NULL
&&
278 mechanism
->cm_param_len
!= param_len
) {
279 rv
= CRYPTO_MECHANISM_PARAM_INVALID
;
282 p
= (alloc_fun
)(kmflag
);
289 * Initialize key schedules for AES
292 init_keysched(crypto_key_t
*key
, void *newbie
)
295 * Only keys by value are supported by this module.
297 switch (key
->ck_format
) {
299 if (key
->ck_length
< AES_MINBITS
||
300 key
->ck_length
> AES_MAXBITS
) {
301 return (CRYPTO_KEY_SIZE_RANGE
);
304 /* key length must be either 128, 192, or 256 */
305 if ((key
->ck_length
& 63) != 0)
306 return (CRYPTO_KEY_SIZE_RANGE
);
309 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
312 aes_init_keysched(key
->ck_data
, key
->ck_length
, newbie
);
313 return (CRYPTO_SUCCESS
);
317 * KCF software provider control entry points.
321 aes_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
323 *status
= CRYPTO_PROVIDER_READY
;
327 aes_encrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
328 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
329 crypto_req_handle_t req
)
331 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_TRUE
));
335 aes_decrypt_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
336 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
337 crypto_req_handle_t req
)
339 return (aes_common_init(ctx
, mechanism
, key
, template, req
, B_FALSE
));
345 * KCF software provider encrypt entry points.
348 aes_common_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
349 crypto_key_t
*key
, crypto_spi_ctx_template_t
template,
350 crypto_req_handle_t req
, boolean_t is_encrypt_init
)
357 * Only keys by value are supported by this module.
359 if (key
->ck_format
!= CRYPTO_KEY_RAW
) {
360 return (CRYPTO_KEY_TYPE_INCONSISTENT
);
363 kmflag
= crypto_kmflag(req
);
364 if ((rv
= aes_check_mech_param(mechanism
, &aes_ctx
, kmflag
))
368 rv
= aes_common_init_ctx(aes_ctx
, template, mechanism
, key
, kmflag
,
370 if (rv
!= CRYPTO_SUCCESS
) {
371 crypto_free_mode_ctx(aes_ctx
);
375 ctx
->cc_provider_private
= aes_ctx
;
377 return (CRYPTO_SUCCESS
);
381 aes_copy_block64(uint8_t *in
, uint64_t *out
)
383 if (IS_P2ALIGNED(in
, sizeof (uint64_t))) {
384 /* LINTED: pointer alignment */
385 out
[0] = *(uint64_t *)&in
[0];
386 /* LINTED: pointer alignment */
387 out
[1] = *(uint64_t *)&in
[8];
389 uint8_t *iv8
= (uint8_t *)&out
[0];
391 AES_COPY_BLOCK(in
, iv8
);
397 aes_encrypt(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
398 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
400 int ret
= CRYPTO_FAILED
;
403 size_t saved_length
, saved_offset
, length_needed
;
405 ASSERT(ctx
->cc_provider_private
!= NULL
);
406 aes_ctx
= ctx
->cc_provider_private
;
409 * For block ciphers, plaintext must be a multiple of AES block size.
410 * This test is only valid for ciphers whose blocksize is a power of 2.
412 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
413 == 0) && (plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
414 return (CRYPTO_DATA_LEN_RANGE
);
416 AES_ARG_INPLACE(plaintext
, ciphertext
);
419 * We need to just return the length needed to store the output.
420 * We should not destroy the context for the following case.
422 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
424 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_mac_len
;
427 length_needed
= plaintext
->cd_length
+ aes_ctx
->ac_tag_len
;
430 if (plaintext
->cd_length
!= 0)
431 return (CRYPTO_ARGUMENTS_BAD
);
433 length_needed
= aes_ctx
->ac_tag_len
;
436 length_needed
= plaintext
->cd_length
;
439 if (ciphertext
->cd_length
< length_needed
) {
440 ciphertext
->cd_length
= length_needed
;
441 return (CRYPTO_BUFFER_TOO_SMALL
);
444 saved_length
= ciphertext
->cd_length
;
445 saved_offset
= ciphertext
->cd_offset
;
448 * Do an update on the specified input data.
450 ret
= aes_encrypt_update(ctx
, plaintext
, ciphertext
, req
);
451 if (ret
!= CRYPTO_SUCCESS
) {
456 * For CCM mode, aes_ccm_encrypt_final() will take care of any
457 * left-over unprocessed data, and compute the MAC
459 if (aes_ctx
->ac_flags
& CCM_MODE
) {
461 * ccm_encrypt_final() will compute the MAC and append
462 * it to existing ciphertext. So, need to adjust the left over
463 * length value accordingly
466 /* order of following 2 lines MUST not be reversed */
467 ciphertext
->cd_offset
= ciphertext
->cd_length
;
468 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
469 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, ciphertext
,
470 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
471 if (ret
!= CRYPTO_SUCCESS
) {
475 if (plaintext
!= ciphertext
) {
476 ciphertext
->cd_length
=
477 ciphertext
->cd_offset
- saved_offset
;
479 ciphertext
->cd_offset
= saved_offset
;
480 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
482 * gcm_encrypt_final() will compute the MAC and append
483 * it to existing ciphertext. So, need to adjust the left over
484 * length value accordingly
487 /* order of following 2 lines MUST not be reversed */
488 ciphertext
->cd_offset
= ciphertext
->cd_length
;
489 ciphertext
->cd_length
= saved_length
- ciphertext
->cd_length
;
490 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, ciphertext
,
491 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
493 if (ret
!= CRYPTO_SUCCESS
) {
497 if (plaintext
!= ciphertext
) {
498 ciphertext
->cd_length
=
499 ciphertext
->cd_offset
- saved_offset
;
501 ciphertext
->cd_offset
= saved_offset
;
504 ASSERT(aes_ctx
->ac_remainder_len
== 0);
505 (void) aes_free_context(ctx
);
512 aes_decrypt(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
513 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
515 int ret
= CRYPTO_FAILED
;
519 size_t saved_length
, length_needed
;
521 ASSERT(ctx
->cc_provider_private
!= NULL
);
522 aes_ctx
= ctx
->cc_provider_private
;
525 * For block ciphers, plaintext must be a multiple of AES block size.
526 * This test is only valid for ciphers whose blocksize is a power of 2.
528 if (((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
))
529 == 0) && (ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0) {
530 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
533 AES_ARG_INPLACE(ciphertext
, plaintext
);
536 * Return length needed to store the output.
537 * Do not destroy context when plaintext buffer is too small.
539 * CCM: plaintext is MAC len smaller than cipher text
540 * GCM: plaintext is TAG len smaller than cipher text
541 * GMAC: plaintext length must be zero
543 switch (aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) {
545 length_needed
= aes_ctx
->ac_processed_data_len
;
548 length_needed
= ciphertext
->cd_length
- aes_ctx
->ac_tag_len
;
551 if (plaintext
->cd_length
!= 0)
552 return (CRYPTO_ARGUMENTS_BAD
);
557 length_needed
= ciphertext
->cd_length
;
560 if (plaintext
->cd_length
< length_needed
) {
561 plaintext
->cd_length
= length_needed
;
562 return (CRYPTO_BUFFER_TOO_SMALL
);
565 saved_offset
= plaintext
->cd_offset
;
566 saved_length
= plaintext
->cd_length
;
569 * Do an update on the specified input data.
571 ret
= aes_decrypt_update(ctx
, ciphertext
, plaintext
, req
);
572 if (ret
!= CRYPTO_SUCCESS
) {
576 if (aes_ctx
->ac_flags
& CCM_MODE
) {
577 ASSERT(aes_ctx
->ac_processed_data_len
== aes_ctx
->ac_data_len
);
578 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
580 /* order of following 2 lines MUST not be reversed */
581 plaintext
->cd_offset
= plaintext
->cd_length
;
582 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
584 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, plaintext
,
585 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
587 if (ret
== CRYPTO_SUCCESS
) {
588 if (plaintext
!= ciphertext
) {
589 plaintext
->cd_length
=
590 plaintext
->cd_offset
- saved_offset
;
593 plaintext
->cd_length
= saved_length
;
596 plaintext
->cd_offset
= saved_offset
;
597 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
598 /* order of following 2 lines MUST not be reversed */
599 plaintext
->cd_offset
= plaintext
->cd_length
;
600 plaintext
->cd_length
= saved_length
- plaintext
->cd_length
;
602 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, plaintext
,
603 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
604 if (ret
== CRYPTO_SUCCESS
) {
605 if (plaintext
!= ciphertext
) {
606 plaintext
->cd_length
=
607 plaintext
->cd_offset
- saved_offset
;
610 plaintext
->cd_length
= saved_length
;
613 plaintext
->cd_offset
= saved_offset
;
616 ASSERT(aes_ctx
->ac_remainder_len
== 0);
619 (void) aes_free_context(ctx
);
627 aes_encrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*plaintext
,
628 crypto_data_t
*ciphertext
, crypto_req_handle_t req
)
631 size_t saved_length
, out_len
;
632 int ret
= CRYPTO_SUCCESS
;
635 ASSERT(ctx
->cc_provider_private
!= NULL
);
636 aes_ctx
= ctx
->cc_provider_private
;
638 AES_ARG_INPLACE(plaintext
, ciphertext
);
640 /* compute number of bytes that will hold the ciphertext */
641 out_len
= aes_ctx
->ac_remainder_len
;
642 out_len
+= plaintext
->cd_length
;
643 out_len
&= ~(AES_BLOCK_LEN
- 1);
645 /* return length needed to store the output */
646 if (ciphertext
->cd_length
< out_len
) {
647 ciphertext
->cd_length
= out_len
;
648 return (CRYPTO_BUFFER_TOO_SMALL
);
651 saved_offset
= ciphertext
->cd_offset
;
652 saved_length
= ciphertext
->cd_length
;
655 * Do the AES update on the specified input data.
657 switch (plaintext
->cd_format
) {
658 case CRYPTO_DATA_RAW
:
659 ret
= crypto_update_iov(ctx
->cc_provider_private
,
660 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
663 case CRYPTO_DATA_UIO
:
664 ret
= crypto_update_uio(ctx
->cc_provider_private
,
665 plaintext
, ciphertext
, aes_encrypt_contiguous_blocks
,
669 ret
= CRYPTO_ARGUMENTS_BAD
;
673 * Since AES counter mode is a stream cipher, we call
674 * ctr_mode_final() to pick up any remaining bytes.
675 * It is an internal function that does not destroy
676 * the context like *normal* final routines.
678 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
679 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
,
680 ciphertext
, aes_encrypt_block
);
683 if (ret
== CRYPTO_SUCCESS
) {
684 if (plaintext
!= ciphertext
)
685 ciphertext
->cd_length
=
686 ciphertext
->cd_offset
- saved_offset
;
688 ciphertext
->cd_length
= saved_length
;
690 ciphertext
->cd_offset
= saved_offset
;
697 aes_decrypt_update(crypto_ctx_t
*ctx
, crypto_data_t
*ciphertext
,
698 crypto_data_t
*plaintext
, crypto_req_handle_t req
)
701 size_t saved_length
, out_len
;
702 int ret
= CRYPTO_SUCCESS
;
705 ASSERT(ctx
->cc_provider_private
!= NULL
);
706 aes_ctx
= ctx
->cc_provider_private
;
708 AES_ARG_INPLACE(ciphertext
, plaintext
);
711 * Compute number of bytes that will hold the plaintext.
712 * This is not necessary for CCM, GCM, and GMAC since these
713 * mechanisms never return plaintext for update operations.
715 if ((aes_ctx
->ac_flags
& (CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
716 out_len
= aes_ctx
->ac_remainder_len
;
717 out_len
+= ciphertext
->cd_length
;
718 out_len
&= ~(AES_BLOCK_LEN
- 1);
720 /* return length needed to store the output */
721 if (plaintext
->cd_length
< out_len
) {
722 plaintext
->cd_length
= out_len
;
723 return (CRYPTO_BUFFER_TOO_SMALL
);
727 saved_offset
= plaintext
->cd_offset
;
728 saved_length
= plaintext
->cd_length
;
730 if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
))
731 gcm_set_kmflag((gcm_ctx_t
*)aes_ctx
, crypto_kmflag(req
));
734 * Do the AES update on the specified input data.
736 switch (ciphertext
->cd_format
) {
737 case CRYPTO_DATA_RAW
:
738 ret
= crypto_update_iov(ctx
->cc_provider_private
,
739 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
742 case CRYPTO_DATA_UIO
:
743 ret
= crypto_update_uio(ctx
->cc_provider_private
,
744 ciphertext
, plaintext
, aes_decrypt_contiguous_blocks
,
748 ret
= CRYPTO_ARGUMENTS_BAD
;
752 * Since AES counter mode is a stream cipher, we call
753 * ctr_mode_final() to pick up any remaining bytes.
754 * It is an internal function that does not destroy
755 * the context like *normal* final routines.
757 if ((aes_ctx
->ac_flags
& CTR_MODE
) && (aes_ctx
->ac_remainder_len
> 0)) {
758 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, plaintext
,
760 if (ret
== CRYPTO_DATA_LEN_RANGE
)
761 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
764 if (ret
== CRYPTO_SUCCESS
) {
765 if (ciphertext
!= plaintext
)
766 plaintext
->cd_length
=
767 plaintext
->cd_offset
- saved_offset
;
769 plaintext
->cd_length
= saved_length
;
771 plaintext
->cd_offset
= saved_offset
;
779 aes_encrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
780 crypto_req_handle_t req
)
785 ASSERT(ctx
->cc_provider_private
!= NULL
);
786 aes_ctx
= ctx
->cc_provider_private
;
788 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
789 data
->cd_format
!= CRYPTO_DATA_UIO
) {
790 return (CRYPTO_ARGUMENTS_BAD
);
793 if (aes_ctx
->ac_flags
& CTR_MODE
) {
794 if (aes_ctx
->ac_remainder_len
> 0) {
795 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
797 if (ret
!= CRYPTO_SUCCESS
)
800 } else if (aes_ctx
->ac_flags
& CCM_MODE
) {
801 ret
= ccm_encrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
802 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
803 if (ret
!= CRYPTO_SUCCESS
) {
806 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
807 size_t saved_offset
= data
->cd_offset
;
809 ret
= gcm_encrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
810 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
812 if (ret
!= CRYPTO_SUCCESS
) {
815 data
->cd_length
= data
->cd_offset
- saved_offset
;
816 data
->cd_offset
= saved_offset
;
819 * There must be no unprocessed plaintext.
820 * This happens if the length of the last data is
821 * not a multiple of the AES block length.
823 if (aes_ctx
->ac_remainder_len
> 0) {
824 return (CRYPTO_DATA_LEN_RANGE
);
829 (void) aes_free_context(ctx
);
831 return (CRYPTO_SUCCESS
);
836 aes_decrypt_final(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
837 crypto_req_handle_t req
)
844 ASSERT(ctx
->cc_provider_private
!= NULL
);
845 aes_ctx
= ctx
->cc_provider_private
;
847 if (data
->cd_format
!= CRYPTO_DATA_RAW
&&
848 data
->cd_format
!= CRYPTO_DATA_UIO
) {
849 return (CRYPTO_ARGUMENTS_BAD
);
853 * There must be no unprocessed ciphertext.
854 * This happens if the length of the last ciphertext is
855 * not a multiple of the AES block length.
857 if (aes_ctx
->ac_remainder_len
> 0) {
858 if ((aes_ctx
->ac_flags
& CTR_MODE
) == 0)
859 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
861 ret
= ctr_mode_final((ctr_ctx_t
*)aes_ctx
, data
,
863 if (ret
== CRYPTO_DATA_LEN_RANGE
)
864 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
865 if (ret
!= CRYPTO_SUCCESS
)
870 if (aes_ctx
->ac_flags
& CCM_MODE
) {
872 * This is where all the plaintext is returned, make sure
873 * the plaintext buffer is big enough
875 size_t pt_len
= aes_ctx
->ac_data_len
;
876 if (data
->cd_length
< pt_len
) {
877 data
->cd_length
= pt_len
;
878 return (CRYPTO_BUFFER_TOO_SMALL
);
881 ASSERT(aes_ctx
->ac_processed_data_len
== pt_len
);
882 ASSERT(aes_ctx
->ac_processed_mac_len
== aes_ctx
->ac_mac_len
);
883 saved_offset
= data
->cd_offset
;
884 saved_length
= data
->cd_length
;
885 ret
= ccm_decrypt_final((ccm_ctx_t
*)aes_ctx
, data
,
886 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
888 if (ret
== CRYPTO_SUCCESS
) {
889 data
->cd_length
= data
->cd_offset
- saved_offset
;
891 data
->cd_length
= saved_length
;
894 data
->cd_offset
= saved_offset
;
895 if (ret
!= CRYPTO_SUCCESS
) {
898 } else if (aes_ctx
->ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
900 * This is where all the plaintext is returned, make sure
901 * the plaintext buffer is big enough
903 gcm_ctx_t
*ctx
= (gcm_ctx_t
*)aes_ctx
;
904 size_t pt_len
= ctx
->gcm_processed_data_len
- ctx
->gcm_tag_len
;
906 if (data
->cd_length
< pt_len
) {
907 data
->cd_length
= pt_len
;
908 return (CRYPTO_BUFFER_TOO_SMALL
);
911 saved_offset
= data
->cd_offset
;
912 saved_length
= data
->cd_length
;
913 ret
= gcm_decrypt_final((gcm_ctx_t
*)aes_ctx
, data
,
914 AES_BLOCK_LEN
, aes_encrypt_block
, aes_xor_block
);
915 if (ret
== CRYPTO_SUCCESS
) {
916 data
->cd_length
= data
->cd_offset
- saved_offset
;
918 data
->cd_length
= saved_length
;
921 data
->cd_offset
= saved_offset
;
922 if (ret
!= CRYPTO_SUCCESS
) {
928 if ((aes_ctx
->ac_flags
& (CTR_MODE
|CCM_MODE
|GCM_MODE
|GMAC_MODE
)) == 0) {
932 (void) aes_free_context(ctx
);
934 return (CRYPTO_SUCCESS
);
939 aes_encrypt_atomic(crypto_provider_handle_t provider
,
940 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
941 crypto_key_t
*key
, crypto_data_t
*plaintext
, crypto_data_t
*ciphertext
,
942 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
944 aes_ctx_t aes_ctx
; /* on the stack */
947 size_t length_needed
;
950 AES_ARG_INPLACE(plaintext
, ciphertext
);
953 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
954 * be a multiple of AES block size.
956 switch (mechanism
->cm_type
) {
957 case AES_CTR_MECH_INFO_TYPE
:
958 case AES_CCM_MECH_INFO_TYPE
:
959 case AES_GCM_MECH_INFO_TYPE
:
960 case AES_GMAC_MECH_INFO_TYPE
:
963 if ((plaintext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
964 return (CRYPTO_DATA_LEN_RANGE
);
967 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
970 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
972 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
973 crypto_kmflag(req
), B_TRUE
);
974 if (ret
!= CRYPTO_SUCCESS
)
977 switch (mechanism
->cm_type
) {
978 case AES_CCM_MECH_INFO_TYPE
:
979 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_mac_len
;
981 case AES_GMAC_MECH_INFO_TYPE
:
982 if (plaintext
->cd_length
!= 0)
983 return (CRYPTO_ARGUMENTS_BAD
);
985 case AES_GCM_MECH_INFO_TYPE
:
986 length_needed
= plaintext
->cd_length
+ aes_ctx
.ac_tag_len
;
989 length_needed
= plaintext
->cd_length
;
992 /* return size of buffer needed to store output */
993 if (ciphertext
->cd_length
< length_needed
) {
994 ciphertext
->cd_length
= length_needed
;
995 ret
= CRYPTO_BUFFER_TOO_SMALL
;
999 saved_offset
= ciphertext
->cd_offset
;
1000 saved_length
= ciphertext
->cd_length
;
1003 * Do an update on the specified input data.
1005 switch (plaintext
->cd_format
) {
1006 case CRYPTO_DATA_RAW
:
1007 ret
= crypto_update_iov(&aes_ctx
, plaintext
, ciphertext
,
1008 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1010 case CRYPTO_DATA_UIO
:
1011 ret
= crypto_update_uio(&aes_ctx
, plaintext
, ciphertext
,
1012 aes_encrypt_contiguous_blocks
, aes_copy_block64
);
1015 ret
= CRYPTO_ARGUMENTS_BAD
;
1018 if (ret
== CRYPTO_SUCCESS
) {
1019 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1020 ret
= ccm_encrypt_final((ccm_ctx_t
*)&aes_ctx
,
1021 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1023 if (ret
!= CRYPTO_SUCCESS
)
1025 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1026 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1027 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1028 ret
= gcm_encrypt_final((gcm_ctx_t
*)&aes_ctx
,
1029 ciphertext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1030 aes_copy_block
, aes_xor_block
);
1031 if (ret
!= CRYPTO_SUCCESS
)
1033 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1034 } else if (mechanism
->cm_type
== AES_CTR_MECH_INFO_TYPE
) {
1035 if (aes_ctx
.ac_remainder_len
> 0) {
1036 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1037 ciphertext
, aes_encrypt_block
);
1038 if (ret
!= CRYPTO_SUCCESS
)
1042 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1045 if (plaintext
!= ciphertext
) {
1046 ciphertext
->cd_length
=
1047 ciphertext
->cd_offset
- saved_offset
;
1050 ciphertext
->cd_length
= saved_length
;
1052 ciphertext
->cd_offset
= saved_offset
;
1055 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1056 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1057 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1065 aes_decrypt_atomic(crypto_provider_handle_t provider
,
1066 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1067 crypto_key_t
*key
, crypto_data_t
*ciphertext
, crypto_data_t
*plaintext
,
1068 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1070 aes_ctx_t aes_ctx
; /* on the stack */
1072 size_t saved_length
;
1073 size_t length_needed
;
1076 AES_ARG_INPLACE(ciphertext
, plaintext
);
1079 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1080 * be a multiple of AES block size.
1082 switch (mechanism
->cm_type
) {
1083 case AES_CTR_MECH_INFO_TYPE
:
1084 case AES_CCM_MECH_INFO_TYPE
:
1085 case AES_GCM_MECH_INFO_TYPE
:
1086 case AES_GMAC_MECH_INFO_TYPE
:
1089 if ((ciphertext
->cd_length
& (AES_BLOCK_LEN
- 1)) != 0)
1090 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE
);
1093 if ((ret
= aes_check_mech_param(mechanism
, NULL
, 0)) != CRYPTO_SUCCESS
)
1096 bzero(&aes_ctx
, sizeof (aes_ctx_t
));
1098 ret
= aes_common_init_ctx(&aes_ctx
, template, mechanism
, key
,
1099 crypto_kmflag(req
), B_FALSE
);
1100 if (ret
!= CRYPTO_SUCCESS
)
1103 switch (mechanism
->cm_type
) {
1104 case AES_CCM_MECH_INFO_TYPE
:
1105 length_needed
= aes_ctx
.ac_data_len
;
1107 case AES_GCM_MECH_INFO_TYPE
:
1108 length_needed
= ciphertext
->cd_length
- aes_ctx
.ac_tag_len
;
1110 case AES_GMAC_MECH_INFO_TYPE
:
1111 if (plaintext
->cd_length
!= 0)
1112 return (CRYPTO_ARGUMENTS_BAD
);
1116 length_needed
= ciphertext
->cd_length
;
1119 /* return size of buffer needed to store output */
1120 if (plaintext
->cd_length
< length_needed
) {
1121 plaintext
->cd_length
= length_needed
;
1122 ret
= CRYPTO_BUFFER_TOO_SMALL
;
1126 saved_offset
= plaintext
->cd_offset
;
1127 saved_length
= plaintext
->cd_length
;
1129 if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1130 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
)
1131 gcm_set_kmflag((gcm_ctx_t
*)&aes_ctx
, crypto_kmflag(req
));
1134 * Do an update on the specified input data.
1136 switch (ciphertext
->cd_format
) {
1137 case CRYPTO_DATA_RAW
:
1138 ret
= crypto_update_iov(&aes_ctx
, ciphertext
, plaintext
,
1139 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1141 case CRYPTO_DATA_UIO
:
1142 ret
= crypto_update_uio(&aes_ctx
, ciphertext
, plaintext
,
1143 aes_decrypt_contiguous_blocks
, aes_copy_block64
);
1146 ret
= CRYPTO_ARGUMENTS_BAD
;
1149 if (ret
== CRYPTO_SUCCESS
) {
1150 if (mechanism
->cm_type
== AES_CCM_MECH_INFO_TYPE
) {
1151 ASSERT(aes_ctx
.ac_processed_data_len
1152 == aes_ctx
.ac_data_len
);
1153 ASSERT(aes_ctx
.ac_processed_mac_len
1154 == aes_ctx
.ac_mac_len
);
1155 ret
= ccm_decrypt_final((ccm_ctx_t
*)&aes_ctx
,
1156 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1157 aes_copy_block
, aes_xor_block
);
1158 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1159 if ((ret
== CRYPTO_SUCCESS
) &&
1160 (ciphertext
!= plaintext
)) {
1161 plaintext
->cd_length
=
1162 plaintext
->cd_offset
- saved_offset
;
1164 plaintext
->cd_length
= saved_length
;
1166 } else if (mechanism
->cm_type
== AES_GCM_MECH_INFO_TYPE
||
1167 mechanism
->cm_type
== AES_GMAC_MECH_INFO_TYPE
) {
1168 ret
= gcm_decrypt_final((gcm_ctx_t
*)&aes_ctx
,
1169 plaintext
, AES_BLOCK_LEN
, aes_encrypt_block
,
1171 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1172 if ((ret
== CRYPTO_SUCCESS
) &&
1173 (ciphertext
!= plaintext
)) {
1174 plaintext
->cd_length
=
1175 plaintext
->cd_offset
- saved_offset
;
1177 plaintext
->cd_length
= saved_length
;
1179 } else if (mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
) {
1180 ASSERT(aes_ctx
.ac_remainder_len
== 0);
1181 if (ciphertext
!= plaintext
)
1182 plaintext
->cd_length
=
1183 plaintext
->cd_offset
- saved_offset
;
1185 if (aes_ctx
.ac_remainder_len
> 0) {
1186 ret
= ctr_mode_final((ctr_ctx_t
*)&aes_ctx
,
1187 plaintext
, aes_encrypt_block
);
1188 if (ret
== CRYPTO_DATA_LEN_RANGE
)
1189 ret
= CRYPTO_ENCRYPTED_DATA_LEN_RANGE
;
1190 if (ret
!= CRYPTO_SUCCESS
)
1193 if (ciphertext
!= plaintext
)
1194 plaintext
->cd_length
=
1195 plaintext
->cd_offset
- saved_offset
;
1198 plaintext
->cd_length
= saved_length
;
1200 plaintext
->cd_offset
= saved_offset
;
1203 if (aes_ctx
.ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1204 bzero(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1205 kmem_free(aes_ctx
.ac_keysched
, aes_ctx
.ac_keysched_len
);
1208 if (aes_ctx
.ac_flags
& CCM_MODE
) {
1209 if (aes_ctx
.ac_pt_buf
!= NULL
) {
1210 vmem_free(aes_ctx
.ac_pt_buf
, aes_ctx
.ac_data_len
);
1212 } else if (aes_ctx
.ac_flags
& (GCM_MODE
|GMAC_MODE
)) {
1213 if (((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
!= NULL
) {
1214 vmem_free(((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf
,
1215 ((gcm_ctx_t
*)&aes_ctx
)->gcm_pt_buf_len
);
1223 * KCF software provider context template entry points.
1227 aes_create_ctx_template(crypto_provider_handle_t provider
,
1228 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1229 crypto_spi_ctx_template_t
*tmpl
, size_t *tmpl_size
, crypto_req_handle_t req
)
1235 if (mechanism
->cm_type
!= AES_ECB_MECH_INFO_TYPE
&&
1236 mechanism
->cm_type
!= AES_CBC_MECH_INFO_TYPE
&&
1237 mechanism
->cm_type
!= AES_CTR_MECH_INFO_TYPE
&&
1238 mechanism
->cm_type
!= AES_CCM_MECH_INFO_TYPE
&&
1239 mechanism
->cm_type
!= AES_GCM_MECH_INFO_TYPE
&&
1240 mechanism
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1241 return (CRYPTO_MECHANISM_INVALID
);
1243 if ((keysched
= aes_alloc_keysched(&size
,
1244 crypto_kmflag(req
))) == NULL
) {
1245 return (CRYPTO_HOST_MEMORY
);
1249 * Initialize key schedule. Key length information is stored
1252 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1253 bzero(keysched
, size
);
1254 kmem_free(keysched
, size
);
1261 return (CRYPTO_SUCCESS
);
1266 aes_free_context(crypto_ctx_t
*ctx
)
1268 aes_ctx_t
*aes_ctx
= ctx
->cc_provider_private
;
1270 if (aes_ctx
!= NULL
) {
1271 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1272 ASSERT(aes_ctx
->ac_keysched_len
!= 0);
1273 bzero(aes_ctx
->ac_keysched
, aes_ctx
->ac_keysched_len
);
1274 kmem_free(aes_ctx
->ac_keysched
,
1275 aes_ctx
->ac_keysched_len
);
1277 crypto_free_mode_ctx(aes_ctx
);
1278 ctx
->cc_provider_private
= NULL
;
1281 return (CRYPTO_SUCCESS
);
1286 aes_common_init_ctx(aes_ctx_t
*aes_ctx
, crypto_spi_ctx_template_t
*template,
1287 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
, int kmflag
,
1288 boolean_t is_encrypt_init
)
1290 int rv
= CRYPTO_SUCCESS
;
1294 if (template == NULL
) {
1295 if ((keysched
= aes_alloc_keysched(&size
, kmflag
)) == NULL
)
1296 return (CRYPTO_HOST_MEMORY
);
1298 * Initialize key schedule.
1299 * Key length is stored in the key.
1301 if ((rv
= init_keysched(key
, keysched
)) != CRYPTO_SUCCESS
) {
1302 kmem_free(keysched
, size
);
1306 aes_ctx
->ac_flags
|= PROVIDER_OWNS_KEY_SCHEDULE
;
1307 aes_ctx
->ac_keysched_len
= size
;
1309 keysched
= template;
1311 aes_ctx
->ac_keysched
= keysched
;
1313 switch (mechanism
->cm_type
) {
1314 case AES_CBC_MECH_INFO_TYPE
:
1315 rv
= cbc_init_ctx((cbc_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1316 mechanism
->cm_param_len
, AES_BLOCK_LEN
, aes_copy_block64
);
1318 case AES_CTR_MECH_INFO_TYPE
: {
1319 CK_AES_CTR_PARAMS
*pp
;
1321 if (mechanism
->cm_param
== NULL
||
1322 mechanism
->cm_param_len
!= sizeof (CK_AES_CTR_PARAMS
)) {
1323 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1325 pp
= (CK_AES_CTR_PARAMS
*)(void *)mechanism
->cm_param
;
1326 rv
= ctr_init_ctx((ctr_ctx_t
*)aes_ctx
, pp
->ulCounterBits
,
1327 pp
->cb
, aes_copy_block
);
1330 case AES_CCM_MECH_INFO_TYPE
:
1331 if (mechanism
->cm_param
== NULL
||
1332 mechanism
->cm_param_len
!= sizeof (CK_AES_CCM_PARAMS
)) {
1333 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1335 rv
= ccm_init_ctx((ccm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1336 kmflag
, is_encrypt_init
, AES_BLOCK_LEN
, aes_encrypt_block
,
1339 case AES_GCM_MECH_INFO_TYPE
:
1340 if (mechanism
->cm_param
== NULL
||
1341 mechanism
->cm_param_len
!= sizeof (CK_AES_GCM_PARAMS
)) {
1342 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1344 rv
= gcm_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1345 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1348 case AES_GMAC_MECH_INFO_TYPE
:
1349 if (mechanism
->cm_param
== NULL
||
1350 mechanism
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
)) {
1351 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1353 rv
= gmac_init_ctx((gcm_ctx_t
*)aes_ctx
, mechanism
->cm_param
,
1354 AES_BLOCK_LEN
, aes_encrypt_block
, aes_copy_block
,
1357 case AES_ECB_MECH_INFO_TYPE
:
1358 aes_ctx
->ac_flags
|= ECB_MODE
;
1361 if (rv
!= CRYPTO_SUCCESS
) {
1362 if (aes_ctx
->ac_flags
& PROVIDER_OWNS_KEY_SCHEDULE
) {
1363 bzero(keysched
, size
);
1364 kmem_free(keysched
, size
);
1372 process_gmac_mech(crypto_mechanism_t
*mech
, crypto_data_t
*data
,
1373 CK_AES_GCM_PARAMS
*gcm_params
)
1375 /* LINTED: pointer alignment */
1376 CK_AES_GMAC_PARAMS
*params
= (CK_AES_GMAC_PARAMS
*)mech
->cm_param
;
1378 if (mech
->cm_type
!= AES_GMAC_MECH_INFO_TYPE
)
1379 return (CRYPTO_MECHANISM_INVALID
);
1381 if (mech
->cm_param_len
!= sizeof (CK_AES_GMAC_PARAMS
))
1382 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1384 if (params
->pIv
== NULL
)
1385 return (CRYPTO_MECHANISM_PARAM_INVALID
);
1387 gcm_params
->pIv
= params
->pIv
;
1388 gcm_params
->ulIvLen
= AES_GMAC_IV_LEN
;
1389 gcm_params
->ulTagBits
= AES_GMAC_TAG_BITS
;
1392 return (CRYPTO_SUCCESS
);
1394 if (data
->cd_format
!= CRYPTO_DATA_RAW
)
1395 return (CRYPTO_ARGUMENTS_BAD
);
1397 gcm_params
->pAAD
= (uchar_t
*)data
->cd_raw
.iov_base
;
1398 gcm_params
->ulAADLen
= data
->cd_length
;
1399 return (CRYPTO_SUCCESS
);
1403 aes_mac_atomic(crypto_provider_handle_t provider
,
1404 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1405 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1406 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1408 CK_AES_GCM_PARAMS gcm_params
;
1409 crypto_mechanism_t gcm_mech
;
1412 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1416 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1417 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1418 gcm_mech
.cm_param
= (char *)&gcm_params
;
1420 return (aes_encrypt_atomic(provider
, session_id
, &gcm_mech
,
1421 key
, &null_crypto_data
, mac
, template, req
));
1425 aes_mac_verify_atomic(crypto_provider_handle_t provider
,
1426 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1427 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1428 crypto_spi_ctx_template_t
template, crypto_req_handle_t req
)
1430 CK_AES_GCM_PARAMS gcm_params
;
1431 crypto_mechanism_t gcm_mech
;
1434 if ((rv
= process_gmac_mech(mechanism
, data
, &gcm_params
))
1438 gcm_mech
.cm_type
= AES_GCM_MECH_INFO_TYPE
;
1439 gcm_mech
.cm_param_len
= sizeof (CK_AES_GCM_PARAMS
);
1440 gcm_mech
.cm_param
= (char *)&gcm_params
;
1442 return (aes_decrypt_atomic(provider
, session_id
, &gcm_mech
,
1443 key
, mac
, &null_crypto_data
, template, req
));