4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
23 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
27 #include <sys/zfs_context.h>
28 #include <sys/modctl.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/crypto/icp.h>
34 #include <sha2/sha2_impl.h>
37 * The sha2 module is created with two modlinkages:
38 * - a modlmisc that allows consumers to directly call the entry points
39 * SHA2Init, SHA2Update, and SHA2Final.
40 * - a modlcrypto that allows the module to register with the Kernel
41 * Cryptographic Framework (KCF) as a software provider for the SHA2
45 static struct modlcrypto modlcrypto
= {
47 "SHA2 Kernel SW Provider"
50 static struct modlinkage modlinkage
= {
51 MODREV_1
, {&modlcrypto
, NULL
}
55 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
56 * by KCF to one of the entry points.
59 #define PROV_SHA2_CTX(ctx) ((sha2_ctx_t *)(ctx)->cc_provider_private)
60 #define PROV_SHA2_HMAC_CTX(ctx) ((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
62 /* to extract the digest length passed as mechanism parameter */
63 #define PROV_SHA2_GET_DIGEST_LEN(m, len) { \
64 if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t))) \
65 (len) = (uint32_t)*((ulong_t *)(m)->cm_param); \
68 bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t)); \
69 (len) = (uint32_t)tmp_ulong; \
73 #define PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) { \
74 SHA2Init(mech, ctx); \
75 SHA2Update(ctx, key, len); \
76 SHA2Final(digest, ctx); \
80 * Mechanism info structure passed to KCF during registration.
82 static crypto_mech_info_t sha2_mech_info_tab
[] = {
84 {SUN_CKM_SHA256
, SHA256_MECH_INFO_TYPE
,
85 CRYPTO_FG_DIGEST
| CRYPTO_FG_DIGEST_ATOMIC
,
86 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS
},
88 {SUN_CKM_SHA256_HMAC
, SHA256_HMAC_MECH_INFO_TYPE
,
89 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
90 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
91 CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
92 /* SHA256-HMAC GENERAL */
93 {SUN_CKM_SHA256_HMAC_GENERAL
, SHA256_HMAC_GEN_MECH_INFO_TYPE
,
94 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
95 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
96 CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
98 {SUN_CKM_SHA384
, SHA384_MECH_INFO_TYPE
,
99 CRYPTO_FG_DIGEST
| CRYPTO_FG_DIGEST_ATOMIC
,
100 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS
},
102 {SUN_CKM_SHA384_HMAC
, SHA384_HMAC_MECH_INFO_TYPE
,
103 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
104 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
105 CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
106 /* SHA384-HMAC GENERAL */
107 {SUN_CKM_SHA384_HMAC_GENERAL
, SHA384_HMAC_GEN_MECH_INFO_TYPE
,
108 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
109 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
110 CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
112 {SUN_CKM_SHA512
, SHA512_MECH_INFO_TYPE
,
113 CRYPTO_FG_DIGEST
| CRYPTO_FG_DIGEST_ATOMIC
,
114 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS
},
116 {SUN_CKM_SHA512_HMAC
, SHA512_HMAC_MECH_INFO_TYPE
,
117 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
118 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
119 CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
120 /* SHA512-HMAC GENERAL */
121 {SUN_CKM_SHA512_HMAC_GENERAL
, SHA512_HMAC_GEN_MECH_INFO_TYPE
,
122 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
123 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
124 CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
127 static void sha2_provider_status(crypto_provider_handle_t
, uint_t
*);
129 static crypto_control_ops_t sha2_control_ops
= {
133 static int sha2_digest_init(crypto_ctx_t
*, crypto_mechanism_t
*,
134 crypto_req_handle_t
);
135 static int sha2_digest(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
136 crypto_req_handle_t
);
137 static int sha2_digest_update(crypto_ctx_t
*, crypto_data_t
*,
138 crypto_req_handle_t
);
139 static int sha2_digest_final(crypto_ctx_t
*, crypto_data_t
*,
140 crypto_req_handle_t
);
141 static int sha2_digest_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
142 crypto_mechanism_t
*, crypto_data_t
*, crypto_data_t
*,
143 crypto_req_handle_t
);
145 static crypto_digest_ops_t sha2_digest_ops
= {
146 .digest_init
= sha2_digest_init
,
147 .digest
= sha2_digest
,
148 .digest_update
= sha2_digest_update
,
150 .digest_final
= sha2_digest_final
,
151 .digest_atomic
= sha2_digest_atomic
154 static int sha2_mac_init(crypto_ctx_t
*, crypto_mechanism_t
*, crypto_key_t
*,
155 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
156 static int sha2_mac_update(crypto_ctx_t
*, crypto_data_t
*,
157 crypto_req_handle_t
);
158 static int sha2_mac_final(crypto_ctx_t
*, crypto_data_t
*, crypto_req_handle_t
);
159 static int sha2_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
160 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
161 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
162 static int sha2_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
163 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
164 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
166 static crypto_mac_ops_t sha2_mac_ops
= {
167 .mac_init
= sha2_mac_init
,
169 .mac_update
= sha2_mac_update
,
170 .mac_final
= sha2_mac_final
,
171 .mac_atomic
= sha2_mac_atomic
,
172 .mac_verify_atomic
= sha2_mac_verify_atomic
175 static int sha2_create_ctx_template(crypto_provider_handle_t
,
176 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
177 size_t *, crypto_req_handle_t
);
178 static int sha2_free_context(crypto_ctx_t
*);
180 static crypto_ctx_ops_t sha2_ctx_ops
= {
181 .create_ctx_template
= sha2_create_ctx_template
,
182 .free_context
= sha2_free_context
185 static crypto_ops_t sha2_crypto_ops
= {{{{{
202 static crypto_provider_info_t sha2_prov_info
= {{{{
203 CRYPTO_SPI_VERSION_1
,
204 "SHA2 Software Provider",
208 sizeof (sha2_mech_info_tab
)/sizeof (crypto_mech_info_t
),
212 static crypto_kcf_provider_handle_t sha2_prov_handle
= 0;
219 if ((ret
= mod_install(&modlinkage
)) != 0)
223 * Register with KCF. If the registration fails, log an
224 * error but do not uninstall the module, since the functionality
225 * provided by misc/sha2 should still be available.
227 if ((ret
= crypto_register_provider(&sha2_prov_info
,
228 &sha2_prov_handle
)) != CRYPTO_SUCCESS
)
229 cmn_err(CE_WARN
, "sha2 _init: "
230 "crypto_register_provider() failed (0x%x)", ret
);
240 if (sha2_prov_handle
!= 0) {
241 if ((ret
= crypto_unregister_provider(sha2_prov_handle
)) !=
244 "sha2 _fini: crypto_unregister_provider() "
245 "failed (0x%x)", ret
);
248 sha2_prov_handle
= 0;
251 return (mod_remove(&modlinkage
));
255 * KCF software provider control entry points.
259 sha2_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
261 *status
= CRYPTO_PROVIDER_READY
;
265 * KCF software provider digest entry points.
269 sha2_digest_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
270 crypto_req_handle_t req
)
274 * Allocate and initialize SHA2 context.
276 ctx
->cc_provider_private
= kmem_alloc(sizeof (sha2_ctx_t
),
278 if (ctx
->cc_provider_private
== NULL
)
279 return (CRYPTO_HOST_MEMORY
);
281 PROV_SHA2_CTX(ctx
)->sc_mech_type
= mechanism
->cm_type
;
282 SHA2Init(mechanism
->cm_type
, &PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
);
284 return (CRYPTO_SUCCESS
);
288 * Helper SHA2 digest update function for uio data.
291 sha2_digest_update_uio(SHA2_CTX
*sha2_ctx
, crypto_data_t
*data
)
293 off_t offset
= data
->cd_offset
;
294 size_t length
= data
->cd_length
;
298 /* we support only kernel buffer */
299 if (data
->cd_uio
->uio_segflg
!= UIO_SYSSPACE
)
300 return (CRYPTO_ARGUMENTS_BAD
);
303 * Jump to the first iovec containing data to be
306 for (vec_idx
= 0; vec_idx
< data
->cd_uio
->uio_iovcnt
&&
307 offset
>= data
->cd_uio
->uio_iov
[vec_idx
].iov_len
;
308 offset
-= data
->cd_uio
->uio_iov
[vec_idx
++].iov_len
)
310 if (vec_idx
== data
->cd_uio
->uio_iovcnt
) {
312 * The caller specified an offset that is larger than the
313 * total size of the buffers it provided.
315 return (CRYPTO_DATA_LEN_RANGE
);
319 * Now do the digesting on the iovecs.
321 while (vec_idx
< data
->cd_uio
->uio_iovcnt
&& length
> 0) {
322 cur_len
= MIN(data
->cd_uio
->uio_iov
[vec_idx
].iov_len
-
325 SHA2Update(sha2_ctx
, (uint8_t *)data
->cd_uio
->
326 uio_iov
[vec_idx
].iov_base
+ offset
, cur_len
);
332 if (vec_idx
== data
->cd_uio
->uio_iovcnt
&& length
> 0) {
334 * The end of the specified iovec's was reached but
335 * the length requested could not be processed, i.e.
336 * The caller requested to digest more data than it provided.
338 return (CRYPTO_DATA_LEN_RANGE
);
341 return (CRYPTO_SUCCESS
);
345 * Helper SHA2 digest final function for uio data.
346 * digest_len is the length of the desired digest. If digest_len
347 * is smaller than the default SHA2 digest length, the caller
348 * must pass a scratch buffer, digest_scratch, which must
349 * be at least the algorithm's digest length bytes.
352 sha2_digest_final_uio(SHA2_CTX
*sha2_ctx
, crypto_data_t
*digest
,
353 ulong_t digest_len
, uchar_t
*digest_scratch
)
355 off_t offset
= digest
->cd_offset
;
358 /* we support only kernel buffer */
359 if (digest
->cd_uio
->uio_segflg
!= UIO_SYSSPACE
)
360 return (CRYPTO_ARGUMENTS_BAD
);
363 * Jump to the first iovec containing ptr to the digest to
366 for (vec_idx
= 0; offset
>= digest
->cd_uio
->uio_iov
[vec_idx
].iov_len
&&
367 vec_idx
< digest
->cd_uio
->uio_iovcnt
;
368 offset
-= digest
->cd_uio
->uio_iov
[vec_idx
++].iov_len
)
370 if (vec_idx
== digest
->cd_uio
->uio_iovcnt
) {
372 * The caller specified an offset that is
373 * larger than the total size of the buffers
376 return (CRYPTO_DATA_LEN_RANGE
);
379 if (offset
+ digest_len
<=
380 digest
->cd_uio
->uio_iov
[vec_idx
].iov_len
) {
382 * The computed SHA2 digest will fit in the current
385 if (((sha2_ctx
->algotype
<= SHA256_HMAC_GEN_MECH_INFO_TYPE
) &&
386 (digest_len
!= SHA256_DIGEST_LENGTH
)) ||
387 ((sha2_ctx
->algotype
> SHA256_HMAC_GEN_MECH_INFO_TYPE
) &&
388 (digest_len
!= SHA512_DIGEST_LENGTH
))) {
390 * The caller requested a short digest. Digest
391 * into a scratch buffer and return to
392 * the user only what was requested.
394 SHA2Final(digest_scratch
, sha2_ctx
);
396 bcopy(digest_scratch
, (uchar_t
*)digest
->
397 cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
400 SHA2Final((uchar_t
*)digest
->
401 cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
407 * The computed digest will be crossing one or more iovec's.
408 * This is bad performance-wise but we need to support it.
409 * Allocate a small scratch buffer on the stack and
410 * copy it piece meal to the specified digest iovec's.
412 uchar_t digest_tmp
[SHA512_DIGEST_LENGTH
];
413 off_t scratch_offset
= 0;
414 size_t length
= digest_len
;
417 SHA2Final(digest_tmp
, sha2_ctx
);
419 while (vec_idx
< digest
->cd_uio
->uio_iovcnt
&& length
> 0) {
421 MIN(digest
->cd_uio
->uio_iov
[vec_idx
].iov_len
-
423 bcopy(digest_tmp
+ scratch_offset
,
424 digest
->cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
429 scratch_offset
+= cur_len
;
433 if (vec_idx
== digest
->cd_uio
->uio_iovcnt
&& length
> 0) {
435 * The end of the specified iovec's was reached but
436 * the length requested could not be processed, i.e.
437 * The caller requested to digest more data than it
440 return (CRYPTO_DATA_LEN_RANGE
);
444 return (CRYPTO_SUCCESS
);
449 sha2_digest(crypto_ctx_t
*ctx
, crypto_data_t
*data
, crypto_data_t
*digest
,
450 crypto_req_handle_t req
)
452 int ret
= CRYPTO_SUCCESS
;
453 uint_t sha_digest_len
;
455 ASSERT(ctx
->cc_provider_private
!= NULL
);
457 switch (PROV_SHA2_CTX(ctx
)->sc_mech_type
) {
458 case SHA256_MECH_INFO_TYPE
:
459 sha_digest_len
= SHA256_DIGEST_LENGTH
;
461 case SHA384_MECH_INFO_TYPE
:
462 sha_digest_len
= SHA384_DIGEST_LENGTH
;
464 case SHA512_MECH_INFO_TYPE
:
465 sha_digest_len
= SHA512_DIGEST_LENGTH
;
468 return (CRYPTO_MECHANISM_INVALID
);
472 * We need to just return the length needed to store the output.
473 * We should not destroy the context for the following cases.
475 if ((digest
->cd_length
== 0) ||
476 (digest
->cd_length
< sha_digest_len
)) {
477 digest
->cd_length
= sha_digest_len
;
478 return (CRYPTO_BUFFER_TOO_SMALL
);
482 * Do the SHA2 update on the specified input data.
484 switch (data
->cd_format
) {
485 case CRYPTO_DATA_RAW
:
486 SHA2Update(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
487 (uint8_t *)data
->cd_raw
.iov_base
+ data
->cd_offset
,
490 case CRYPTO_DATA_UIO
:
491 ret
= sha2_digest_update_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
495 ret
= CRYPTO_ARGUMENTS_BAD
;
498 if (ret
!= CRYPTO_SUCCESS
) {
499 /* the update failed, free context and bail */
500 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_ctx_t
));
501 ctx
->cc_provider_private
= NULL
;
502 digest
->cd_length
= 0;
507 * Do a SHA2 final, must be done separately since the digest
508 * type can be different than the input data type.
510 switch (digest
->cd_format
) {
511 case CRYPTO_DATA_RAW
:
512 SHA2Final((unsigned char *)digest
->cd_raw
.iov_base
+
513 digest
->cd_offset
, &PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
);
515 case CRYPTO_DATA_UIO
:
516 ret
= sha2_digest_final_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
517 digest
, sha_digest_len
, NULL
);
520 ret
= CRYPTO_ARGUMENTS_BAD
;
523 /* all done, free context and return */
525 if (ret
== CRYPTO_SUCCESS
)
526 digest
->cd_length
= sha_digest_len
;
528 digest
->cd_length
= 0;
530 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_ctx_t
));
531 ctx
->cc_provider_private
= NULL
;
537 sha2_digest_update(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
538 crypto_req_handle_t req
)
540 int ret
= CRYPTO_SUCCESS
;
542 ASSERT(ctx
->cc_provider_private
!= NULL
);
545 * Do the SHA2 update on the specified input data.
547 switch (data
->cd_format
) {
548 case CRYPTO_DATA_RAW
:
549 SHA2Update(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
550 (uint8_t *)data
->cd_raw
.iov_base
+ data
->cd_offset
,
553 case CRYPTO_DATA_UIO
:
554 ret
= sha2_digest_update_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
558 ret
= CRYPTO_ARGUMENTS_BAD
;
566 sha2_digest_final(crypto_ctx_t
*ctx
, crypto_data_t
*digest
,
567 crypto_req_handle_t req
)
569 int ret
= CRYPTO_SUCCESS
;
570 uint_t sha_digest_len
;
572 ASSERT(ctx
->cc_provider_private
!= NULL
);
574 switch (PROV_SHA2_CTX(ctx
)->sc_mech_type
) {
575 case SHA256_MECH_INFO_TYPE
:
576 sha_digest_len
= SHA256_DIGEST_LENGTH
;
578 case SHA384_MECH_INFO_TYPE
:
579 sha_digest_len
= SHA384_DIGEST_LENGTH
;
581 case SHA512_MECH_INFO_TYPE
:
582 sha_digest_len
= SHA512_DIGEST_LENGTH
;
585 return (CRYPTO_MECHANISM_INVALID
);
589 * We need to just return the length needed to store the output.
590 * We should not destroy the context for the following cases.
592 if ((digest
->cd_length
== 0) ||
593 (digest
->cd_length
< sha_digest_len
)) {
594 digest
->cd_length
= sha_digest_len
;
595 return (CRYPTO_BUFFER_TOO_SMALL
);
601 switch (digest
->cd_format
) {
602 case CRYPTO_DATA_RAW
:
603 SHA2Final((unsigned char *)digest
->cd_raw
.iov_base
+
604 digest
->cd_offset
, &PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
);
606 case CRYPTO_DATA_UIO
:
607 ret
= sha2_digest_final_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
608 digest
, sha_digest_len
, NULL
);
611 ret
= CRYPTO_ARGUMENTS_BAD
;
614 /* all done, free context and return */
616 if (ret
== CRYPTO_SUCCESS
)
617 digest
->cd_length
= sha_digest_len
;
619 digest
->cd_length
= 0;
621 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_ctx_t
));
622 ctx
->cc_provider_private
= NULL
;
629 sha2_digest_atomic(crypto_provider_handle_t provider
,
630 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
631 crypto_data_t
*data
, crypto_data_t
*digest
,
632 crypto_req_handle_t req
)
634 int ret
= CRYPTO_SUCCESS
;
636 uint32_t sha_digest_len
;
642 SHA2Init(mechanism
->cm_type
, &sha2_ctx
);
644 switch (data
->cd_format
) {
645 case CRYPTO_DATA_RAW
:
646 SHA2Update(&sha2_ctx
, (uint8_t *)data
->
647 cd_raw
.iov_base
+ data
->cd_offset
, data
->cd_length
);
649 case CRYPTO_DATA_UIO
:
650 ret
= sha2_digest_update_uio(&sha2_ctx
, data
);
653 ret
= CRYPTO_ARGUMENTS_BAD
;
657 * Do the SHA updates on the specified input data.
660 if (ret
!= CRYPTO_SUCCESS
) {
661 /* the update failed, bail */
662 digest
->cd_length
= 0;
666 if (mechanism
->cm_type
<= SHA256_HMAC_GEN_MECH_INFO_TYPE
)
667 sha_digest_len
= SHA256_DIGEST_LENGTH
;
669 sha_digest_len
= SHA512_DIGEST_LENGTH
;
672 * Do a SHA2 final, must be done separately since the digest
673 * type can be different than the input data type.
675 switch (digest
->cd_format
) {
676 case CRYPTO_DATA_RAW
:
677 SHA2Final((unsigned char *)digest
->cd_raw
.iov_base
+
678 digest
->cd_offset
, &sha2_ctx
);
680 case CRYPTO_DATA_UIO
:
681 ret
= sha2_digest_final_uio(&sha2_ctx
, digest
,
682 sha_digest_len
, NULL
);
685 ret
= CRYPTO_ARGUMENTS_BAD
;
688 if (ret
== CRYPTO_SUCCESS
)
689 digest
->cd_length
= sha_digest_len
;
691 digest
->cd_length
= 0;
697 * KCF software provider mac entry points.
699 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
702 * The initialization routine initializes what we denote
703 * as the inner and outer contexts by doing
704 * - for inner context: SHA2(key XOR ipad)
705 * - for outer context: SHA2(key XOR opad)
708 * Each subsequent SHA2 HMAC update will result in an
709 * update of the inner context with the specified data.
712 * The SHA2 HMAC final will do a SHA2 final operation on the
713 * inner context, and the resulting digest will be used
714 * as the data for an update on the outer context. Last
715 * but not least, a SHA2 final on the outer context will
716 * be performed to obtain the SHA2 HMAC digest to return
721 * Initialize a SHA2-HMAC context.
724 sha2_mac_init_ctx(sha2_hmac_ctx_t
*ctx
, void *keyval
, uint_t length_in_bytes
)
726 uint64_t ipad
[SHA512_HMAC_BLOCK_SIZE
/ sizeof (uint64_t)];
727 uint64_t opad
[SHA512_HMAC_BLOCK_SIZE
/ sizeof (uint64_t)];
728 int i
, block_size
, blocks_per_int64
;
730 /* Determine the block size */
731 if (ctx
->hc_mech_type
<= SHA256_HMAC_GEN_MECH_INFO_TYPE
) {
732 block_size
= SHA256_HMAC_BLOCK_SIZE
;
733 blocks_per_int64
= SHA256_HMAC_BLOCK_SIZE
/ sizeof (uint64_t);
735 block_size
= SHA512_HMAC_BLOCK_SIZE
;
736 blocks_per_int64
= SHA512_HMAC_BLOCK_SIZE
/ sizeof (uint64_t);
739 (void) bzero(ipad
, block_size
);
740 (void) bzero(opad
, block_size
);
741 (void) bcopy(keyval
, ipad
, length_in_bytes
);
742 (void) bcopy(keyval
, opad
, length_in_bytes
);
744 /* XOR key with ipad (0x36) and opad (0x5c) */
745 for (i
= 0; i
< blocks_per_int64
; i
++) {
746 ipad
[i
] ^= 0x3636363636363636;
747 opad
[i
] ^= 0x5c5c5c5c5c5c5c5c;
750 /* perform SHA2 on ipad */
751 SHA2Init(ctx
->hc_mech_type
, &ctx
->hc_icontext
);
752 SHA2Update(&ctx
->hc_icontext
, (uint8_t *)ipad
, block_size
);
754 /* perform SHA2 on opad */
755 SHA2Init(ctx
->hc_mech_type
, &ctx
->hc_ocontext
);
756 SHA2Update(&ctx
->hc_ocontext
, (uint8_t *)opad
, block_size
);
763 sha2_mac_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
764 crypto_key_t
*key
, crypto_spi_ctx_template_t ctx_template
,
765 crypto_req_handle_t req
)
767 int ret
= CRYPTO_SUCCESS
;
768 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
769 uint_t sha_digest_len
, sha_hmac_block_size
;
772 * Set the digest length and block size to values appropriate to the
775 switch (mechanism
->cm_type
) {
776 case SHA256_HMAC_MECH_INFO_TYPE
:
777 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
778 sha_digest_len
= SHA256_DIGEST_LENGTH
;
779 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
781 case SHA384_HMAC_MECH_INFO_TYPE
:
782 case SHA384_HMAC_GEN_MECH_INFO_TYPE
:
783 case SHA512_HMAC_MECH_INFO_TYPE
:
784 case SHA512_HMAC_GEN_MECH_INFO_TYPE
:
785 sha_digest_len
= SHA512_DIGEST_LENGTH
;
786 sha_hmac_block_size
= SHA512_HMAC_BLOCK_SIZE
;
789 return (CRYPTO_MECHANISM_INVALID
);
792 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
793 return (CRYPTO_ARGUMENTS_BAD
);
795 ctx
->cc_provider_private
= kmem_alloc(sizeof (sha2_hmac_ctx_t
),
797 if (ctx
->cc_provider_private
== NULL
)
798 return (CRYPTO_HOST_MEMORY
);
800 PROV_SHA2_HMAC_CTX(ctx
)->hc_mech_type
= mechanism
->cm_type
;
801 if (ctx_template
!= NULL
) {
802 /* reuse context template */
803 bcopy(ctx_template
, PROV_SHA2_HMAC_CTX(ctx
),
804 sizeof (sha2_hmac_ctx_t
));
806 /* no context template, compute context */
807 if (keylen_in_bytes
> sha_hmac_block_size
) {
808 uchar_t digested_key
[SHA512_DIGEST_LENGTH
];
809 sha2_hmac_ctx_t
*hmac_ctx
= ctx
->cc_provider_private
;
812 * Hash the passed-in key to get a smaller key.
813 * The inner context is used since it hasn't been
816 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
817 &hmac_ctx
->hc_icontext
,
818 key
->ck_data
, keylen_in_bytes
, digested_key
);
819 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx
),
820 digested_key
, sha_digest_len
);
822 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx
),
823 key
->ck_data
, keylen_in_bytes
);
828 * Get the mechanism parameters, if applicable.
830 if (mechanism
->cm_type
% 3 == 2) {
831 if (mechanism
->cm_param
== NULL
||
832 mechanism
->cm_param_len
!= sizeof (ulong_t
))
833 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
834 PROV_SHA2_GET_DIGEST_LEN(mechanism
,
835 PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
);
836 if (PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
> sha_digest_len
)
837 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
840 if (ret
!= CRYPTO_SUCCESS
) {
841 bzero(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
842 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
843 ctx
->cc_provider_private
= NULL
;
851 sha2_mac_update(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
852 crypto_req_handle_t req
)
854 int ret
= CRYPTO_SUCCESS
;
856 ASSERT(ctx
->cc_provider_private
!= NULL
);
859 * Do a SHA2 update of the inner context using the specified
862 switch (data
->cd_format
) {
863 case CRYPTO_DATA_RAW
:
864 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx
)->hc_icontext
,
865 (uint8_t *)data
->cd_raw
.iov_base
+ data
->cd_offset
,
868 case CRYPTO_DATA_UIO
:
869 ret
= sha2_digest_update_uio(
870 &PROV_SHA2_HMAC_CTX(ctx
)->hc_icontext
, data
);
873 ret
= CRYPTO_ARGUMENTS_BAD
;
881 sha2_mac_final(crypto_ctx_t
*ctx
, crypto_data_t
*mac
, crypto_req_handle_t req
)
883 int ret
= CRYPTO_SUCCESS
;
884 uchar_t digest
[SHA512_DIGEST_LENGTH
];
885 uint32_t digest_len
, sha_digest_len
;
887 ASSERT(ctx
->cc_provider_private
!= NULL
);
889 /* Set the digest lengths to values appropriate to the mechanism */
890 switch (PROV_SHA2_HMAC_CTX(ctx
)->hc_mech_type
) {
891 case SHA256_HMAC_MECH_INFO_TYPE
:
892 sha_digest_len
= digest_len
= SHA256_DIGEST_LENGTH
;
894 case SHA384_HMAC_MECH_INFO_TYPE
:
895 sha_digest_len
= digest_len
= SHA384_DIGEST_LENGTH
;
897 case SHA512_HMAC_MECH_INFO_TYPE
:
898 sha_digest_len
= digest_len
= SHA512_DIGEST_LENGTH
;
900 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
901 sha_digest_len
= SHA256_DIGEST_LENGTH
;
902 digest_len
= PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
;
904 case SHA384_HMAC_GEN_MECH_INFO_TYPE
:
905 case SHA512_HMAC_GEN_MECH_INFO_TYPE
:
906 sha_digest_len
= SHA512_DIGEST_LENGTH
;
907 digest_len
= PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
;
910 return (CRYPTO_ARGUMENTS_BAD
);
914 * We need to just return the length needed to store the output.
915 * We should not destroy the context for the following cases.
917 if ((mac
->cd_length
== 0) || (mac
->cd_length
< digest_len
)) {
918 mac
->cd_length
= digest_len
;
919 return (CRYPTO_BUFFER_TOO_SMALL
);
923 * Do a SHA2 final on the inner context.
925 SHA2Final(digest
, &PROV_SHA2_HMAC_CTX(ctx
)->hc_icontext
);
928 * Do a SHA2 update on the outer context, feeding the inner
931 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
, digest
,
935 * Do a SHA2 final on the outer context, storing the computing
936 * digest in the users buffer.
938 switch (mac
->cd_format
) {
939 case CRYPTO_DATA_RAW
:
940 if (digest_len
!= sha_digest_len
) {
942 * The caller requested a short digest. Digest
943 * into a scratch buffer and return to
944 * the user only what was requested.
947 &PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
);
948 bcopy(digest
, (unsigned char *)mac
->cd_raw
.iov_base
+
949 mac
->cd_offset
, digest_len
);
951 SHA2Final((unsigned char *)mac
->cd_raw
.iov_base
+
953 &PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
);
956 case CRYPTO_DATA_UIO
:
957 ret
= sha2_digest_final_uio(
958 &PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
, mac
,
962 ret
= CRYPTO_ARGUMENTS_BAD
;
965 if (ret
== CRYPTO_SUCCESS
)
966 mac
->cd_length
= digest_len
;
970 bzero(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
971 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
972 ctx
->cc_provider_private
= NULL
;
/*
 * Feed a crypto_data_t (raw or uio form) into (ctx).hc_icontext;
 * any other data format sets ret to CRYPTO_ARGUMENTS_BAD.
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}
994 sha2_mac_atomic(crypto_provider_handle_t provider
,
995 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
996 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
997 crypto_spi_ctx_template_t ctx_template
, crypto_req_handle_t req
)
999 int ret
= CRYPTO_SUCCESS
;
1000 uchar_t digest
[SHA512_DIGEST_LENGTH
];
1001 sha2_hmac_ctx_t sha2_hmac_ctx
;
1002 uint32_t sha_digest_len
, digest_len
, sha_hmac_block_size
;
1003 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
1006 * Set the digest length and block size to values appropriate to the
1009 switch (mechanism
->cm_type
) {
1010 case SHA256_HMAC_MECH_INFO_TYPE
:
1011 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
1012 sha_digest_len
= digest_len
= SHA256_DIGEST_LENGTH
;
1013 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
1015 case SHA384_HMAC_MECH_INFO_TYPE
:
1016 case SHA384_HMAC_GEN_MECH_INFO_TYPE
:
1017 case SHA512_HMAC_MECH_INFO_TYPE
:
1018 case SHA512_HMAC_GEN_MECH_INFO_TYPE
:
1019 sha_digest_len
= digest_len
= SHA512_DIGEST_LENGTH
;
1020 sha_hmac_block_size
= SHA512_HMAC_BLOCK_SIZE
;
1023 return (CRYPTO_MECHANISM_INVALID
);
1026 /* Add support for key by attributes (RFE 4706552) */
1027 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
1028 return (CRYPTO_ARGUMENTS_BAD
);
1030 if (ctx_template
!= NULL
) {
1031 /* reuse context template */
1032 bcopy(ctx_template
, &sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1034 sha2_hmac_ctx
.hc_mech_type
= mechanism
->cm_type
;
1035 /* no context template, initialize context */
1036 if (keylen_in_bytes
> sha_hmac_block_size
) {
1038 * Hash the passed-in key to get a smaller key.
1039 * The inner context is used since it hasn't been
1042 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
1043 &sha2_hmac_ctx
.hc_icontext
,
1044 key
->ck_data
, keylen_in_bytes
, digest
);
1045 sha2_mac_init_ctx(&sha2_hmac_ctx
, digest
,
1048 sha2_mac_init_ctx(&sha2_hmac_ctx
, key
->ck_data
,
1053 /* get the mechanism parameters, if applicable */
1054 if ((mechanism
->cm_type
% 3) == 2) {
1055 if (mechanism
->cm_param
== NULL
||
1056 mechanism
->cm_param_len
!= sizeof (ulong_t
)) {
1057 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
1060 PROV_SHA2_GET_DIGEST_LEN(mechanism
, digest_len
);
1061 if (digest_len
> sha_digest_len
) {
1062 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
1067 /* do a SHA2 update of the inner context using the specified data */
1068 SHA2_MAC_UPDATE(data
, sha2_hmac_ctx
, ret
);
1069 if (ret
!= CRYPTO_SUCCESS
)
1070 /* the update failed, free context and bail */
1074 * Do a SHA2 final on the inner context.
1076 SHA2Final(digest
, &sha2_hmac_ctx
.hc_icontext
);
1079 * Do an SHA2 update on the outer context, feeding the inner
1082 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1083 * bytes of the inner hash value.
1085 if (mechanism
->cm_type
== SHA384_HMAC_MECH_INFO_TYPE
||
1086 mechanism
->cm_type
== SHA384_HMAC_GEN_MECH_INFO_TYPE
)
1087 SHA2Update(&sha2_hmac_ctx
.hc_ocontext
, digest
,
1088 SHA384_DIGEST_LENGTH
);
1090 SHA2Update(&sha2_hmac_ctx
.hc_ocontext
, digest
, sha_digest_len
);
1093 * Do a SHA2 final on the outer context, storing the computed
1094 * digest in the users buffer.
1096 switch (mac
->cd_format
) {
1097 case CRYPTO_DATA_RAW
:
1098 if (digest_len
!= sha_digest_len
) {
1100 * The caller requested a short digest. Digest
1101 * into a scratch buffer and return to
1102 * the user only what was requested.
1104 SHA2Final(digest
, &sha2_hmac_ctx
.hc_ocontext
);
1105 bcopy(digest
, (unsigned char *)mac
->cd_raw
.iov_base
+
1106 mac
->cd_offset
, digest_len
);
1108 SHA2Final((unsigned char *)mac
->cd_raw
.iov_base
+
1109 mac
->cd_offset
, &sha2_hmac_ctx
.hc_ocontext
);
1112 case CRYPTO_DATA_UIO
:
1113 ret
= sha2_digest_final_uio(&sha2_hmac_ctx
.hc_ocontext
, mac
,
1114 digest_len
, digest
);
1117 ret
= CRYPTO_ARGUMENTS_BAD
;
1120 if (ret
== CRYPTO_SUCCESS
) {
1121 mac
->cd_length
= digest_len
;
1122 return (CRYPTO_SUCCESS
);
1125 bzero(&sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1132 sha2_mac_verify_atomic(crypto_provider_handle_t provider
,
1133 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1134 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1135 crypto_spi_ctx_template_t ctx_template
, crypto_req_handle_t req
)
1137 int ret
= CRYPTO_SUCCESS
;
1138 uchar_t digest
[SHA512_DIGEST_LENGTH
];
1139 sha2_hmac_ctx_t sha2_hmac_ctx
;
1140 uint32_t sha_digest_len
, digest_len
, sha_hmac_block_size
;
1141 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
1144 * Set the digest length and block size to values appropriate to the
1147 switch (mechanism
->cm_type
) {
1148 case SHA256_HMAC_MECH_INFO_TYPE
:
1149 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
1150 sha_digest_len
= digest_len
= SHA256_DIGEST_LENGTH
;
1151 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
1153 case SHA384_HMAC_MECH_INFO_TYPE
:
1154 case SHA384_HMAC_GEN_MECH_INFO_TYPE
:
1155 case SHA512_HMAC_MECH_INFO_TYPE
:
1156 case SHA512_HMAC_GEN_MECH_INFO_TYPE
:
1157 sha_digest_len
= digest_len
= SHA512_DIGEST_LENGTH
;
1158 sha_hmac_block_size
= SHA512_HMAC_BLOCK_SIZE
;
1161 return (CRYPTO_MECHANISM_INVALID
);
1164 /* Add support for key by attributes (RFE 4706552) */
1165 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
1166 return (CRYPTO_ARGUMENTS_BAD
);
1168 if (ctx_template
!= NULL
) {
1169 /* reuse context template */
1170 bcopy(ctx_template
, &sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1172 sha2_hmac_ctx
.hc_mech_type
= mechanism
->cm_type
;
1173 /* no context template, initialize context */
1174 if (keylen_in_bytes
> sha_hmac_block_size
) {
1176 * Hash the passed-in key to get a smaller key.
1177 * The inner context is used since it hasn't been
1180 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
1181 &sha2_hmac_ctx
.hc_icontext
,
1182 key
->ck_data
, keylen_in_bytes
, digest
);
1183 sha2_mac_init_ctx(&sha2_hmac_ctx
, digest
,
1186 sha2_mac_init_ctx(&sha2_hmac_ctx
, key
->ck_data
,
1191 /* get the mechanism parameters, if applicable */
1192 if (mechanism
->cm_type
% 3 == 2) {
1193 if (mechanism
->cm_param
== NULL
||
1194 mechanism
->cm_param_len
!= sizeof (ulong_t
)) {
1195 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
1198 PROV_SHA2_GET_DIGEST_LEN(mechanism
, digest_len
);
1199 if (digest_len
> sha_digest_len
) {
1200 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
1205 if (mac
->cd_length
!= digest_len
) {
1206 ret
= CRYPTO_INVALID_MAC
;
1210 /* do a SHA2 update of the inner context using the specified data */
1211 SHA2_MAC_UPDATE(data
, sha2_hmac_ctx
, ret
);
1212 if (ret
!= CRYPTO_SUCCESS
)
1213 /* the update failed, free context and bail */
1216 /* do a SHA2 final on the inner context */
1217 SHA2Final(digest
, &sha2_hmac_ctx
.hc_icontext
);
1220 * Do an SHA2 update on the outer context, feeding the inner
1223 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1224 * bytes of the inner hash value.
1226 if (mechanism
->cm_type
== SHA384_HMAC_MECH_INFO_TYPE
||
1227 mechanism
->cm_type
== SHA384_HMAC_GEN_MECH_INFO_TYPE
)
1228 SHA2Update(&sha2_hmac_ctx
.hc_ocontext
, digest
,
1229 SHA384_DIGEST_LENGTH
);
1231 SHA2Update(&sha2_hmac_ctx
.hc_ocontext
, digest
, sha_digest_len
);
1234 * Do a SHA2 final on the outer context, storing the computed
1235 * digest in the users buffer.
1237 SHA2Final(digest
, &sha2_hmac_ctx
.hc_ocontext
);
1240 * Compare the computed digest against the expected digest passed
1244 switch (mac
->cd_format
) {
1246 case CRYPTO_DATA_RAW
:
1247 if (bcmp(digest
, (unsigned char *)mac
->cd_raw
.iov_base
+
1248 mac
->cd_offset
, digest_len
) != 0)
1249 ret
= CRYPTO_INVALID_MAC
;
1252 case CRYPTO_DATA_UIO
: {
1253 off_t offset
= mac
->cd_offset
;
1255 off_t scratch_offset
= 0;
1256 size_t length
= digest_len
;
1259 /* we support only kernel buffer */
1260 if (mac
->cd_uio
->uio_segflg
!= UIO_SYSSPACE
)
1261 return (CRYPTO_ARGUMENTS_BAD
);
1263 /* jump to the first iovec containing the expected digest */
1265 offset
>= mac
->cd_uio
->uio_iov
[vec_idx
].iov_len
&&
1266 vec_idx
< mac
->cd_uio
->uio_iovcnt
;
1267 offset
-= mac
->cd_uio
->uio_iov
[vec_idx
++].iov_len
)
1269 if (vec_idx
== mac
->cd_uio
->uio_iovcnt
) {
1271 * The caller specified an offset that is
1272 * larger than the total size of the buffers
1275 ret
= CRYPTO_DATA_LEN_RANGE
;
1279 /* do the comparison of computed digest vs specified one */
1280 while (vec_idx
< mac
->cd_uio
->uio_iovcnt
&& length
> 0) {
1281 cur_len
= MIN(mac
->cd_uio
->uio_iov
[vec_idx
].iov_len
-
1284 if (bcmp(digest
+ scratch_offset
,
1285 mac
->cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
1287 ret
= CRYPTO_INVALID_MAC
;
1293 scratch_offset
+= cur_len
;
1300 ret
= CRYPTO_ARGUMENTS_BAD
;
1305 bzero(&sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1311 * KCF software provider context management entry points.
1316 sha2_create_ctx_template(crypto_provider_handle_t provider
,
1317 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1318 crypto_spi_ctx_template_t
*ctx_template
, size_t *ctx_template_size
,
1319 crypto_req_handle_t req
)
1321 sha2_hmac_ctx_t
*sha2_hmac_ctx_tmpl
;
1322 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
1323 uint32_t sha_digest_len
, sha_hmac_block_size
;
1326 * Set the digest length and block size to values appropriate to the
1329 switch (mechanism
->cm_type
) {
1330 case SHA256_HMAC_MECH_INFO_TYPE
:
1331 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
1332 sha_digest_len
= SHA256_DIGEST_LENGTH
;
1333 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
1335 case SHA384_HMAC_MECH_INFO_TYPE
:
1336 case SHA384_HMAC_GEN_MECH_INFO_TYPE
:
1337 case SHA512_HMAC_MECH_INFO_TYPE
:
1338 case SHA512_HMAC_GEN_MECH_INFO_TYPE
:
1339 sha_digest_len
= SHA512_DIGEST_LENGTH
;
1340 sha_hmac_block_size
= SHA512_HMAC_BLOCK_SIZE
;
1343 return (CRYPTO_MECHANISM_INVALID
);
1346 /* Add support for key by attributes (RFE 4706552) */
1347 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
1348 return (CRYPTO_ARGUMENTS_BAD
);
1351 * Allocate and initialize SHA2 context.
1353 sha2_hmac_ctx_tmpl
= kmem_alloc(sizeof (sha2_hmac_ctx_t
),
1354 crypto_kmflag(req
));
1355 if (sha2_hmac_ctx_tmpl
== NULL
)
1356 return (CRYPTO_HOST_MEMORY
);
1358 sha2_hmac_ctx_tmpl
->hc_mech_type
= mechanism
->cm_type
;
1360 if (keylen_in_bytes
> sha_hmac_block_size
) {
1361 uchar_t digested_key
[SHA512_DIGEST_LENGTH
];
1364 * Hash the passed-in key to get a smaller key.
1365 * The inner context is used since it hasn't been
1368 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
1369 &sha2_hmac_ctx_tmpl
->hc_icontext
,
1370 key
->ck_data
, keylen_in_bytes
, digested_key
);
1371 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl
, digested_key
,
1374 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl
, key
->ck_data
,
1378 *ctx_template
= (crypto_spi_ctx_template_t
)sha2_hmac_ctx_tmpl
;
1379 *ctx_template_size
= sizeof (sha2_hmac_ctx_t
);
1381 return (CRYPTO_SUCCESS
);
1385 sha2_free_context(crypto_ctx_t
*ctx
)
1389 if (ctx
->cc_provider_private
== NULL
)
1390 return (CRYPTO_SUCCESS
);
1393 * We have to free either SHA2 or SHA2-HMAC contexts, which
1394 * have different lengths.
1396 * Note: Below is dependent on the mechanism ordering.
1399 if (PROV_SHA2_CTX(ctx
)->sc_mech_type
% 3 == 0)
1400 ctx_len
= sizeof (sha2_ctx_t
);
1402 ctx_len
= sizeof (sha2_hmac_ctx_t
);
1404 bzero(ctx
->cc_provider_private
, ctx_len
);
1405 kmem_free(ctx
->cc_provider_private
, ctx_len
);
1406 ctx
->cc_provider_private
= NULL
;
1408 return (CRYPTO_SUCCESS
);