4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
23 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
27 #include <sys/zfs_context.h>
28 #include <sys/modctl.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/crypto/icp.h>
34 #include <sha2/sha2_impl.h>
37 * The sha2 module is created with two modlinkages:
38 * - a modlmisc that allows consumers to directly call the entry points
39 * SHA2Init, SHA2Update, and SHA2Final.
40 * - a modlcrypto that allows the module to register with the Kernel
41 * Cryptographic Framework (KCF) as a software provider for the SHA2
45 static struct modlcrypto modlcrypto
= {
47 "SHA2 Kernel SW Provider"
50 static struct modlinkage modlinkage
= {
51 MODREV_1
, {&modlcrypto
, NULL
}
55 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
56 * by KCF to one of the entry points.
/* Cast a KCF context's private pointer to the plain-digest context. */
#define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
/* Cast a KCF context's private pointer to the HMAC context. */
#define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
62 /* to extract the digest length passed as mechanism parameter */
/*
 * Extract the digest length passed as a mechanism parameter.
 * The parameter may be unaligned, so fall back to bcopy() into a
 * properly-aligned temporary when needed.
 */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);		\
	else {								\
		ulong_t tmp_ulong;					\
		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
}
/* One-shot digest of an HMAC key that is longer than the block size. */
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) {	\
	SHA2Init(mech, ctx);					\
	SHA2Update(ctx, key, len);				\
	SHA2Final(digest, ctx);					\
}
80 * Mechanism info structure passed to KCF during registration.
82 static crypto_mech_info_t sha2_mech_info_tab
[] = {
84 {SUN_CKM_SHA256
, SHA256_MECH_INFO_TYPE
,
85 CRYPTO_FG_DIGEST
| CRYPTO_FG_DIGEST_ATOMIC
,
86 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS
},
88 {SUN_CKM_SHA256_HMAC
, SHA256_HMAC_MECH_INFO_TYPE
,
89 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
90 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
91 CRYPTO_KEYSIZE_UNIT_IN_BYTES
},
92 /* SHA256-HMAC GENERAL */
93 {SUN_CKM_SHA256_HMAC_GENERAL
, SHA256_HMAC_GEN_MECH_INFO_TYPE
,
94 CRYPTO_FG_MAC
| CRYPTO_FG_MAC_ATOMIC
,
95 SHA2_HMAC_MIN_KEY_LEN
, SHA2_HMAC_MAX_KEY_LEN
,
96 CRYPTO_KEYSIZE_UNIT_IN_BYTES
}
99 static void sha2_provider_status(crypto_provider_handle_t
, uint_t
*);
101 static crypto_control_ops_t sha2_control_ops
= {
105 static int sha2_digest_init(crypto_ctx_t
*, crypto_mechanism_t
*,
106 crypto_req_handle_t
);
107 static int sha2_digest(crypto_ctx_t
*, crypto_data_t
*, crypto_data_t
*,
108 crypto_req_handle_t
);
109 static int sha2_digest_update(crypto_ctx_t
*, crypto_data_t
*,
110 crypto_req_handle_t
);
111 static int sha2_digest_final(crypto_ctx_t
*, crypto_data_t
*,
112 crypto_req_handle_t
);
113 static int sha2_digest_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
114 crypto_mechanism_t
*, crypto_data_t
*, crypto_data_t
*,
115 crypto_req_handle_t
);
117 static crypto_digest_ops_t sha2_digest_ops
= {
126 static int sha2_mac_init(crypto_ctx_t
*, crypto_mechanism_t
*, crypto_key_t
*,
127 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
128 static int sha2_mac_update(crypto_ctx_t
*, crypto_data_t
*,
129 crypto_req_handle_t
);
130 static int sha2_mac_final(crypto_ctx_t
*, crypto_data_t
*, crypto_req_handle_t
);
131 static int sha2_mac_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
132 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
133 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
134 static int sha2_mac_verify_atomic(crypto_provider_handle_t
, crypto_session_id_t
,
135 crypto_mechanism_t
*, crypto_key_t
*, crypto_data_t
*, crypto_data_t
*,
136 crypto_spi_ctx_template_t
, crypto_req_handle_t
);
138 static crypto_mac_ops_t sha2_mac_ops
= {
144 sha2_mac_verify_atomic
147 static int sha2_create_ctx_template(crypto_provider_handle_t
,
148 crypto_mechanism_t
*, crypto_key_t
*, crypto_spi_ctx_template_t
*,
149 size_t *, crypto_req_handle_t
);
150 static int sha2_free_context(crypto_ctx_t
*);
152 static crypto_ctx_ops_t sha2_ctx_ops
= {
153 sha2_create_ctx_template
,
157 static crypto_ops_t sha2_crypto_ops
= {{{{{
174 static crypto_provider_info_t sha2_prov_info
= {{{{
175 CRYPTO_SPI_VERSION_1
,
176 "SHA2 Software Provider",
180 sizeof (sha2_mech_info_tab
)/sizeof (crypto_mech_info_t
),
184 static crypto_kcf_provider_handle_t sha2_prov_handle
= 0;
191 if ((ret
= mod_install(&modlinkage
)) != 0)
195 * Register with KCF. If the registration fails, log an
196 * error but do not uninstall the module, since the functionality
197 * provided by misc/sha2 should still be available.
199 if ((ret
= crypto_register_provider(&sha2_prov_info
,
200 &sha2_prov_handle
)) != CRYPTO_SUCCESS
)
201 cmn_err(CE_WARN
, "sha2 _init: "
202 "crypto_register_provider() failed (0x%x)", ret
);
212 if (sha2_prov_handle
!= 0) {
213 if ((ret
= crypto_unregister_provider(sha2_prov_handle
)) !=
216 "sha2 _fini: crypto_unregister_provider() "
217 "failed (0x%x)", ret
);
220 sha2_prov_handle
= 0;
223 return (mod_remove(&modlinkage
));
227 * KCF software provider control entry points.
231 sha2_provider_status(crypto_provider_handle_t provider
, uint_t
*status
)
233 *status
= CRYPTO_PROVIDER_READY
;
237 * KCF software provider digest entry points.
241 sha2_digest_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
242 crypto_req_handle_t req
)
246 * Allocate and initialize SHA2 context.
248 ctx
->cc_provider_private
= kmem_alloc(sizeof (sha2_ctx_t
),
250 if (ctx
->cc_provider_private
== NULL
)
251 return (CRYPTO_HOST_MEMORY
);
253 PROV_SHA2_CTX(ctx
)->sc_mech_type
= mechanism
->cm_type
;
254 SHA2Init(mechanism
->cm_type
, &PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
);
256 return (CRYPTO_SUCCESS
);
260 * Helper SHA2 digest update function for uio data.
263 sha2_digest_update_uio(SHA2_CTX
*sha2_ctx
, crypto_data_t
*data
)
265 off_t offset
= data
->cd_offset
;
266 size_t length
= data
->cd_length
;
270 /* we support only kernel buffer */
271 if (data
->cd_uio
->uio_segflg
!= UIO_SYSSPACE
)
272 return (CRYPTO_ARGUMENTS_BAD
);
275 * Jump to the first iovec containing data to be
278 for (vec_idx
= 0; vec_idx
< data
->cd_uio
->uio_iovcnt
&&
279 offset
>= data
->cd_uio
->uio_iov
[vec_idx
].iov_len
;
280 offset
-= data
->cd_uio
->uio_iov
[vec_idx
++].iov_len
)
282 if (vec_idx
== data
->cd_uio
->uio_iovcnt
) {
284 * The caller specified an offset that is larger than the
285 * total size of the buffers it provided.
287 return (CRYPTO_DATA_LEN_RANGE
);
291 * Now do the digesting on the iovecs.
293 while (vec_idx
< data
->cd_uio
->uio_iovcnt
&& length
> 0) {
294 cur_len
= MIN(data
->cd_uio
->uio_iov
[vec_idx
].iov_len
-
297 SHA2Update(sha2_ctx
, (uint8_t *)data
->cd_uio
->
298 uio_iov
[vec_idx
].iov_base
+ offset
, cur_len
);
304 if (vec_idx
== data
->cd_uio
->uio_iovcnt
&& length
> 0) {
306 * The end of the specified iovec's was reached but
307 * the length requested could not be processed, i.e.
308 * The caller requested to digest more data than it provided.
310 return (CRYPTO_DATA_LEN_RANGE
);
313 return (CRYPTO_SUCCESS
);
317 * Helper SHA2 digest final function for uio data.
318 * digest_len is the length of the desired digest. If digest_len
319 * is smaller than the default SHA2 digest length, the caller
320 * must pass a scratch buffer, digest_scratch, which must
321 * be at least the algorithm's digest length bytes.
324 sha2_digest_final_uio(SHA2_CTX
*sha2_ctx
, crypto_data_t
*digest
,
325 ulong_t digest_len
, uchar_t
*digest_scratch
)
327 off_t offset
= digest
->cd_offset
;
330 /* we support only kernel buffer */
331 if (digest
->cd_uio
->uio_segflg
!= UIO_SYSSPACE
)
332 return (CRYPTO_ARGUMENTS_BAD
);
335 * Jump to the first iovec containing ptr to the digest to
338 for (vec_idx
= 0; offset
>= digest
->cd_uio
->uio_iov
[vec_idx
].iov_len
&&
339 vec_idx
< digest
->cd_uio
->uio_iovcnt
;
340 offset
-= digest
->cd_uio
->uio_iov
[vec_idx
++].iov_len
)
342 if (vec_idx
== digest
->cd_uio
->uio_iovcnt
) {
344 * The caller specified an offset that is
345 * larger than the total size of the buffers
348 return (CRYPTO_DATA_LEN_RANGE
);
351 if (offset
+ digest_len
<=
352 digest
->cd_uio
->uio_iov
[vec_idx
].iov_len
) {
354 * The computed SHA2 digest will fit in the current
357 if (((sha2_ctx
->algotype
<= SHA256_HMAC_GEN_MECH_INFO_TYPE
) &&
358 (digest_len
!= SHA256_DIGEST_LENGTH
))) {
360 * The caller requested a short digest. Digest
361 * into a scratch buffer and return to
362 * the user only what was requested.
364 SHA2Final(digest_scratch
, sha2_ctx
);
366 bcopy(digest_scratch
, (uchar_t
*)digest
->
367 cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
370 SHA2Final((uchar_t
*)digest
->
371 cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
377 * The computed digest will be crossing one or more iovec's.
378 * This is bad performance-wise but we need to support it.
379 * Allocate a small scratch buffer on the stack and
380 * copy it piece meal to the specified digest iovec's.
382 uchar_t digest_tmp
[SHA256_DIGEST_LENGTH
];
383 off_t scratch_offset
= 0;
384 size_t length
= digest_len
;
387 SHA2Final(digest_tmp
, sha2_ctx
);
389 while (vec_idx
< digest
->cd_uio
->uio_iovcnt
&& length
> 0) {
391 MIN(digest
->cd_uio
->uio_iov
[vec_idx
].iov_len
-
393 bcopy(digest_tmp
+ scratch_offset
,
394 digest
->cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
399 scratch_offset
+= cur_len
;
403 if (vec_idx
== digest
->cd_uio
->uio_iovcnt
&& length
> 0) {
405 * The end of the specified iovec's was reached but
406 * the length requested could not be processed, i.e.
407 * The caller requested to digest more data than it
410 return (CRYPTO_DATA_LEN_RANGE
);
414 return (CRYPTO_SUCCESS
);
419 sha2_digest(crypto_ctx_t
*ctx
, crypto_data_t
*data
, crypto_data_t
*digest
,
420 crypto_req_handle_t req
)
422 int ret
= CRYPTO_SUCCESS
;
423 uint_t sha_digest_len
;
425 ASSERT(ctx
->cc_provider_private
!= NULL
);
427 switch (PROV_SHA2_CTX(ctx
)->sc_mech_type
) {
428 case SHA256_MECH_INFO_TYPE
:
429 sha_digest_len
= SHA256_DIGEST_LENGTH
;
432 return (CRYPTO_MECHANISM_INVALID
);
436 * We need to just return the length needed to store the output.
437 * We should not destroy the context for the following cases.
439 if ((digest
->cd_length
== 0) ||
440 (digest
->cd_length
< sha_digest_len
)) {
441 digest
->cd_length
= sha_digest_len
;
442 return (CRYPTO_BUFFER_TOO_SMALL
);
446 * Do the SHA2 update on the specified input data.
448 switch (data
->cd_format
) {
449 case CRYPTO_DATA_RAW
:
450 SHA2Update(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
451 (uint8_t *)data
->cd_raw
.iov_base
+ data
->cd_offset
,
454 case CRYPTO_DATA_UIO
:
455 ret
= sha2_digest_update_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
459 ret
= CRYPTO_ARGUMENTS_BAD
;
462 if (ret
!= CRYPTO_SUCCESS
) {
463 /* the update failed, free context and bail */
464 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_ctx_t
));
465 ctx
->cc_provider_private
= NULL
;
466 digest
->cd_length
= 0;
471 * Do a SHA2 final, must be done separately since the digest
472 * type can be different than the input data type.
474 switch (digest
->cd_format
) {
475 case CRYPTO_DATA_RAW
:
476 SHA2Final((unsigned char *)digest
->cd_raw
.iov_base
+
477 digest
->cd_offset
, &PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
);
479 case CRYPTO_DATA_UIO
:
480 ret
= sha2_digest_final_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
481 digest
, sha_digest_len
, NULL
);
484 ret
= CRYPTO_ARGUMENTS_BAD
;
487 /* all done, free context and return */
489 if (ret
== CRYPTO_SUCCESS
)
490 digest
->cd_length
= sha_digest_len
;
492 digest
->cd_length
= 0;
494 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_ctx_t
));
495 ctx
->cc_provider_private
= NULL
;
501 sha2_digest_update(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
502 crypto_req_handle_t req
)
504 int ret
= CRYPTO_SUCCESS
;
506 ASSERT(ctx
->cc_provider_private
!= NULL
);
509 * Do the SHA2 update on the specified input data.
511 switch (data
->cd_format
) {
512 case CRYPTO_DATA_RAW
:
513 SHA2Update(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
514 (uint8_t *)data
->cd_raw
.iov_base
+ data
->cd_offset
,
517 case CRYPTO_DATA_UIO
:
518 ret
= sha2_digest_update_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
522 ret
= CRYPTO_ARGUMENTS_BAD
;
530 sha2_digest_final(crypto_ctx_t
*ctx
, crypto_data_t
*digest
,
531 crypto_req_handle_t req
)
533 int ret
= CRYPTO_SUCCESS
;
534 uint_t sha_digest_len
;
536 ASSERT(ctx
->cc_provider_private
!= NULL
);
538 switch (PROV_SHA2_CTX(ctx
)->sc_mech_type
) {
539 case SHA256_MECH_INFO_TYPE
:
540 sha_digest_len
= SHA256_DIGEST_LENGTH
;
543 return (CRYPTO_MECHANISM_INVALID
);
547 * We need to just return the length needed to store the output.
548 * We should not destroy the context for the following cases.
550 if ((digest
->cd_length
== 0) ||
551 (digest
->cd_length
< sha_digest_len
)) {
552 digest
->cd_length
= sha_digest_len
;
553 return (CRYPTO_BUFFER_TOO_SMALL
);
559 switch (digest
->cd_format
) {
560 case CRYPTO_DATA_RAW
:
561 SHA2Final((unsigned char *)digest
->cd_raw
.iov_base
+
562 digest
->cd_offset
, &PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
);
564 case CRYPTO_DATA_UIO
:
565 ret
= sha2_digest_final_uio(&PROV_SHA2_CTX(ctx
)->sc_sha2_ctx
,
566 digest
, sha_digest_len
, NULL
);
569 ret
= CRYPTO_ARGUMENTS_BAD
;
572 /* all done, free context and return */
574 if (ret
== CRYPTO_SUCCESS
)
575 digest
->cd_length
= sha_digest_len
;
577 digest
->cd_length
= 0;
579 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_ctx_t
));
580 ctx
->cc_provider_private
= NULL
;
587 sha2_digest_atomic(crypto_provider_handle_t provider
,
588 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
589 crypto_data_t
*data
, crypto_data_t
*digest
,
590 crypto_req_handle_t req
)
592 int ret
= CRYPTO_SUCCESS
;
594 uint32_t sha_digest_len
;
600 SHA2Init(mechanism
->cm_type
, &sha2_ctx
);
602 switch (data
->cd_format
) {
603 case CRYPTO_DATA_RAW
:
604 SHA2Update(&sha2_ctx
, (uint8_t *)data
->
605 cd_raw
.iov_base
+ data
->cd_offset
, data
->cd_length
);
607 case CRYPTO_DATA_UIO
:
608 ret
= sha2_digest_update_uio(&sha2_ctx
, data
);
611 ret
= CRYPTO_ARGUMENTS_BAD
;
615 * Do the SHA updates on the specified input data.
618 if (ret
!= CRYPTO_SUCCESS
) {
619 /* the update failed, bail */
620 digest
->cd_length
= 0;
624 if (mechanism
->cm_type
<= SHA256_HMAC_GEN_MECH_INFO_TYPE
)
625 sha_digest_len
= SHA256_DIGEST_LENGTH
;
628 * Do a SHA2 final, must be done separately since the digest
629 * type can be different than the input data type.
631 switch (digest
->cd_format
) {
632 case CRYPTO_DATA_RAW
:
633 SHA2Final((unsigned char *)digest
->cd_raw
.iov_base
+
634 digest
->cd_offset
, &sha2_ctx
);
636 case CRYPTO_DATA_UIO
:
637 ret
= sha2_digest_final_uio(&sha2_ctx
, digest
,
638 sha_digest_len
, NULL
);
641 ret
= CRYPTO_ARGUMENTS_BAD
;
644 if (ret
== CRYPTO_SUCCESS
)
645 digest
->cd_length
= sha_digest_len
;
647 digest
->cd_length
= 0;
653 * KCF software provider mac entry points.
655 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
658 * The initialization routine initializes what we denote
659 * as the inner and outer contexts by doing
660 * - for inner context: SHA2(key XOR ipad)
661 * - for outer context: SHA2(key XOR opad)
664 * Each subsequent SHA2 HMAC update will result in an
665 * update of the inner context with the specified data.
668 * The SHA2 HMAC final will do a SHA2 final operation on the
669 * inner context, and the resulting digest will be used
670 * as the data for an update on the outer context. Last
671 * but not least, a SHA2 final on the outer context will
672 * be performed to obtain the SHA2 HMAC digest to return
677 * Initialize a SHA2-HMAC context.
680 sha2_mac_init_ctx(sha2_hmac_ctx_t
*ctx
, void *keyval
, uint_t length_in_bytes
)
682 uint64_t ipad
[SHA256_HMAC_BLOCK_SIZE
/ sizeof (uint64_t)];
683 uint64_t opad
[SHA256_HMAC_BLOCK_SIZE
/ sizeof (uint64_t)];
684 int i
, block_size
= 0, blocks_per_int64
= 0;
686 /* Determine the block size */
687 if (ctx
->hc_mech_type
<= SHA256_HMAC_GEN_MECH_INFO_TYPE
) {
688 block_size
= SHA256_HMAC_BLOCK_SIZE
;
689 blocks_per_int64
= SHA256_HMAC_BLOCK_SIZE
/ sizeof (uint64_t);
692 (void) bzero(ipad
, block_size
);
693 (void) bzero(opad
, block_size
);
694 (void) bcopy(keyval
, ipad
, length_in_bytes
);
695 (void) bcopy(keyval
, opad
, length_in_bytes
);
697 /* XOR key with ipad (0x36) and opad (0x5c) */
698 for (i
= 0; i
< blocks_per_int64
; i
++) {
699 ipad
[i
] ^= 0x3636363636363636;
700 opad
[i
] ^= 0x5c5c5c5c5c5c5c5c;
703 /* perform SHA2 on ipad */
704 SHA2Init(ctx
->hc_mech_type
, &ctx
->hc_icontext
);
705 SHA2Update(&ctx
->hc_icontext
, (uint8_t *)ipad
, block_size
);
707 /* perform SHA2 on opad */
708 SHA2Init(ctx
->hc_mech_type
, &ctx
->hc_ocontext
);
709 SHA2Update(&ctx
->hc_ocontext
, (uint8_t *)opad
, block_size
);
716 sha2_mac_init(crypto_ctx_t
*ctx
, crypto_mechanism_t
*mechanism
,
717 crypto_key_t
*key
, crypto_spi_ctx_template_t ctx_template
,
718 crypto_req_handle_t req
)
720 int ret
= CRYPTO_SUCCESS
;
721 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
722 uint_t sha_digest_len
, sha_hmac_block_size
;
725 * Set the digest length and block size to values approriate to the
728 switch (mechanism
->cm_type
) {
729 case SHA256_HMAC_MECH_INFO_TYPE
:
730 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
731 sha_digest_len
= SHA256_DIGEST_LENGTH
;
732 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
735 return (CRYPTO_MECHANISM_INVALID
);
738 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
739 return (CRYPTO_ARGUMENTS_BAD
);
741 ctx
->cc_provider_private
= kmem_alloc(sizeof (sha2_hmac_ctx_t
),
743 if (ctx
->cc_provider_private
== NULL
)
744 return (CRYPTO_HOST_MEMORY
);
746 PROV_SHA2_HMAC_CTX(ctx
)->hc_mech_type
= mechanism
->cm_type
;
747 if (ctx_template
!= NULL
) {
748 /* reuse context template */
749 bcopy(ctx_template
, PROV_SHA2_HMAC_CTX(ctx
),
750 sizeof (sha2_hmac_ctx_t
));
752 /* no context template, compute context */
753 if (keylen_in_bytes
> sha_hmac_block_size
) {
754 uchar_t digested_key
[SHA256_DIGEST_LENGTH
];
755 sha2_hmac_ctx_t
*hmac_ctx
= ctx
->cc_provider_private
;
758 * Hash the passed-in key to get a smaller key.
759 * The inner context is used since it hasn't been
762 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
763 &hmac_ctx
->hc_icontext
,
764 key
->ck_data
, keylen_in_bytes
, digested_key
);
765 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx
),
766 digested_key
, sha_digest_len
);
768 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx
),
769 key
->ck_data
, keylen_in_bytes
);
774 * Get the mechanism parameters, if applicable.
776 if (mechanism
->cm_type
% 3 == 2) {
777 if (mechanism
->cm_param
== NULL
||
778 mechanism
->cm_param_len
!= sizeof (ulong_t
))
779 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
780 PROV_SHA2_GET_DIGEST_LEN(mechanism
,
781 PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
);
782 if (PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
> sha_digest_len
)
783 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
786 if (ret
!= CRYPTO_SUCCESS
) {
787 bzero(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
788 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
789 ctx
->cc_provider_private
= NULL
;
797 sha2_mac_update(crypto_ctx_t
*ctx
, crypto_data_t
*data
,
798 crypto_req_handle_t req
)
800 int ret
= CRYPTO_SUCCESS
;
802 ASSERT(ctx
->cc_provider_private
!= NULL
);
805 * Do a SHA2 update of the inner context using the specified
808 switch (data
->cd_format
) {
809 case CRYPTO_DATA_RAW
:
810 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx
)->hc_icontext
,
811 (uint8_t *)data
->cd_raw
.iov_base
+ data
->cd_offset
,
814 case CRYPTO_DATA_UIO
:
815 ret
= sha2_digest_update_uio(
816 &PROV_SHA2_HMAC_CTX(ctx
)->hc_icontext
, data
);
819 ret
= CRYPTO_ARGUMENTS_BAD
;
827 sha2_mac_final(crypto_ctx_t
*ctx
, crypto_data_t
*mac
, crypto_req_handle_t req
)
829 int ret
= CRYPTO_SUCCESS
;
830 uchar_t digest
[SHA256_DIGEST_LENGTH
];
831 uint32_t digest_len
= 0, sha_digest_len
= 0;
833 ASSERT(ctx
->cc_provider_private
!= NULL
);
835 /* Set the digest lengths to values approriate to the mechanism */
836 switch (PROV_SHA2_HMAC_CTX(ctx
)->hc_mech_type
) {
837 case SHA256_HMAC_MECH_INFO_TYPE
:
838 sha_digest_len
= digest_len
= SHA256_DIGEST_LENGTH
;
840 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
841 sha_digest_len
= SHA256_DIGEST_LENGTH
;
842 digest_len
= PROV_SHA2_HMAC_CTX(ctx
)->hc_digest_len
;
849 * We need to just return the length needed to store the output.
850 * We should not destroy the context for the following cases.
852 if ((mac
->cd_length
== 0) || (mac
->cd_length
< digest_len
)) {
853 mac
->cd_length
= digest_len
;
854 return (CRYPTO_BUFFER_TOO_SMALL
);
858 * Do a SHA2 final on the inner context.
860 SHA2Final(digest
, &PROV_SHA2_HMAC_CTX(ctx
)->hc_icontext
);
863 * Do a SHA2 update on the outer context, feeding the inner
866 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
, digest
,
870 * Do a SHA2 final on the outer context, storing the computing
871 * digest in the users buffer.
873 switch (mac
->cd_format
) {
874 case CRYPTO_DATA_RAW
:
875 if (digest_len
!= sha_digest_len
) {
877 * The caller requested a short digest. Digest
878 * into a scratch buffer and return to
879 * the user only what was requested.
882 &PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
);
883 bcopy(digest
, (unsigned char *)mac
->cd_raw
.iov_base
+
884 mac
->cd_offset
, digest_len
);
886 SHA2Final((unsigned char *)mac
->cd_raw
.iov_base
+
888 &PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
);
891 case CRYPTO_DATA_UIO
:
892 ret
= sha2_digest_final_uio(
893 &PROV_SHA2_HMAC_CTX(ctx
)->hc_ocontext
, mac
,
897 ret
= CRYPTO_ARGUMENTS_BAD
;
900 if (ret
== CRYPTO_SUCCESS
)
901 mac
->cd_length
= digest_len
;
905 bzero(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
906 kmem_free(ctx
->cc_provider_private
, sizeof (sha2_hmac_ctx_t
));
907 ctx
->cc_provider_private
= NULL
;
/*
 * Shared update step for the atomic MAC entry points: digest `data` into
 * the inner context of a stack-allocated HMAC context.
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) {				\
	switch (data->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)data->cd_raw.iov_base +			\
		    data->cd_offset, data->cd_length);			\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
}
929 sha2_mac_atomic(crypto_provider_handle_t provider
,
930 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
931 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
932 crypto_spi_ctx_template_t ctx_template
, crypto_req_handle_t req
)
934 int ret
= CRYPTO_SUCCESS
;
935 uchar_t digest
[SHA256_DIGEST_LENGTH
];
936 sha2_hmac_ctx_t sha2_hmac_ctx
;
937 uint32_t sha_digest_len
, digest_len
, sha_hmac_block_size
;
938 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
941 * Set the digest length and block size to values appropriate to the
944 switch (mechanism
->cm_type
) {
945 case SHA256_HMAC_MECH_INFO_TYPE
:
946 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
947 sha_digest_len
= digest_len
= SHA256_DIGEST_LENGTH
;
948 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
951 return (CRYPTO_MECHANISM_INVALID
);
954 /* Add support for key by attributes (RFE 4706552) */
955 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
956 return (CRYPTO_ARGUMENTS_BAD
);
958 if (ctx_template
!= NULL
) {
959 /* reuse context template */
960 bcopy(ctx_template
, &sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
962 sha2_hmac_ctx
.hc_mech_type
= mechanism
->cm_type
;
963 /* no context template, initialize context */
964 if (keylen_in_bytes
> sha_hmac_block_size
) {
966 * Hash the passed-in key to get a smaller key.
967 * The inner context is used since it hasn't been
970 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
971 &sha2_hmac_ctx
.hc_icontext
,
972 key
->ck_data
, keylen_in_bytes
, digest
);
973 sha2_mac_init_ctx(&sha2_hmac_ctx
, digest
,
976 sha2_mac_init_ctx(&sha2_hmac_ctx
, key
->ck_data
,
981 /* get the mechanism parameters, if applicable */
982 if ((mechanism
->cm_type
% 3) == 2) {
983 if (mechanism
->cm_param
== NULL
||
984 mechanism
->cm_param_len
!= sizeof (ulong_t
)) {
985 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
988 PROV_SHA2_GET_DIGEST_LEN(mechanism
, digest_len
);
989 if (digest_len
> sha_digest_len
) {
990 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
995 /* do a SHA2 update of the inner context using the specified data */
996 SHA2_MAC_UPDATE(data
, sha2_hmac_ctx
, ret
);
997 if (ret
!= CRYPTO_SUCCESS
)
998 /* the update failed, free context and bail */
1002 * Do a SHA2 final on the inner context.
1004 SHA2Final(digest
, &sha2_hmac_ctx
.hc_icontext
);
1007 * Do an SHA2 update on the outer context, feeding the inner
1010 SHA2Update(&sha2_hmac_ctx
.hc_ocontext
, digest
, sha_digest_len
);
1013 * Do a SHA2 final on the outer context, storing the computed
1014 * digest in the users buffer.
1016 switch (mac
->cd_format
) {
1017 case CRYPTO_DATA_RAW
:
1018 if (digest_len
!= sha_digest_len
) {
1020 * The caller requested a short digest. Digest
1021 * into a scratch buffer and return to
1022 * the user only what was requested.
1024 SHA2Final(digest
, &sha2_hmac_ctx
.hc_ocontext
);
1025 bcopy(digest
, (unsigned char *)mac
->cd_raw
.iov_base
+
1026 mac
->cd_offset
, digest_len
);
1028 SHA2Final((unsigned char *)mac
->cd_raw
.iov_base
+
1029 mac
->cd_offset
, &sha2_hmac_ctx
.hc_ocontext
);
1032 case CRYPTO_DATA_UIO
:
1033 ret
= sha2_digest_final_uio(&sha2_hmac_ctx
.hc_ocontext
, mac
,
1034 digest_len
, digest
);
1037 ret
= CRYPTO_ARGUMENTS_BAD
;
1040 if (ret
== CRYPTO_SUCCESS
) {
1041 mac
->cd_length
= digest_len
;
1042 return (CRYPTO_SUCCESS
);
1045 bzero(&sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1052 sha2_mac_verify_atomic(crypto_provider_handle_t provider
,
1053 crypto_session_id_t session_id
, crypto_mechanism_t
*mechanism
,
1054 crypto_key_t
*key
, crypto_data_t
*data
, crypto_data_t
*mac
,
1055 crypto_spi_ctx_template_t ctx_template
, crypto_req_handle_t req
)
1057 int ret
= CRYPTO_SUCCESS
;
1058 uchar_t digest
[SHA256_DIGEST_LENGTH
];
1059 sha2_hmac_ctx_t sha2_hmac_ctx
;
1060 uint32_t sha_digest_len
, digest_len
, sha_hmac_block_size
;
1061 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
1064 * Set the digest length and block size to values appropriate to the
1067 switch (mechanism
->cm_type
) {
1068 case SHA256_HMAC_MECH_INFO_TYPE
:
1069 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
1070 sha_digest_len
= digest_len
= SHA256_DIGEST_LENGTH
;
1071 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
1074 return (CRYPTO_MECHANISM_INVALID
);
1077 /* Add support for key by attributes (RFE 4706552) */
1078 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
1079 return (CRYPTO_ARGUMENTS_BAD
);
1081 if (ctx_template
!= NULL
) {
1082 /* reuse context template */
1083 bcopy(ctx_template
, &sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1085 sha2_hmac_ctx
.hc_mech_type
= mechanism
->cm_type
;
1086 /* no context template, initialize context */
1087 if (keylen_in_bytes
> sha_hmac_block_size
) {
1089 * Hash the passed-in key to get a smaller key.
1090 * The inner context is used since it hasn't been
1093 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
1094 &sha2_hmac_ctx
.hc_icontext
,
1095 key
->ck_data
, keylen_in_bytes
, digest
);
1096 sha2_mac_init_ctx(&sha2_hmac_ctx
, digest
,
1099 sha2_mac_init_ctx(&sha2_hmac_ctx
, key
->ck_data
,
1104 /* get the mechanism parameters, if applicable */
1105 if (mechanism
->cm_type
% 3 == 2) {
1106 if (mechanism
->cm_param
== NULL
||
1107 mechanism
->cm_param_len
!= sizeof (ulong_t
)) {
1108 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
1111 PROV_SHA2_GET_DIGEST_LEN(mechanism
, digest_len
);
1112 if (digest_len
> sha_digest_len
) {
1113 ret
= CRYPTO_MECHANISM_PARAM_INVALID
;
1118 if (mac
->cd_length
!= digest_len
) {
1119 ret
= CRYPTO_INVALID_MAC
;
1123 /* do a SHA2 update of the inner context using the specified data */
1124 SHA2_MAC_UPDATE(data
, sha2_hmac_ctx
, ret
);
1125 if (ret
!= CRYPTO_SUCCESS
)
1126 /* the update failed, free context and bail */
1129 /* do a SHA2 final on the inner context */
1130 SHA2Final(digest
, &sha2_hmac_ctx
.hc_icontext
);
1133 * Do an SHA2 update on the outer context, feeding the inner
1136 SHA2Update(&sha2_hmac_ctx
.hc_ocontext
, digest
, sha_digest_len
);
1139 * Do a SHA2 final on the outer context, storing the computed
1140 * digest in the users buffer.
1142 SHA2Final(digest
, &sha2_hmac_ctx
.hc_ocontext
);
1145 * Compare the computed digest against the expected digest passed
1149 switch (mac
->cd_format
) {
1151 case CRYPTO_DATA_RAW
:
1152 if (bcmp(digest
, (unsigned char *)mac
->cd_raw
.iov_base
+
1153 mac
->cd_offset
, digest_len
) != 0)
1154 ret
= CRYPTO_INVALID_MAC
;
1157 case CRYPTO_DATA_UIO
: {
1158 off_t offset
= mac
->cd_offset
;
1160 off_t scratch_offset
= 0;
1161 size_t length
= digest_len
;
1164 /* we support only kernel buffer */
1165 if (mac
->cd_uio
->uio_segflg
!= UIO_SYSSPACE
)
1166 return (CRYPTO_ARGUMENTS_BAD
);
1168 /* jump to the first iovec containing the expected digest */
1170 offset
>= mac
->cd_uio
->uio_iov
[vec_idx
].iov_len
&&
1171 vec_idx
< mac
->cd_uio
->uio_iovcnt
;
1172 offset
-= mac
->cd_uio
->uio_iov
[vec_idx
++].iov_len
)
1174 if (vec_idx
== mac
->cd_uio
->uio_iovcnt
) {
1176 * The caller specified an offset that is
1177 * larger than the total size of the buffers
1180 ret
= CRYPTO_DATA_LEN_RANGE
;
1184 /* do the comparison of computed digest vs specified one */
1185 while (vec_idx
< mac
->cd_uio
->uio_iovcnt
&& length
> 0) {
1186 cur_len
= MIN(mac
->cd_uio
->uio_iov
[vec_idx
].iov_len
-
1189 if (bcmp(digest
+ scratch_offset
,
1190 mac
->cd_uio
->uio_iov
[vec_idx
].iov_base
+ offset
,
1192 ret
= CRYPTO_INVALID_MAC
;
1198 scratch_offset
+= cur_len
;
1205 ret
= CRYPTO_ARGUMENTS_BAD
;
1210 bzero(&sha2_hmac_ctx
, sizeof (sha2_hmac_ctx_t
));
1216 * KCF software provider context management entry points.
1221 sha2_create_ctx_template(crypto_provider_handle_t provider
,
1222 crypto_mechanism_t
*mechanism
, crypto_key_t
*key
,
1223 crypto_spi_ctx_template_t
*ctx_template
, size_t *ctx_template_size
,
1224 crypto_req_handle_t req
)
1226 sha2_hmac_ctx_t
*sha2_hmac_ctx_tmpl
;
1227 uint_t keylen_in_bytes
= CRYPTO_BITS2BYTES(key
->ck_length
);
1228 uint32_t sha_digest_len
, sha_hmac_block_size
;
1231 * Set the digest length and block size to values appropriate to the
1234 switch (mechanism
->cm_type
) {
1235 case SHA256_HMAC_MECH_INFO_TYPE
:
1236 case SHA256_HMAC_GEN_MECH_INFO_TYPE
:
1237 sha_digest_len
= SHA256_DIGEST_LENGTH
;
1238 sha_hmac_block_size
= SHA256_HMAC_BLOCK_SIZE
;
1241 return (CRYPTO_MECHANISM_INVALID
);
1244 /* Add support for key by attributes (RFE 4706552) */
1245 if (key
->ck_format
!= CRYPTO_KEY_RAW
)
1246 return (CRYPTO_ARGUMENTS_BAD
);
1249 * Allocate and initialize SHA2 context.
1251 sha2_hmac_ctx_tmpl
= kmem_alloc(sizeof (sha2_hmac_ctx_t
),
1252 crypto_kmflag(req
));
1253 if (sha2_hmac_ctx_tmpl
== NULL
)
1254 return (CRYPTO_HOST_MEMORY
);
1256 sha2_hmac_ctx_tmpl
->hc_mech_type
= mechanism
->cm_type
;
1258 if (keylen_in_bytes
> sha_hmac_block_size
) {
1259 uchar_t digested_key
[SHA256_DIGEST_LENGTH
];
1262 * Hash the passed-in key to get a smaller key.
1263 * The inner context is used since it hasn't been
1266 PROV_SHA2_DIGEST_KEY(mechanism
->cm_type
/ 3,
1267 &sha2_hmac_ctx_tmpl
->hc_icontext
,
1268 key
->ck_data
, keylen_in_bytes
, digested_key
);
1269 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl
, digested_key
,
1272 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl
, key
->ck_data
,
1276 *ctx_template
= (crypto_spi_ctx_template_t
)sha2_hmac_ctx_tmpl
;
1277 *ctx_template_size
= sizeof (sha2_hmac_ctx_t
);
1279 return (CRYPTO_SUCCESS
);
1283 sha2_free_context(crypto_ctx_t
*ctx
)
1287 if (ctx
->cc_provider_private
== NULL
)
1288 return (CRYPTO_SUCCESS
);
1291 * We have to free either SHA2 or SHA2-HMAC contexts, which
1292 * have different lengths.
1294 * Note: Below is dependent on the mechanism ordering.
1297 if (PROV_SHA2_CTX(ctx
)->sc_mech_type
% 3 == 0)
1298 ctx_len
= sizeof (sha2_ctx_t
);
1300 ctx_len
= sizeof (sha2_hmac_ctx_t
);
1302 bzero(ctx
->cc_provider_private
, ctx_len
);
1303 kmem_free(ctx
->cc_provider_private
, ctx_len
);
1304 ctx
->cc_provider_private
= NULL
;
1306 return (CRYPTO_SUCCESS
);