/*
 * module/icp/io/sha2_mod.c (OpenZFS mirror)
 * OpenZFS 4185 - add new cryptographic checksums to ZFS: SHA-512, Skein, Edon-R
 */
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2010 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */
26
27 #include <sys/zfs_context.h>
28 #include <sys/modctl.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/crypto/icp.h>
32 #define _SHA2_IMPL
33 #include <sys/sha2.h>
34 #include <sha2/sha2_impl.h>
35
36 /*
37 * The sha2 module is created with two modlinkages:
38 * - a modlmisc that allows consumers to directly call the entry points
39 * SHA2Init, SHA2Update, and SHA2Final.
40 * - a modlcrypto that allows the module to register with the Kernel
41 * Cryptographic Framework (KCF) as a software provider for the SHA2
42 * mechanisms.
43 */
44
/* Linkage telling the kernel module framework this is a crypto provider. */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"SHA2 Kernel SW Provider"
};

/* Module linkage: a single modlcrypto entry, NULL-terminated. */
static struct modlinkage modlinkage = {
	MODREV_1, {&modlcrypto, NULL}
};
53
/*
 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
 * by KCF to one of the entry points.  cc_provider_private is owned by
 * this module: allocated in the *_init entry points and released in the
 * *_final / free_context paths.
 */

#define	PROV_SHA2_CTX(ctx)	((sha2_ctx_t *)(ctx)->cc_provider_private)
#define	PROV_SHA2_HMAC_CTX(ctx)	((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
61
/*
 * Extract the digest length passed as a mechanism parameter (an ulong_t).
 * The parameter pointer may be unaligned, in which case the value is
 * copied byte-wise into an aligned temporary before being read.
 * Wrapped in do/while (0) so the expansion is a single statement and
 * is safe inside unbraced if/else bodies (the bare-brace form was not).
 */
#define	PROV_SHA2_GET_DIGEST_LEN(m, len) do {				\
	if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t)))		\
		(len) = (uint32_t)*((ulong_t *)(m)->cm_param);		\
	else {								\
		ulong_t tmp_ulong;					\
		bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t));	\
		(len) = (uint32_t)tmp_ulong;				\
	}								\
} while (0)
72
/*
 * Digest a raw key with the given SHA2 mechanism.  Used to shrink HMAC
 * keys longer than the block size, per RFC 2104.  do/while (0) makes
 * the expansion a single statement (safe in unbraced if/else bodies).
 */
#define	PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) do {		\
	SHA2Init(mech, ctx);						\
	SHA2Update(ctx, key, len);					\
	SHA2Final(digest, ctx);						\
} while (0)
78
/*
 * Mechanism info structure passed to KCF during registration.
 * Only the SHA-256 family is offered here: the plain digest, the
 * HMAC, and the general-length HMAC variant.
 */
static crypto_mech_info_t sha2_mech_info_tab[] = {
	/* SHA256 */
	{SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
	    CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
	    0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
	/* SHA256-HMAC */
	{SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* SHA256-HMAC GENERAL (caller-selected output length) */
	{SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
	    SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
	    CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
98
static void sha2_provider_status(crypto_provider_handle_t, uint_t *);

/* Control entry points registered with KCF. */
static crypto_control_ops_t sha2_control_ops = {
	sha2_provider_status
};
104
static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_req_handle_t);
static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);

/* Digest entry points; the NULL slot is an unsupported operation. */
static crypto_digest_ops_t sha2_digest_ops = {
	sha2_digest_init,
	sha2_digest,
	sha2_digest_update,
	NULL,
	sha2_digest_final,
	sha2_digest_atomic
};
125
static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

/* MAC (HMAC) entry points; the NULL slot is an unsupported operation. */
static crypto_mac_ops_t sha2_mac_ops = {
	sha2_mac_init,
	NULL,
	sha2_mac_update,
	sha2_mac_final,
	sha2_mac_atomic,
	sha2_mac_verify_atomic
};
146
static int sha2_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int sha2_free_context(crypto_ctx_t *);

/* Context-management entry points (template creation, context free). */
static crypto_ctx_ops_t sha2_ctx_ops = {
	sha2_create_ctx_template,
	sha2_free_context
};
156
/*
 * Operations vector handed to KCF.  Only control, digest, mac and
 * context-management groups are implemented; all other groups are NULL.
 */
static crypto_ops_t sha2_crypto_ops = {{{{{
	&sha2_control_ops,
	&sha2_digest_ops,
	NULL,
	&sha2_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&sha2_ctx_ops
}}}}};
173
/* Provider description registered with KCF in sha2_mod_init(). */
static crypto_provider_info_t sha2_prov_info = {{{{
	CRYPTO_SPI_VERSION_1,
	"SHA2 Software Provider",
	CRYPTO_SW_PROVIDER,
	NULL,
	&sha2_crypto_ops,
	sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
	sha2_mech_info_tab
}}}};

/* Handle returned by crypto_register_provider(); 0 while unregistered. */
static crypto_kcf_provider_handle_t sha2_prov_handle = 0;
185
186 int
187 sha2_mod_init(void)
188 {
189 int ret;
190
191 if ((ret = mod_install(&modlinkage)) != 0)
192 return (ret);
193
194 /*
195 * Register with KCF. If the registration fails, log an
196 * error but do not uninstall the module, since the functionality
197 * provided by misc/sha2 should still be available.
198 */
199 if ((ret = crypto_register_provider(&sha2_prov_info,
200 &sha2_prov_handle)) != CRYPTO_SUCCESS)
201 cmn_err(CE_WARN, "sha2 _init: "
202 "crypto_register_provider() failed (0x%x)", ret);
203
204 return (0);
205 }
206
207 int
208 sha2_mod_fini(void)
209 {
210 int ret;
211
212 if (sha2_prov_handle != 0) {
213 if ((ret = crypto_unregister_provider(sha2_prov_handle)) !=
214 CRYPTO_SUCCESS) {
215 cmn_err(CE_WARN,
216 "sha2 _fini: crypto_unregister_provider() "
217 "failed (0x%x)", ret);
218 return (EBUSY);
219 }
220 sha2_prov_handle = 0;
221 }
222
223 return (mod_remove(&modlinkage));
224 }
225
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* A software provider has no hardware to fail; always ready. */
	*status = CRYPTO_PROVIDER_READY;
}
235
236 /*
237 * KCF software provider digest entry points.
238 */
239
240 static int
241 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
242 crypto_req_handle_t req)
243 {
244
245 /*
246 * Allocate and initialize SHA2 context.
247 */
248 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
249 crypto_kmflag(req));
250 if (ctx->cc_provider_private == NULL)
251 return (CRYPTO_HOST_MEMORY);
252
253 PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
254 SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
255
256 return (CRYPTO_SUCCESS);
257 }
258
/*
 * Helper SHA2 digest update function for uio data: feed cd_length bytes,
 * starting at cd_offset, from the uio's iovec array into sha2_ctx.
 * Returns CRYPTO_ARGUMENTS_BAD for non-kernel uios,
 * CRYPTO_DATA_LEN_RANGE when offset/length exceed the supplied iovecs,
 * CRYPTO_SUCCESS otherwise.
 */
static int
sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
{
	off_t offset = data->cd_offset;
	size_t length = data->cd_length;
	uint_t vec_idx;
	size_t cur_len;

	/* we support only kernel buffer */
	if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
		return (CRYPTO_ARGUMENTS_BAD);

	/*
	 * Jump to the first iovec containing data to be
	 * digested.  Note the bounds check on vec_idx precedes the
	 * uio_iov[vec_idx] access.
	 */
	for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
	    offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
	    offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
		;
	if (vec_idx == data->cd_uio->uio_iovcnt) {
		/*
		 * The caller specified an offset that is larger than the
		 * total size of the buffers it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	/*
	 * Now do the digesting on the iovecs.  `offset' only applies
	 * within the first iovec processed; it is zeroed afterwards.
	 */
	while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
		cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
		    offset, length);

		SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
		    uio_iov[vec_idx].iov_base + offset, cur_len);
		length -= cur_len;
		vec_idx++;
		offset = 0;
	}

	if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
		/*
		 * The end of the specified iovec's was reached but
		 * the length requested could not be processed, i.e.
		 * The caller requested to digest more data than it provided.
		 */
		return (CRYPTO_DATA_LEN_RANGE);
	}

	return (CRYPTO_SUCCESS);
}
315
316 /*
317 * Helper SHA2 digest final function for uio data.
318 * digest_len is the length of the desired digest. If digest_len
319 * is smaller than the default SHA2 digest length, the caller
320 * must pass a scratch buffer, digest_scratch, which must
321 * be at least the algorithm's digest length bytes.
322 */
323 static int
324 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
325 ulong_t digest_len, uchar_t *digest_scratch)
326 {
327 off_t offset = digest->cd_offset;
328 uint_t vec_idx;
329
330 /* we support only kernel buffer */
331 if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
332 return (CRYPTO_ARGUMENTS_BAD);
333
334 /*
335 * Jump to the first iovec containing ptr to the digest to
336 * be returned.
337 */
338 for (vec_idx = 0; offset >= digest->cd_uio->uio_iov[vec_idx].iov_len &&
339 vec_idx < digest->cd_uio->uio_iovcnt;
340 offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
341 ;
342 if (vec_idx == digest->cd_uio->uio_iovcnt) {
343 /*
344 * The caller specified an offset that is
345 * larger than the total size of the buffers
346 * it provided.
347 */
348 return (CRYPTO_DATA_LEN_RANGE);
349 }
350
351 if (offset + digest_len <=
352 digest->cd_uio->uio_iov[vec_idx].iov_len) {
353 /*
354 * The computed SHA2 digest will fit in the current
355 * iovec.
356 */
357 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
358 (digest_len != SHA256_DIGEST_LENGTH))) {
359 /*
360 * The caller requested a short digest. Digest
361 * into a scratch buffer and return to
362 * the user only what was requested.
363 */
364 SHA2Final(digest_scratch, sha2_ctx);
365
366 bcopy(digest_scratch, (uchar_t *)digest->
367 cd_uio->uio_iov[vec_idx].iov_base + offset,
368 digest_len);
369 } else {
370 SHA2Final((uchar_t *)digest->
371 cd_uio->uio_iov[vec_idx].iov_base + offset,
372 sha2_ctx);
373
374 }
375 } else {
376 /*
377 * The computed digest will be crossing one or more iovec's.
378 * This is bad performance-wise but we need to support it.
379 * Allocate a small scratch buffer on the stack and
380 * copy it piece meal to the specified digest iovec's.
381 */
382 uchar_t digest_tmp[SHA256_DIGEST_LENGTH];
383 off_t scratch_offset = 0;
384 size_t length = digest_len;
385 size_t cur_len;
386
387 SHA2Final(digest_tmp, sha2_ctx);
388
389 while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
390 cur_len =
391 MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
392 offset, length);
393 bcopy(digest_tmp + scratch_offset,
394 digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
395 cur_len);
396
397 length -= cur_len;
398 vec_idx++;
399 scratch_offset += cur_len;
400 offset = 0;
401 }
402
403 if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
404 /*
405 * The end of the specified iovec's was reached but
406 * the length requested could not be processed, i.e.
407 * The caller requested to digest more data than it
408 * provided.
409 */
410 return (CRYPTO_DATA_LEN_RANGE);
411 }
412 }
413
414 return (CRYPTO_SUCCESS);
415 }
416
/*
 * Single-part digest: consume all of `data', write the SHA-256 digest
 * to `digest', and release the per-operation context.  The context is
 * preserved only on CRYPTO_BUFFER_TOO_SMALL so the caller can retry
 * with a larger output buffer.
 */
/* ARGSUSED */
static int
sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do the SHA2 update on the specified input data.
	 */
	switch (data->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
		    data->cd_length);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    data);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret != CRYPTO_SUCCESS) {
		/* the update failed, free context and bail */
		kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
		ctx->cc_provider_private = NULL;
		digest->cd_length = 0;
		return (ret);
	}

	/*
	 * Do a SHA2 final, must be done separately since the digest
	 * type can be different than the input data type.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;
	return (ret);
}
498
499 /* ARGSUSED */
500 static int
501 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
502 crypto_req_handle_t req)
503 {
504 int ret = CRYPTO_SUCCESS;
505
506 ASSERT(ctx->cc_provider_private != NULL);
507
508 /*
509 * Do the SHA2 update on the specified input data.
510 */
511 switch (data->cd_format) {
512 case CRYPTO_DATA_RAW:
513 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
514 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
515 data->cd_length);
516 break;
517 case CRYPTO_DATA_UIO:
518 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
519 data);
520 break;
521 default:
522 ret = CRYPTO_ARGUMENTS_BAD;
523 }
524
525 return (ret);
526 }
527
/*
 * Multi-part digest final: write out the digest and release the
 * per-operation context.  Like sha2_digest(), the context survives
 * only a CRYPTO_BUFFER_TOO_SMALL return.
 */
/* ARGSUSED */
static int
sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
    crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uint_t sha_digest_len;

	ASSERT(ctx->cc_provider_private != NULL);

	switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
	case SHA256_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((digest->cd_length == 0) ||
	    (digest->cd_length < sha_digest_len)) {
		digest->cd_length = sha_digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final.
	 */
	switch (digest->cd_format) {
	case CRYPTO_DATA_RAW:
		SHA2Final((unsigned char *)digest->cd_raw.iov_base +
		    digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
		    digest, sha_digest_len, NULL);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/* all done, free context and return */

	if (ret == CRYPTO_SUCCESS)
		digest->cd_length = sha_digest_len;
	else
		digest->cd_length = 0;

	kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
584
585 /* ARGSUSED */
586 static int
587 sha2_digest_atomic(crypto_provider_handle_t provider,
588 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
589 crypto_data_t *data, crypto_data_t *digest,
590 crypto_req_handle_t req)
591 {
592 int ret = CRYPTO_SUCCESS;
593 SHA2_CTX sha2_ctx;
594 uint32_t sha_digest_len;
595
596 /*
597 * Do the SHA inits.
598 */
599
600 SHA2Init(mechanism->cm_type, &sha2_ctx);
601
602 switch (data->cd_format) {
603 case CRYPTO_DATA_RAW:
604 SHA2Update(&sha2_ctx, (uint8_t *)data->
605 cd_raw.iov_base + data->cd_offset, data->cd_length);
606 break;
607 case CRYPTO_DATA_UIO:
608 ret = sha2_digest_update_uio(&sha2_ctx, data);
609 break;
610 default:
611 ret = CRYPTO_ARGUMENTS_BAD;
612 }
613
614 /*
615 * Do the SHA updates on the specified input data.
616 */
617
618 if (ret != CRYPTO_SUCCESS) {
619 /* the update failed, bail */
620 digest->cd_length = 0;
621 return (ret);
622 }
623
624 if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
625 sha_digest_len = SHA256_DIGEST_LENGTH;
626
627 /*
628 * Do a SHA2 final, must be done separately since the digest
629 * type can be different than the input data type.
630 */
631 switch (digest->cd_format) {
632 case CRYPTO_DATA_RAW:
633 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
634 digest->cd_offset, &sha2_ctx);
635 break;
636 case CRYPTO_DATA_UIO:
637 ret = sha2_digest_final_uio(&sha2_ctx, digest,
638 sha_digest_len, NULL);
639 break;
640 default:
641 ret = CRYPTO_ARGUMENTS_BAD;
642 }
643
644 if (ret == CRYPTO_SUCCESS)
645 digest->cd_length = sha_digest_len;
646 else
647 digest->cd_length = 0;
648
649 return (ret);
650 }
651
652 /*
653 * KCF software provider mac entry points.
654 *
655 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
656 *
657 * Init:
658 * The initialization routine initializes what we denote
659 * as the inner and outer contexts by doing
660 * - for inner context: SHA2(key XOR ipad)
661 * - for outer context: SHA2(key XOR opad)
662 *
663 * Update:
664 * Each subsequent SHA2 HMAC update will result in an
665 * update of the inner context with the specified data.
666 *
667 * Final:
668 * The SHA2 HMAC final will do a SHA2 final operation on the
669 * inner context, and the resulting digest will be used
670 * as the data for an update on the outer context. Last
671 * but not least, a SHA2 final on the outer context will
672 * be performed to obtain the SHA2 HMAC digest to return
673 * to the user.
674 */
675
676 /*
677 * Initialize a SHA2-HMAC context.
678 */
679 static void
680 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
681 {
682 uint64_t ipad[SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
683 uint64_t opad[SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
684 int i, block_size = 0, blocks_per_int64 = 0;
685
686 /* Determine the block size */
687 if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
688 block_size = SHA256_HMAC_BLOCK_SIZE;
689 blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
690 }
691
692 (void) bzero(ipad, block_size);
693 (void) bzero(opad, block_size);
694 (void) bcopy(keyval, ipad, length_in_bytes);
695 (void) bcopy(keyval, opad, length_in_bytes);
696
697 /* XOR key with ipad (0x36) and opad (0x5c) */
698 for (i = 0; i < blocks_per_int64; i ++) {
699 ipad[i] ^= 0x3636363636363636;
700 opad[i] ^= 0x5c5c5c5c5c5c5c5c;
701 }
702
703 /* perform SHA2 on ipad */
704 SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
705 SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
706
707 /* perform SHA2 on opad */
708 SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
709 SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
710
711 }
712
713 /*
714 */
715 static int
716 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
717 crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
718 crypto_req_handle_t req)
719 {
720 int ret = CRYPTO_SUCCESS;
721 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
722 uint_t sha_digest_len, sha_hmac_block_size;
723
724 /*
725 * Set the digest length and block size to values approriate to the
726 * mechanism
727 */
728 switch (mechanism->cm_type) {
729 case SHA256_HMAC_MECH_INFO_TYPE:
730 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
731 sha_digest_len = SHA256_DIGEST_LENGTH;
732 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
733 break;
734 default:
735 return (CRYPTO_MECHANISM_INVALID);
736 }
737
738 if (key->ck_format != CRYPTO_KEY_RAW)
739 return (CRYPTO_ARGUMENTS_BAD);
740
741 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
742 crypto_kmflag(req));
743 if (ctx->cc_provider_private == NULL)
744 return (CRYPTO_HOST_MEMORY);
745
746 PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
747 if (ctx_template != NULL) {
748 /* reuse context template */
749 bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
750 sizeof (sha2_hmac_ctx_t));
751 } else {
752 /* no context template, compute context */
753 if (keylen_in_bytes > sha_hmac_block_size) {
754 uchar_t digested_key[SHA256_DIGEST_LENGTH];
755 sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
756
757 /*
758 * Hash the passed-in key to get a smaller key.
759 * The inner context is used since it hasn't been
760 * initialized yet.
761 */
762 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
763 &hmac_ctx->hc_icontext,
764 key->ck_data, keylen_in_bytes, digested_key);
765 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
766 digested_key, sha_digest_len);
767 } else {
768 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
769 key->ck_data, keylen_in_bytes);
770 }
771 }
772
773 /*
774 * Get the mechanism parameters, if applicable.
775 */
776 if (mechanism->cm_type % 3 == 2) {
777 if (mechanism->cm_param == NULL ||
778 mechanism->cm_param_len != sizeof (ulong_t))
779 ret = CRYPTO_MECHANISM_PARAM_INVALID;
780 PROV_SHA2_GET_DIGEST_LEN(mechanism,
781 PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
782 if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
783 ret = CRYPTO_MECHANISM_PARAM_INVALID;
784 }
785
786 if (ret != CRYPTO_SUCCESS) {
787 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
788 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
789 ctx->cc_provider_private = NULL;
790 }
791
792 return (ret);
793 }
794
795 /* ARGSUSED */
796 static int
797 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
798 crypto_req_handle_t req)
799 {
800 int ret = CRYPTO_SUCCESS;
801
802 ASSERT(ctx->cc_provider_private != NULL);
803
804 /*
805 * Do a SHA2 update of the inner context using the specified
806 * data.
807 */
808 switch (data->cd_format) {
809 case CRYPTO_DATA_RAW:
810 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
811 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
812 data->cd_length);
813 break;
814 case CRYPTO_DATA_UIO:
815 ret = sha2_digest_update_uio(
816 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
817 break;
818 default:
819 ret = CRYPTO_ARGUMENTS_BAD;
820 }
821
822 return (ret);
823 }
824
/*
 * HMAC final: finish the inner digest, run it through the outer
 * context and copy the (possibly truncated, for the GENERAL
 * mechanism) result to the caller.  The per-operation context is
 * zeroized and freed on every path except CRYPTO_BUFFER_TOO_SMALL.
 */
/* ARGSUSED */
static int
sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
{
	int ret = CRYPTO_SUCCESS;
	uchar_t digest[SHA256_DIGEST_LENGTH];
	uint32_t digest_len = 0, sha_digest_len = 0;

	ASSERT(ctx->cc_provider_private != NULL);

	/* Set the digest lengths to values approriate to the mechanism */
	switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
	case SHA256_HMAC_MECH_INFO_TYPE:
		sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
		break;
	case SHA256_HMAC_GEN_MECH_INFO_TYPE:
		sha_digest_len = SHA256_DIGEST_LENGTH;
		/* caller-requested output length, validated at init time */
		digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
		break;
	default:
		break;
	}

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following cases.
	 */
	if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
		mac->cd_length = digest_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	/*
	 * Do a SHA2 final on the inner context.
	 */
	SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);

	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.
	 */
	SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
	    sha_digest_len);

	/*
	 * Do a SHA2 final on the outer context, storing the computing
	 * digest in the users buffer.
	 */
	switch (mac->cd_format) {
	case CRYPTO_DATA_RAW:
		if (digest_len != sha_digest_len) {
			/*
			 * The caller requested a short digest. Digest
			 * into a scratch buffer and return to
			 * the user only what was requested.
			 */
			SHA2Final(digest,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
			bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset, digest_len);
		} else {
			SHA2Final((unsigned char *)mac->cd_raw.iov_base +
			    mac->cd_offset,
			    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
		}
		break;
	case CRYPTO_DATA_UIO:
		ret = sha2_digest_final_uio(
		    &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
		    digest_len, digest);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS)
		mac->cd_length = digest_len;
	else
		mac->cd_length = 0;

	/* scrub the keyed context before freeing it */
	bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
	kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
	ctx->cc_provider_private = NULL;

	return (ret);
}
911
/*
 * Feed `data' (raw or uio) into the inner context of the on-stack
 * HMAC context `ctx', storing the result code in `ret'.  `data' is
 * parenthesized at every use and the whole body is wrapped in
 * do/while (0) so the expansion is a single, hygienic statement.
 */
#define	SHA2_MAC_UPDATE(data, ctx, ret) do {				\
	switch ((data)->cd_format) {					\
	case CRYPTO_DATA_RAW:						\
		SHA2Update(&(ctx).hc_icontext,				\
		    (uint8_t *)(data)->cd_raw.iov_base +		\
		    (data)->cd_offset, (data)->cd_length);		\
		break;							\
	case CRYPTO_DATA_UIO:						\
		ret = sha2_digest_update_uio(&(ctx).hc_icontext, data);	\
		break;							\
	default:							\
		ret = CRYPTO_ARGUMENTS_BAD;				\
	}								\
} while (0)
926
927 /* ARGSUSED */
928 static int
929 sha2_mac_atomic(crypto_provider_handle_t provider,
930 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
931 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
932 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
933 {
934 int ret = CRYPTO_SUCCESS;
935 uchar_t digest[SHA256_DIGEST_LENGTH];
936 sha2_hmac_ctx_t sha2_hmac_ctx;
937 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
938 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
939
940 /*
941 * Set the digest length and block size to values appropriate to the
942 * mechanism
943 */
944 switch (mechanism->cm_type) {
945 case SHA256_HMAC_MECH_INFO_TYPE:
946 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
947 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
948 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
949 break;
950 default:
951 return (CRYPTO_MECHANISM_INVALID);
952 }
953
954 /* Add support for key by attributes (RFE 4706552) */
955 if (key->ck_format != CRYPTO_KEY_RAW)
956 return (CRYPTO_ARGUMENTS_BAD);
957
958 if (ctx_template != NULL) {
959 /* reuse context template */
960 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
961 } else {
962 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
963 /* no context template, initialize context */
964 if (keylen_in_bytes > sha_hmac_block_size) {
965 /*
966 * Hash the passed-in key to get a smaller key.
967 * The inner context is used since it hasn't been
968 * initialized yet.
969 */
970 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
971 &sha2_hmac_ctx.hc_icontext,
972 key->ck_data, keylen_in_bytes, digest);
973 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
974 sha_digest_len);
975 } else {
976 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
977 keylen_in_bytes);
978 }
979 }
980
981 /* get the mechanism parameters, if applicable */
982 if ((mechanism->cm_type % 3) == 2) {
983 if (mechanism->cm_param == NULL ||
984 mechanism->cm_param_len != sizeof (ulong_t)) {
985 ret = CRYPTO_MECHANISM_PARAM_INVALID;
986 goto bail;
987 }
988 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
989 if (digest_len > sha_digest_len) {
990 ret = CRYPTO_MECHANISM_PARAM_INVALID;
991 goto bail;
992 }
993 }
994
995 /* do a SHA2 update of the inner context using the specified data */
996 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
997 if (ret != CRYPTO_SUCCESS)
998 /* the update failed, free context and bail */
999 goto bail;
1000
1001 /*
1002 * Do a SHA2 final on the inner context.
1003 */
1004 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1005
1006 /*
1007 * Do an SHA2 update on the outer context, feeding the inner
1008 * digest as data.
1009 */
1010 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1011
1012 /*
1013 * Do a SHA2 final on the outer context, storing the computed
1014 * digest in the users buffer.
1015 */
1016 switch (mac->cd_format) {
1017 case CRYPTO_DATA_RAW:
1018 if (digest_len != sha_digest_len) {
1019 /*
1020 * The caller requested a short digest. Digest
1021 * into a scratch buffer and return to
1022 * the user only what was requested.
1023 */
1024 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1025 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1026 mac->cd_offset, digest_len);
1027 } else {
1028 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1029 mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1030 }
1031 break;
1032 case CRYPTO_DATA_UIO:
1033 ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1034 digest_len, digest);
1035 break;
1036 default:
1037 ret = CRYPTO_ARGUMENTS_BAD;
1038 }
1039
1040 if (ret == CRYPTO_SUCCESS) {
1041 mac->cd_length = digest_len;
1042 return (CRYPTO_SUCCESS);
1043 }
1044 bail:
1045 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1046 mac->cd_length = 0;
1047 return (ret);
1048 }
1049
1050 /* ARGSUSED */
1051 static int
1052 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1053 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1054 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1055 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1056 {
1057 int ret = CRYPTO_SUCCESS;
1058 uchar_t digest[SHA256_DIGEST_LENGTH];
1059 sha2_hmac_ctx_t sha2_hmac_ctx;
1060 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1061 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1062
1063 /*
1064 * Set the digest length and block size to values appropriate to the
1065 * mechanism
1066 */
1067 switch (mechanism->cm_type) {
1068 case SHA256_HMAC_MECH_INFO_TYPE:
1069 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1070 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1071 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1072 break;
1073 default:
1074 return (CRYPTO_MECHANISM_INVALID);
1075 }
1076
1077 /* Add support for key by attributes (RFE 4706552) */
1078 if (key->ck_format != CRYPTO_KEY_RAW)
1079 return (CRYPTO_ARGUMENTS_BAD);
1080
1081 if (ctx_template != NULL) {
1082 /* reuse context template */
1083 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1084 } else {
1085 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1086 /* no context template, initialize context */
1087 if (keylen_in_bytes > sha_hmac_block_size) {
1088 /*
1089 * Hash the passed-in key to get a smaller key.
1090 * The inner context is used since it hasn't been
1091 * initialized yet.
1092 */
1093 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1094 &sha2_hmac_ctx.hc_icontext,
1095 key->ck_data, keylen_in_bytes, digest);
1096 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1097 sha_digest_len);
1098 } else {
1099 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1100 keylen_in_bytes);
1101 }
1102 }
1103
1104 /* get the mechanism parameters, if applicable */
1105 if (mechanism->cm_type % 3 == 2) {
1106 if (mechanism->cm_param == NULL ||
1107 mechanism->cm_param_len != sizeof (ulong_t)) {
1108 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1109 goto bail;
1110 }
1111 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1112 if (digest_len > sha_digest_len) {
1113 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1114 goto bail;
1115 }
1116 }
1117
1118 if (mac->cd_length != digest_len) {
1119 ret = CRYPTO_INVALID_MAC;
1120 goto bail;
1121 }
1122
1123 /* do a SHA2 update of the inner context using the specified data */
1124 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1125 if (ret != CRYPTO_SUCCESS)
1126 /* the update failed, free context and bail */
1127 goto bail;
1128
1129 /* do a SHA2 final on the inner context */
1130 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1131
1132 /*
1133 * Do an SHA2 update on the outer context, feeding the inner
1134 * digest as data.
1135 */
1136 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1137
1138 /*
1139 * Do a SHA2 final on the outer context, storing the computed
1140 * digest in the users buffer.
1141 */
1142 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1143
1144 /*
1145 * Compare the computed digest against the expected digest passed
1146 * as argument.
1147 */
1148
1149 switch (mac->cd_format) {
1150
1151 case CRYPTO_DATA_RAW:
1152 if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1153 mac->cd_offset, digest_len) != 0)
1154 ret = CRYPTO_INVALID_MAC;
1155 break;
1156
1157 case CRYPTO_DATA_UIO: {
1158 off_t offset = mac->cd_offset;
1159 uint_t vec_idx;
1160 off_t scratch_offset = 0;
1161 size_t length = digest_len;
1162 size_t cur_len;
1163
1164 /* we support only kernel buffer */
1165 if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1166 return (CRYPTO_ARGUMENTS_BAD);
1167
1168 /* jump to the first iovec containing the expected digest */
1169 for (vec_idx = 0;
1170 offset >= mac->cd_uio->uio_iov[vec_idx].iov_len &&
1171 vec_idx < mac->cd_uio->uio_iovcnt;
1172 offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1173 ;
1174 if (vec_idx == mac->cd_uio->uio_iovcnt) {
1175 /*
1176 * The caller specified an offset that is
1177 * larger than the total size of the buffers
1178 * it provided.
1179 */
1180 ret = CRYPTO_DATA_LEN_RANGE;
1181 break;
1182 }
1183
1184 /* do the comparison of computed digest vs specified one */
1185 while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1186 cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1187 offset, length);
1188
1189 if (bcmp(digest + scratch_offset,
1190 mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1191 cur_len) != 0) {
1192 ret = CRYPTO_INVALID_MAC;
1193 break;
1194 }
1195
1196 length -= cur_len;
1197 vec_idx++;
1198 scratch_offset += cur_len;
1199 offset = 0;
1200 }
1201 break;
1202 }
1203
1204 default:
1205 ret = CRYPTO_ARGUMENTS_BAD;
1206 }
1207
1208 return (ret);
1209 bail:
1210 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1211 mac->cd_length = 0;
1212 return (ret);
1213 }
1214
1215 /*
1216 * KCF software provider context management entry points.
1217 */
1218
1219 /* ARGSUSED */
1220 static int
1221 sha2_create_ctx_template(crypto_provider_handle_t provider,
1222 crypto_mechanism_t *mechanism, crypto_key_t *key,
1223 crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1224 crypto_req_handle_t req)
1225 {
1226 sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1227 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1228 uint32_t sha_digest_len, sha_hmac_block_size;
1229
1230 /*
1231 * Set the digest length and block size to values appropriate to the
1232 * mechanism
1233 */
1234 switch (mechanism->cm_type) {
1235 case SHA256_HMAC_MECH_INFO_TYPE:
1236 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1237 sha_digest_len = SHA256_DIGEST_LENGTH;
1238 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1239 break;
1240 default:
1241 return (CRYPTO_MECHANISM_INVALID);
1242 }
1243
1244 /* Add support for key by attributes (RFE 4706552) */
1245 if (key->ck_format != CRYPTO_KEY_RAW)
1246 return (CRYPTO_ARGUMENTS_BAD);
1247
1248 /*
1249 * Allocate and initialize SHA2 context.
1250 */
1251 sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1252 crypto_kmflag(req));
1253 if (sha2_hmac_ctx_tmpl == NULL)
1254 return (CRYPTO_HOST_MEMORY);
1255
1256 sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1257
1258 if (keylen_in_bytes > sha_hmac_block_size) {
1259 uchar_t digested_key[SHA256_DIGEST_LENGTH];
1260
1261 /*
1262 * Hash the passed-in key to get a smaller key.
1263 * The inner context is used since it hasn't been
1264 * initialized yet.
1265 */
1266 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1267 &sha2_hmac_ctx_tmpl->hc_icontext,
1268 key->ck_data, keylen_in_bytes, digested_key);
1269 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1270 sha_digest_len);
1271 } else {
1272 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1273 keylen_in_bytes);
1274 }
1275
1276 *ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1277 *ctx_template_size = sizeof (sha2_hmac_ctx_t);
1278
1279 return (CRYPTO_SUCCESS);
1280 }
1281
1282 static int
1283 sha2_free_context(crypto_ctx_t *ctx)
1284 {
1285 uint_t ctx_len;
1286
1287 if (ctx->cc_provider_private == NULL)
1288 return (CRYPTO_SUCCESS);
1289
1290 /*
1291 * We have to free either SHA2 or SHA2-HMAC contexts, which
1292 * have different lengths.
1293 *
1294 * Note: Below is dependent on the mechanism ordering.
1295 */
1296
1297 if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1298 ctx_len = sizeof (sha2_ctx_t);
1299 else
1300 ctx_len = sizeof (sha2_hmac_ctx_t);
1301
1302 bzero(ctx->cc_provider_private, ctx_len);
1303 kmem_free(ctx->cc_provider_private, ctx_len);
1304 ctx->cc_provider_private = NULL;
1305
1306 return (CRYPTO_SUCCESS);
1307 }