/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#define _AES_IMPL
#include <aes/aes_impl.h>
#include <modes/gcm_impl.h>

/*
 * Mechanism info structure passed to KCF during registration.
 */
static const crypto_mech_info_t aes_mech_info_tab[] = {
        /* AES_ECB */
        {SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
            AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
        /* AES_CBC */
        {SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
            AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
        /* AES_CTR */
        {SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
            AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
        /* AES_CCM */
        {SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
            AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
        /* AES_GCM */
        {SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
            AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
        /* AES_GMAC */
        {SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
            CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
            CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
            CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
            AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static const crypto_control_ops_t aes_control_ops = {
        aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static const crypto_cipher_ops_t aes_cipher_ops = {
        .encrypt_init = aes_encrypt_init,
        .encrypt = aes_encrypt,
        .encrypt_update = aes_encrypt_update,
        .encrypt_final = aes_encrypt_final,
        .encrypt_atomic = aes_encrypt_atomic,
        .decrypt_init = aes_decrypt_init,
        .decrypt = aes_decrypt,
        .decrypt_update = aes_decrypt_update,
        .decrypt_final = aes_decrypt_final,
        .decrypt_atomic = aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static const crypto_mac_ops_t aes_mac_ops = {
        .mac_init = NULL,
        .mac = NULL,
        .mac_update = NULL,
        .mac_final = NULL,
        .mac_atomic = aes_mac_atomic,
        .mac_verify_atomic = aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t aes_ctx_ops = {
        .create_ctx_template = aes_create_ctx_template,
        .free_context = aes_free_context
};

static const crypto_ops_t aes_crypto_ops = {{{{{
        &aes_control_ops,
        NULL,
        &aes_cipher_ops,
        &aes_mac_ops,
        NULL,
        NULL,
        NULL,
        NULL,
        NULL,
        NULL,
        NULL,
        NULL,
        NULL,
        &aes_ctx_ops
}}}}};

static const crypto_provider_info_t aes_prov_info = {{{{
        CRYPTO_SPI_VERSION_1,
        "AES Software Provider",
        CRYPTO_SW_PROVIDER,
        NULL,
        &aes_crypto_ops,
        sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
        aes_mech_info_tab
}}}};

static crypto_kcf_provider_handle_t aes_prov_handle = 0;
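
/*
 * Zero-length crypto_data_t handed to the GCM cipher paths as the
 * plaintext input (MAC generation) or plaintext output (MAC
 * verification) when GMAC operations are implemented in terms of GCM.
 */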
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
aes_mod_init(void)
{
        /* Determine the fastest available implementation. */
        aes_impl_init();
        gcm_impl_init();

        /* Register with KCF. If the registration fails, remove the module. */
        if (crypto_register_provider(&aes_prov_info, &aes_prov_handle))
                return (EACCES);

        return (0);
}

int
aes_mod_fini(void)
{
        /* Unregister from KCF if module is registered */
        if (aes_prov_handle != 0) {
                if (crypto_unregister_provider(aes_prov_handle))
                        return (EBUSY);

                aes_prov_handle = 0;
        }

        return (0);
}
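
/*
 * Validate the mechanism parameter length for the given mechanism type
 * and, when ctx is non-NULL, allocate the matching mode context with
 * the mode's allocator.
 */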
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
        void *p = NULL;
        boolean_t param_required = B_TRUE;
        size_t param_len;
        void *(*alloc_fun)(int);
        int rv = CRYPTO_SUCCESS;

        switch (mechanism->cm_type) {
        case AES_ECB_MECH_INFO_TYPE:
                param_required = B_FALSE;
                alloc_fun = ecb_alloc_ctx;
                break;
        case AES_CBC_MECH_INFO_TYPE:
                param_len = AES_BLOCK_LEN;
                alloc_fun = cbc_alloc_ctx;
                break;
        case AES_CTR_MECH_INFO_TYPE:
                param_len = sizeof (CK_AES_CTR_PARAMS);
                alloc_fun = ctr_alloc_ctx;
                break;
        case AES_CCM_MECH_INFO_TYPE:
                param_len = sizeof (CK_AES_CCM_PARAMS);
                alloc_fun = ccm_alloc_ctx;
                break;
        case AES_GCM_MECH_INFO_TYPE:
                param_len = sizeof (CK_AES_GCM_PARAMS);
                alloc_fun = gcm_alloc_ctx;
                break;
        case AES_GMAC_MECH_INFO_TYPE:
                param_len = sizeof (CK_AES_GMAC_PARAMS);
                alloc_fun = gmac_alloc_ctx;
                break;
        default:
                rv = CRYPTO_MECHANISM_INVALID;
                return (rv);
        }
        if (param_required && mechanism->cm_param != NULL &&
            mechanism->cm_param_len != param_len) {
                rv = CRYPTO_MECHANISM_PARAM_INVALID;
        }
        if (ctx != NULL) {
                p = (alloc_fun)(kmflag);
                *ctx = p;
        }
        return (rv);
}

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
        /*
         * Only keys by value are supported by this module.
         */
        switch (key->ck_format) {
        case CRYPTO_KEY_RAW:
                if (key->ck_length < AES_MINBITS ||
                    key->ck_length > AES_MAXBITS) {
                        return (CRYPTO_KEY_SIZE_RANGE);
                }

                /* key length must be either 128, 192, or 256 bits */
                if ((key->ck_length & 63) != 0)
                        return (CRYPTO_KEY_SIZE_RANGE);
                break;
        default:
                return (CRYPTO_KEY_TYPE_INCONSISTENT);
        }

        aes_init_keysched(key->ck_data, key->ck_length, newbie);
        return (CRYPTO_SUCCESS);
}

/*
 * KCF software provider control entry points.
 */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
        (void) provider;
        *status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
        return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
        return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
        aes_ctx_t *aes_ctx;
        int rv;
        int kmflag;

        /*
         * Only keys by value are supported by this module.
         */
        if (key->ck_format != CRYPTO_KEY_RAW) {
                return (CRYPTO_KEY_TYPE_INCONSISTENT);
        }

        kmflag = crypto_kmflag(req);
        if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
            != CRYPTO_SUCCESS)
                return (rv);

        rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
            is_encrypt_init);
        if (rv != CRYPTO_SUCCESS) {
                crypto_free_mode_ctx(aes_ctx);
                return (rv);
        }

        ctx->cc_provider_private = aes_ctx;

        return (CRYPTO_SUCCESS);
}
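
/*
 * Copy a 16-byte AES block into a pair of uint64_t words, using two
 * 64-bit loads when the source is suitably aligned and falling back to
 * a byte-wise copy otherwise.
 */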
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
        if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
                /* LINTED: pointer alignment */
                out[0] = *(uint64_t *)&in[0];
                /* LINTED: pointer alignment */
                out[1] = *(uint64_t *)&in[8];
        } else {
                uint8_t *iv8 = (uint8_t *)&out[0];

                AES_COPY_BLOCK(in, iv8);
        }
}

static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
        int ret = CRYPTO_FAILED;

        aes_ctx_t *aes_ctx;
        size_t saved_length, saved_offset, length_needed;

        ASSERT(ctx->cc_provider_private != NULL);
        aes_ctx = ctx->cc_provider_private;

        /*
         * For block ciphers, plaintext must be a multiple of AES block size.
         * This test is only valid for ciphers whose blocksize is a power of 2.
         */
        if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
            == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
                return (CRYPTO_DATA_LEN_RANGE);

        ASSERT(ciphertext != NULL);

        /*
         * Return the length needed to store the output; do not destroy
         * the context when the supplied buffer is too small.
         */
        switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
        case CCM_MODE:
                length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
                break;
        case GCM_MODE:
                length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
                break;
        case GMAC_MODE:
                if (plaintext->cd_length != 0)
                        return (CRYPTO_ARGUMENTS_BAD);

                length_needed = aes_ctx->ac_tag_len;
                break;
        default:
                length_needed = plaintext->cd_length;
        }

        if (ciphertext->cd_length < length_needed) {
                ciphertext->cd_length = length_needed;
                return (CRYPTO_BUFFER_TOO_SMALL);
        }

        saved_length = ciphertext->cd_length;
        saved_offset = ciphertext->cd_offset;

        /*
         * Do an update on the specified input data.
         */
        ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
        if (ret != CRYPTO_SUCCESS) {
                return (ret);
        }

        /*
         * For CCM mode, ccm_encrypt_final() will take care of any
         * left-over unprocessed data and compute the MAC.
         */
        if (aes_ctx->ac_flags & CCM_MODE) {
                /*
                 * ccm_encrypt_final() computes the MAC and appends it to
                 * the existing ciphertext, so the left-over length value
                 * must be adjusted accordingly.
                 */

                /* the order of the following two lines MUST NOT be reversed */
                ciphertext->cd_offset = ciphertext->cd_length;
                ciphertext->cd_length = saved_length - ciphertext->cd_length;
                ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
                if (ret != CRYPTO_SUCCESS) {
                        return (ret);
                }

                if (plaintext != ciphertext) {
                        ciphertext->cd_length =
                            ciphertext->cd_offset - saved_offset;
                }
                ciphertext->cd_offset = saved_offset;
        } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
                /*
                 * gcm_encrypt_final() computes the MAC and appends it to
                 * the existing ciphertext, so the left-over length value
                 * must be adjusted accordingly.
                 */

                /* the order of the following two lines MUST NOT be reversed */
                ciphertext->cd_offset = ciphertext->cd_length;
                ciphertext->cd_length = saved_length - ciphertext->cd_length;
                ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
                    aes_xor_block);
                if (ret != CRYPTO_SUCCESS) {
                        return (ret);
                }

                if (plaintext != ciphertext) {
                        ciphertext->cd_length =
                            ciphertext->cd_offset - saved_offset;
                }
                ciphertext->cd_offset = saved_offset;
        }

        ASSERT(aes_ctx->ac_remainder_len == 0);
        (void) aes_free_context(ctx);

        return (ret);
}

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
        int ret = CRYPTO_FAILED;

        aes_ctx_t *aes_ctx;
        off_t saved_offset;
        size_t saved_length, length_needed;

        ASSERT(ctx->cc_provider_private != NULL);
        aes_ctx = ctx->cc_provider_private;

        /*
         * For block ciphers, ciphertext must be a multiple of AES block size.
         * This test is only valid for ciphers whose blocksize is a power of 2.
         */
        if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
            == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
                return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
        }

        ASSERT(plaintext != NULL);

        /*
         * Return the length needed to store the output; do not destroy
         * the context when the plaintext buffer is too small.
         *
         * CCM:  plaintext is MAC len smaller than ciphertext
         * GCM:  plaintext is TAG len smaller than ciphertext
         * GMAC: plaintext length must be zero
         */
        switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
        case CCM_MODE:
                length_needed = aes_ctx->ac_processed_data_len;
                break;
        case GCM_MODE:
                length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
                break;
        case GMAC_MODE:
                if (plaintext->cd_length != 0)
                        return (CRYPTO_ARGUMENTS_BAD);

                length_needed = 0;
                break;
        default:
                length_needed = ciphertext->cd_length;
        }

        if (plaintext->cd_length < length_needed) {
                plaintext->cd_length = length_needed;
                return (CRYPTO_BUFFER_TOO_SMALL);
        }

        saved_offset = plaintext->cd_offset;
        saved_length = plaintext->cd_length;

        /*
         * Do an update on the specified input data.
         */
        ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
        if (ret != CRYPTO_SUCCESS) {
                goto cleanup;
        }

        if (aes_ctx->ac_flags & CCM_MODE) {
                ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
                ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

                /* the order of the following two lines MUST NOT be reversed */
                plaintext->cd_offset = plaintext->cd_length;
                plaintext->cd_length = saved_length - plaintext->cd_length;

                ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
                    aes_xor_block);
                if (ret == CRYPTO_SUCCESS) {
                        if (plaintext != ciphertext) {
                                plaintext->cd_length =
                                    plaintext->cd_offset - saved_offset;
                        }
                } else {
                        plaintext->cd_length = saved_length;
                }

                plaintext->cd_offset = saved_offset;
        } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
                /* the order of the following two lines MUST NOT be reversed */
                plaintext->cd_offset = plaintext->cd_length;
                plaintext->cd_length = saved_length - plaintext->cd_length;

                ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
                if (ret == CRYPTO_SUCCESS) {
                        if (plaintext != ciphertext) {
                                plaintext->cd_length =
                                    plaintext->cd_offset - saved_offset;
                        }
                } else {
                        plaintext->cd_length = saved_length;
                }

                plaintext->cd_offset = saved_offset;
        }

        ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
        (void) aes_free_context(ctx);

        return (ret);
}

static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
        (void) req;
        off_t saved_offset;
        size_t saved_length, out_len;
        int ret = CRYPTO_SUCCESS;
        aes_ctx_t *aes_ctx;

        ASSERT(ctx->cc_provider_private != NULL);
        aes_ctx = ctx->cc_provider_private;

        ASSERT(ciphertext != NULL);

        /* compute number of bytes that will hold the ciphertext */
        out_len = aes_ctx->ac_remainder_len;
        out_len += plaintext->cd_length;
        out_len &= ~(AES_BLOCK_LEN - 1);

        /* return length needed to store the output */
        if (ciphertext->cd_length < out_len) {
                ciphertext->cd_length = out_len;
                return (CRYPTO_BUFFER_TOO_SMALL);
        }

        saved_offset = ciphertext->cd_offset;
        saved_length = ciphertext->cd_length;

        /*
         * Do the AES update on the specified input data.
         */
        switch (plaintext->cd_format) {
        case CRYPTO_DATA_RAW:
                ret = crypto_update_iov(ctx->cc_provider_private,
                    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
                    aes_copy_block64);
                break;
        case CRYPTO_DATA_UIO:
                ret = crypto_update_uio(ctx->cc_provider_private,
                    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
                    aes_copy_block64);
                break;
        default:
                ret = CRYPTO_ARGUMENTS_BAD;
        }

        /*
         * Since AES counter mode is a stream cipher, we call
         * ctr_mode_final() to pick up any remaining bytes.
         * It is an internal function that does not destroy
         * the context like *normal* final routines.
         */
        if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
                ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
                    ciphertext, aes_encrypt_block);
        }

        if (ret == CRYPTO_SUCCESS) {
                if (plaintext != ciphertext)
                        ciphertext->cd_length =
                            ciphertext->cd_offset - saved_offset;
        } else {
                ciphertext->cd_length = saved_length;
        }
        ciphertext->cd_offset = saved_offset;

        return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
        off_t saved_offset;
        size_t saved_length, out_len;
        int ret = CRYPTO_SUCCESS;
        aes_ctx_t *aes_ctx;

        ASSERT(ctx->cc_provider_private != NULL);
        aes_ctx = ctx->cc_provider_private;

        ASSERT(plaintext != NULL);

        /*
         * Compute number of bytes that will hold the plaintext.
         * This is not necessary for CCM, GCM, and GMAC since these
         * mechanisms never return plaintext for update operations.
         */
        if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
                out_len = aes_ctx->ac_remainder_len;
                out_len += ciphertext->cd_length;
                out_len &= ~(AES_BLOCK_LEN - 1);

                /* return length needed to store the output */
                if (plaintext->cd_length < out_len) {
                        plaintext->cd_length = out_len;
                        return (CRYPTO_BUFFER_TOO_SMALL);
                }
        }

        saved_offset = plaintext->cd_offset;
        saved_length = plaintext->cd_length;

        if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
                gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

        /*
         * Do the AES update on the specified input data.
         */
        switch (ciphertext->cd_format) {
        case CRYPTO_DATA_RAW:
                ret = crypto_update_iov(ctx->cc_provider_private,
                    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
                    aes_copy_block64);
                break;
        case CRYPTO_DATA_UIO:
                ret = crypto_update_uio(ctx->cc_provider_private,
                    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
                    aes_copy_block64);
                break;
        default:
                ret = CRYPTO_ARGUMENTS_BAD;
        }

        /*
         * Since AES counter mode is a stream cipher, we call
         * ctr_mode_final() to pick up any remaining bytes.
         * It is an internal function that does not destroy
         * the context like *normal* final routines.
         */
        if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
                ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
                    aes_encrypt_block);
                if (ret == CRYPTO_DATA_LEN_RANGE)
                        ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
        }

        if (ret == CRYPTO_SUCCESS) {
                if (ciphertext != plaintext)
                        plaintext->cd_length =
                            plaintext->cd_offset - saved_offset;
        } else {
                plaintext->cd_length = saved_length;
        }
        plaintext->cd_offset = saved_offset;

        return (ret);
}
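
/*
 * Multi-part encrypt final: flush any mode-specific remainder or
 * authentication tag, then release the context.
 */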
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
        (void) req;
        aes_ctx_t *aes_ctx;
        int ret;

        ASSERT(ctx->cc_provider_private != NULL);
        aes_ctx = ctx->cc_provider_private;

        if (data->cd_format != CRYPTO_DATA_RAW &&
            data->cd_format != CRYPTO_DATA_UIO) {
                return (CRYPTO_ARGUMENTS_BAD);
        }

        if (aes_ctx->ac_flags & CTR_MODE) {
                if (aes_ctx->ac_remainder_len > 0) {
                        ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
                            aes_encrypt_block);
                        if (ret != CRYPTO_SUCCESS)
                                return (ret);
                }
        } else if (aes_ctx->ac_flags & CCM_MODE) {
                ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
                if (ret != CRYPTO_SUCCESS) {
                        return (ret);
                }
        } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
                size_t saved_offset = data->cd_offset;

                ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
                    aes_xor_block);
                if (ret != CRYPTO_SUCCESS) {
                        return (ret);
                }
                data->cd_length = data->cd_offset - saved_offset;
                data->cd_offset = saved_offset;
        } else {
                /*
                 * There must be no unprocessed plaintext.
                 * This happens if the length of the last data is
                 * not a multiple of the AES block length.
                 */
                if (aes_ctx->ac_remainder_len > 0) {
                        return (CRYPTO_DATA_LEN_RANGE);
                }
                data->cd_length = 0;
        }

        (void) aes_free_context(ctx);

        return (CRYPTO_SUCCESS);
}
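
/*
 * Multi-part decrypt final: for the authenticated modes, verify the
 * MAC/tag and return the buffered plaintext, then release the context.
 */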
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
        (void) req;
        aes_ctx_t *aes_ctx;
        int ret;
        off_t saved_offset;
        size_t saved_length;

        ASSERT(ctx->cc_provider_private != NULL);
        aes_ctx = ctx->cc_provider_private;

        if (data->cd_format != CRYPTO_DATA_RAW &&
            data->cd_format != CRYPTO_DATA_UIO) {
                return (CRYPTO_ARGUMENTS_BAD);
        }

        /*
         * There must be no unprocessed ciphertext.
         * This happens if the length of the last ciphertext is
         * not a multiple of the AES block length.
         */
        if (aes_ctx->ac_remainder_len > 0) {
                if ((aes_ctx->ac_flags & CTR_MODE) == 0)
                        return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
                else {
                        ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
                            aes_encrypt_block);
                        if (ret == CRYPTO_DATA_LEN_RANGE)
                                ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
                        if (ret != CRYPTO_SUCCESS)
                                return (ret);
                }
        }

        if (aes_ctx->ac_flags & CCM_MODE) {
                /*
                 * All of the plaintext is returned here; make sure the
                 * plaintext buffer is big enough.
                 */
                size_t pt_len = aes_ctx->ac_data_len;
                if (data->cd_length < pt_len) {
                        data->cd_length = pt_len;
                        return (CRYPTO_BUFFER_TOO_SMALL);
                }

                ASSERT(aes_ctx->ac_processed_data_len == pt_len);
                ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
                saved_offset = data->cd_offset;
                saved_length = data->cd_length;
                ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
                    aes_xor_block);
                if (ret == CRYPTO_SUCCESS) {
                        data->cd_length = data->cd_offset - saved_offset;
                } else {
                        data->cd_length = saved_length;
                }

                data->cd_offset = saved_offset;
                if (ret != CRYPTO_SUCCESS) {
                        return (ret);
                }
        } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
                /*
                 * All of the plaintext is returned here; make sure the
                 * plaintext buffer is big enough.
                 */
                gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
                size_t pt_len = gcm_ctx->gcm_processed_data_len -
                    gcm_ctx->gcm_tag_len;

                if (data->cd_length < pt_len) {
                        data->cd_length = pt_len;
                        return (CRYPTO_BUFFER_TOO_SMALL);
                }

                saved_offset = data->cd_offset;
                saved_length = data->cd_length;
                ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
                if (ret == CRYPTO_SUCCESS) {
                        data->cd_length = data->cd_offset - saved_offset;
                } else {
                        data->cd_length = saved_length;
                }

                data->cd_offset = saved_offset;
                if (ret != CRYPTO_SUCCESS) {
                        return (ret);
                }
        }

        if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
                data->cd_length = 0;
        }

        (void) aes_free_context(ctx);

        return (CRYPTO_SUCCESS);
}
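
/*
 * KCF software provider atomic encrypt/decrypt entry points.
 */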
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
        (void) provider, (void) session_id;
        aes_ctx_t aes_ctx;      /* on the stack */
        off_t saved_offset;
        size_t saved_length;
        size_t length_needed;
        int ret;

        ASSERT(ciphertext != NULL);

        /*
         * CTR, CCM, GCM, and GMAC modes do not require that plaintext
         * be a multiple of AES block size.
         */
        switch (mechanism->cm_type) {
        case AES_CTR_MECH_INFO_TYPE:
        case AES_CCM_MECH_INFO_TYPE:
        case AES_GCM_MECH_INFO_TYPE:
        case AES_GMAC_MECH_INFO_TYPE:
                break;
        default:
                if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
                        return (CRYPTO_DATA_LEN_RANGE);
        }

        if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
                return (ret);

        bzero(&aes_ctx, sizeof (aes_ctx_t));

        ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
            crypto_kmflag(req), B_TRUE);
        if (ret != CRYPTO_SUCCESS)
                return (ret);

        switch (mechanism->cm_type) {
        case AES_CCM_MECH_INFO_TYPE:
                length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
                break;
        case AES_GMAC_MECH_INFO_TYPE:
                if (plaintext->cd_length != 0)
                        return (CRYPTO_ARGUMENTS_BAD);
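                /*
                 * GMAC is GCM applied to AAD only, so fall through and
                 * share the GCM tag-length accounting.
                 */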
                zfs_fallthrough;
        case AES_GCM_MECH_INFO_TYPE:
                length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
                break;
        default:
                length_needed = plaintext->cd_length;
        }

        /* return size of buffer needed to store output */
        if (ciphertext->cd_length < length_needed) {
                ciphertext->cd_length = length_needed;
                ret = CRYPTO_BUFFER_TOO_SMALL;
                goto out;
        }

        saved_offset = ciphertext->cd_offset;
        saved_length = ciphertext->cd_length;

        /*
         * Do an update on the specified input data.
         */
        switch (plaintext->cd_format) {
        case CRYPTO_DATA_RAW:
                ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
                    aes_encrypt_contiguous_blocks, aes_copy_block64);
                break;
        case CRYPTO_DATA_UIO:
                ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
                    aes_encrypt_contiguous_blocks, aes_copy_block64);
                break;
        default:
                ret = CRYPTO_ARGUMENTS_BAD;
        }

        if (ret == CRYPTO_SUCCESS) {
                if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
                        ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
                            ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
                            aes_xor_block);
                        if (ret != CRYPTO_SUCCESS)
                                goto out;
                        ASSERT(aes_ctx.ac_remainder_len == 0);
                } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
                    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
                        ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
                            ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
                            aes_copy_block, aes_xor_block);
                        if (ret != CRYPTO_SUCCESS)
                                goto out;
                        ASSERT(aes_ctx.ac_remainder_len == 0);
                } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
                        if (aes_ctx.ac_remainder_len > 0) {
                                ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
                                    ciphertext, aes_encrypt_block);
                                if (ret != CRYPTO_SUCCESS)
                                        goto out;
                        }
                } else {
                        ASSERT(aes_ctx.ac_remainder_len == 0);
                }

                if (plaintext != ciphertext) {
                        ciphertext->cd_length =
                            ciphertext->cd_offset - saved_offset;
                }
        } else {
                ciphertext->cd_length = saved_length;
        }
        ciphertext->cd_offset = saved_offset;

out:
        if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
                bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
                kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
        }
#ifdef CAN_USE_GCM_ASM
        if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE) &&
            ((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {

                gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;

                bzero(ctx->gcm_Htable, ctx->gcm_htab_len);
                kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
        }
#endif

        return (ret);
}

static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
        (void) provider, (void) session_id;
        aes_ctx_t aes_ctx;      /* on the stack */
        off_t saved_offset;
        size_t saved_length;
        size_t length_needed;
        int ret;

        ASSERT(plaintext != NULL);

        /*
         * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
         * be a multiple of AES block size.
         */
        switch (mechanism->cm_type) {
        case AES_CTR_MECH_INFO_TYPE:
        case AES_CCM_MECH_INFO_TYPE:
        case AES_GCM_MECH_INFO_TYPE:
        case AES_GMAC_MECH_INFO_TYPE:
                break;
        default:
                if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
                        return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
        }

        if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
                return (ret);

        bzero(&aes_ctx, sizeof (aes_ctx_t));

        ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
            crypto_kmflag(req), B_FALSE);
        if (ret != CRYPTO_SUCCESS)
                return (ret);

        switch (mechanism->cm_type) {
        case AES_CCM_MECH_INFO_TYPE:
                length_needed = aes_ctx.ac_data_len;
                break;
        case AES_GCM_MECH_INFO_TYPE:
                length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
                break;
        case AES_GMAC_MECH_INFO_TYPE:
                if (plaintext->cd_length != 0)
                        return (CRYPTO_ARGUMENTS_BAD);
                length_needed = 0;
                break;
        default:
                length_needed = ciphertext->cd_length;
        }

        /* return size of buffer needed to store output */
        if (plaintext->cd_length < length_needed) {
                plaintext->cd_length = length_needed;
                ret = CRYPTO_BUFFER_TOO_SMALL;
                goto out;
        }

        saved_offset = plaintext->cd_offset;
        saved_length = plaintext->cd_length;

        if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
            mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
                gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

        /*
         * Do an update on the specified input data.
         */
        switch (ciphertext->cd_format) {
        case CRYPTO_DATA_RAW:
                ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
                    aes_decrypt_contiguous_blocks, aes_copy_block64);
                break;
        case CRYPTO_DATA_UIO:
                ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
                    aes_decrypt_contiguous_blocks, aes_copy_block64);
                break;
        default:
                ret = CRYPTO_ARGUMENTS_BAD;
        }

        if (ret == CRYPTO_SUCCESS) {
                if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
                        ASSERT(aes_ctx.ac_processed_data_len
                            == aes_ctx.ac_data_len);
                        ASSERT(aes_ctx.ac_processed_mac_len
                            == aes_ctx.ac_mac_len);
                        ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
                            plaintext, AES_BLOCK_LEN, aes_encrypt_block,
                            aes_copy_block, aes_xor_block);
                        ASSERT(aes_ctx.ac_remainder_len == 0);
                        if ((ret == CRYPTO_SUCCESS) &&
                            (ciphertext != plaintext)) {
                                plaintext->cd_length =
                                    plaintext->cd_offset - saved_offset;
                        } else {
                                plaintext->cd_length = saved_length;
                        }
                } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
                    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
                        ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
                            plaintext, AES_BLOCK_LEN, aes_encrypt_block,
                            aes_xor_block);
                        ASSERT(aes_ctx.ac_remainder_len == 0);
                        if ((ret == CRYPTO_SUCCESS) &&
                            (ciphertext != plaintext)) {
                                plaintext->cd_length =
                                    plaintext->cd_offset - saved_offset;
                        } else {
                                plaintext->cd_length = saved_length;
                        }
                } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
                        ASSERT(aes_ctx.ac_remainder_len == 0);
                        if (ciphertext != plaintext)
                                plaintext->cd_length =
                                    plaintext->cd_offset - saved_offset;
                } else {
                        if (aes_ctx.ac_remainder_len > 0) {
                                ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
                                    plaintext, aes_encrypt_block);
                                if (ret == CRYPTO_DATA_LEN_RANGE)
                                        ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
                                if (ret != CRYPTO_SUCCESS)
                                        goto out;
                        }
                        if (ciphertext != plaintext)
                                plaintext->cd_length =
                                    plaintext->cd_offset - saved_offset;
                }
        } else {
                plaintext->cd_length = saved_length;
        }
        plaintext->cd_offset = saved_offset;

out:
        if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
                bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
                kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
        }

        if (aes_ctx.ac_flags & CCM_MODE) {
                if (aes_ctx.ac_pt_buf != NULL) {
                        vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
                }
        } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
                if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
                        vmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
                            ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
                }
#ifdef CAN_USE_GCM_ASM
                if (((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {
                        gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;

                        bzero(ctx->gcm_Htable, ctx->gcm_htab_len);
                        kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
                }
#endif
        }

        return (ret);
}

/*
 * KCF software provider context template entry points.
 */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
        (void) provider;
        void *keysched;
        size_t size;
        int rv;

        if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
            mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
            mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
            mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
            mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
            mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
                return (CRYPTO_MECHANISM_INVALID);

        if ((keysched = aes_alloc_keysched(&size,
            crypto_kmflag(req))) == NULL) {
                return (CRYPTO_HOST_MEMORY);
        }

        /*
         * Initialize key schedule. Key length information is stored
         * in the key.
         */
        if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
                bzero(keysched, size);
                kmem_free(keysched, size);
                return (rv);
        }

        *tmpl = keysched;
        *tmpl_size = size;

        return (CRYPTO_SUCCESS);
}

static int
aes_free_context(crypto_ctx_t *ctx)
{
        aes_ctx_t *aes_ctx = ctx->cc_provider_private;

        if (aes_ctx != NULL) {
                if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
                        ASSERT(aes_ctx->ac_keysched_len != 0);
                        bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
                        kmem_free(aes_ctx->ac_keysched,
                            aes_ctx->ac_keysched_len);
                }
                crypto_free_mode_ctx(aes_ctx);
                ctx->cc_provider_private = NULL;
        }

        return (CRYPTO_SUCCESS);
}
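
/*
 * Set up an aes_ctx_t for the given mechanism: reuse the caller-supplied
 * key-schedule template when one is provided, otherwise allocate and
 * initialize a schedule the provider owns, then perform the mode-specific
 * parameter and IV initialization.
 */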
static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
        int rv = CRYPTO_SUCCESS;
        void *keysched;
        size_t size = 0;

        if (template == NULL) {
                if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
                        return (CRYPTO_HOST_MEMORY);
                /*
                 * Initialize key schedule.
                 * Key length is stored in the key.
                 */
                if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
                        kmem_free(keysched, size);
                        return (rv);
                }

                aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
                aes_ctx->ac_keysched_len = size;
        } else {
                keysched = template;
        }
        aes_ctx->ac_keysched = keysched;

        switch (mechanism->cm_type) {
        case AES_CBC_MECH_INFO_TYPE:
                rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
                    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
                break;
        case AES_CTR_MECH_INFO_TYPE: {
                CK_AES_CTR_PARAMS *pp;

                if (mechanism->cm_param == NULL ||
                    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
                        return (CRYPTO_MECHANISM_PARAM_INVALID);
                }
                pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
                rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
                    pp->cb, aes_copy_block);
                break;
        }
        case AES_CCM_MECH_INFO_TYPE:
                if (mechanism->cm_param == NULL ||
                    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
                        return (CRYPTO_MECHANISM_PARAM_INVALID);
                }
                rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
                    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
                    aes_xor_block);
                break;
        case AES_GCM_MECH_INFO_TYPE:
                if (mechanism->cm_param == NULL ||
                    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
                        return (CRYPTO_MECHANISM_PARAM_INVALID);
                }
                rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
                    aes_xor_block);
                break;
        case AES_GMAC_MECH_INFO_TYPE:
                if (mechanism->cm_param == NULL ||
                    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
                        return (CRYPTO_MECHANISM_PARAM_INVALID);
                }
                rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
                    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
                    aes_xor_block);
                break;
        case AES_ECB_MECH_INFO_TYPE:
                aes_ctx->ac_flags |= ECB_MODE;
        }

        if (rv != CRYPTO_SUCCESS) {
                if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
                        bzero(keysched, size);
                        kmem_free(keysched, size);
                }
        }

        return (rv);
}
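
/*
 * Translate a GMAC mechanism into the equivalent GCM parameters: the
 * data to be authenticated becomes the AAD, and the IV length and tag
 * width are fixed by the GMAC definition.
 */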
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
        /* LINTED: pointer alignment */
        CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

        if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
                return (CRYPTO_MECHANISM_INVALID);

        if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
                return (CRYPTO_MECHANISM_PARAM_INVALID);

        if (params->pIv == NULL)
                return (CRYPTO_MECHANISM_PARAM_INVALID);

        gcm_params->pIv = params->pIv;
        gcm_params->ulIvLen = AES_GMAC_IV_LEN;
        gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

        if (data == NULL)
                return (CRYPTO_SUCCESS);

        if (data->cd_format != CRYPTO_DATA_RAW)
                return (CRYPTO_ARGUMENTS_BAD);

        gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
        gcm_params->ulAADLen = data->cd_length;
        return (CRYPTO_SUCCESS);
}
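
/*
 * GMAC MAC generation is GCM encryption of a zero-length plaintext;
 * the resulting tag is the MAC. Verification runs the corresponding
 * GCM decryption against the supplied MAC.
 */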
static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
        CK_AES_GCM_PARAMS gcm_params;
        crypto_mechanism_t gcm_mech;
        int rv;

        if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
            != CRYPTO_SUCCESS)
                return (rv);

        gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
        gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
        gcm_mech.cm_param = (char *)&gcm_params;

        return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
            key, &null_crypto_data, mac, template, req));
}

static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
        CK_AES_GCM_PARAMS gcm_params;
        crypto_mechanism_t gcm_mech;
        int rv;

        if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
            != CRYPTO_SUCCESS)
                return (rv);

        gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
        gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
        gcm_mech.cm_param = (char *)&gcm_params;

        return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
            key, mac, &null_crypto_data, template, req));
}