/* module/icp/io/aes.c */
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#include <sys/modctl.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>

#define	CRYPTO_PROVIDER_NAME "aes"

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1, { (void *)&modlcrypto, NULL }
};

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
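
/*
 * Illustrative sketch (not part of the provider): a KCF consumer
 * selecting one of the mechanisms above passes the matching
 * PKCS#11-style parameter through crypto_mechanism_t.  The field names
 * below are the ones this file consumes; the iv/aad buffers and the
 * 12-byte IV length are hypothetical caller choices.
 *
 *	CK_AES_GCM_PARAMS params;
 *
 *	params.pIv = iv;
 *	params.ulIvLen = 12;
 *	params.pAAD = aad;
 *	params.ulAADLen = aad_len;
 *	params.ulTagBits = 128;
 *
 *	crypto_mechanism_t mech;
 *
 *	mech.cm_type = AES_GCM_MECH_INFO_TYPE;
 *	mech.cm_param = (char *)&params;
 *	mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
 *
 * aes_check_mech_param() below rejects a cm_param_len that does not
 * match the parameter structure of the chosen mechanism.
 */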

/* operations are in-place if the output buffer is NULL */
#define	AES_ARG_INPLACE(input, output)				\
	if ((output) == NULL)					\
		(output) = (input);
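
/*
 * Example (hypothetical call): aes_encrypt(ctx, buf, NULL, req) runs
 * in place; the macro aliases the output to the input, so the
 * ciphertext overwrites the plaintext in buf.
 */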

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_mac_ops_t aes_mac_ops = {
	NULL,
	NULL,
	NULL,
	NULL,
	aes_mac_atomic,
	aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};

static crypto_ops_t aes_crypto_ops = {{{{{
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops
}}}}};

static crypto_provider_info_t aes_prov_info = {{{{
	CRYPTO_SPI_VERSION_1,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
}}}};

static crypto_kcf_provider_handle_t aes_prov_handle = 0;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
aes_mod_init(void)
{
	int ret;

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF.  If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}

int
aes_mod_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (mod_remove(&modlinkage));
}

static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
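
/*
 * Note: the atomic entry points below call aes_check_mech_param() with
 * ctx == NULL, using it purely to validate the mechanism parameter; the
 * mode context is only allocated when a ctx pointer is supplied.
 */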

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length must be either 128, 192, or 256 bits */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}
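
/*
 * Worked example for the checks above: ck_length values of 128, 192,
 * and 256 bits pass both tests, while e.g. 160 bits falls inside
 * [AES_MINBITS, AES_MAXBITS] but fails the multiple-of-64 test,
 * since (160 & 63) == 32, assuming AES_MINBITS/AES_MAXBITS are 128/256.
 */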

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}
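
/*
 * aes_copy_block64() copies a 16-byte block as two 64-bit loads when
 * the source is 8-byte aligned and falls back to the byte-wise
 * AES_COPY_BLOCK() macro otherwise, so callers need not guarantee
 * alignment.
 */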

static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * Return the length needed to store the output, without
	 * destroying the context, when the output buffer is too small.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to the existing ciphertext, so adjust the remaining
		 * length value accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to the existing ciphertext, so adjust the remaining
		 * length value accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = gcm_ctx->gcm_processed_data_len -
		    gcm_ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final(gcm_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			/* let the out: path free the key schedule */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		/* FALLTHRU */
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	return (ret);
}

/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			/* let the out: path free the key schedule */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			vmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size,
	    crypto_kmflag(req))) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule.  Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		bzero(keysched, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}

static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}

static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size = 0;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			/*
			 * break, not return, so the cleanup below can
			 * free a provider-owned key schedule
			 */
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}
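
/*
 * Illustrative sketch (hypothetical values): a CTR-mode caller fills
 * CK_AES_CTR_PARAMS with a 16-byte initial counter block and states how
 * many of its low-order bits act as the incrementing counter, e.g.
 *
 *	CK_AES_CTR_PARAMS ctr_params;
 *
 *	bcopy(iv, ctr_params.cb, sizeof (ctr_params.cb));
 *	ctr_params.ulCounterBits = 64;
 *
 * ctr_init_ctx() above consumes exactly these two fields.
 */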

static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}
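
/*
 * In other words, GMAC is implemented as GCM over empty plaintext: the
 * caller's data becomes the AAD, the IV length and tag size are fixed
 * by AES_GMAC_IV_LEN and AES_GMAC_TAG_BITS (see aes_impl.h), and the
 * two atomic entry points below simply forward to aes_encrypt_atomic()
 * and aes_decrypt_atomic() with null_crypto_data as the payload.
 */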

static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
	    key, &null_crypto_data, mac, template, req));
}

static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
	    key, mac, &null_crypto_data, template, req));
}