/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#include <sys/modctl.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>
#include <modes/gcm_impl.h>
#define	CRYPTO_PROVIDER_NAME "aes"

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1, { (void *)&modlcrypto, NULL }
};

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
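
/*
 * Note on the table above: the AES_GMAC entry additionally advertises
 * the MAC (and SIGN/VERIFY) function groups because GMAC is an
 * authentication-only mode; its MAC requests are served by
 * aes_mac_atomic() and aes_mac_verify_atomic() below.
 */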

/* operations are in-place if the output buffer is NULL */
#define	AES_ARG_INPLACE(input, output)				\
	if ((output) == NULL)					\
		(output) = (input);
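
/*
 * Illustrative note: with a NULL output argument the macro aliases the
 * output to the input, so a use such as
 *
 *	AES_ARG_INPLACE(plaintext, ciphertext);
 *
 * with ciphertext == NULL leaves ciphertext == plaintext and the result
 * is written over the input buffer in place.
 */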

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
	.encrypt_init = aes_encrypt_init,
	.encrypt = aes_encrypt,
	.encrypt_update = aes_encrypt_update,
	.encrypt_final = aes_encrypt_final,
	.encrypt_atomic = aes_encrypt_atomic,
	.decrypt_init = aes_decrypt_init,
	.decrypt = aes_decrypt,
	.decrypt_update = aes_decrypt_update,
	.decrypt_final = aes_decrypt_final,
	.decrypt_atomic = aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_mac_ops_t aes_mac_ops = {
	.mac_init = NULL,
	.mac = NULL,
	.mac_update = NULL,
	.mac_final = NULL,
	.mac_atomic = aes_mac_atomic,
	.mac_verify_atomic = aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
	.create_ctx_template = aes_create_ctx_template,
	.free_context = aes_free_context
};

static crypto_ops_t aes_crypto_ops = {{{{{
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops
}}}}};

static crypto_provider_info_t aes_prov_info = {{{{
	CRYPTO_SPI_VERSION_1,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
}}}};

static crypto_kcf_provider_handle_t aes_prov_handle = 0;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
aes_mod_init(void)
{
	int ret;

	/* Find the fastest implementations and set any explicitly requested. */
	aes_impl_init();
	gcm_impl_init();

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF.  If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}

int
aes_mod_fini(void)
{
	/* Unregister from KCF if the module is registered. */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (mod_remove(&modlinkage));
}

static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		/* ECB takes no mechanism parameter. */
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		return (CRYPTO_MECHANISM_INVALID);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	/*
	 * Only keys by value are supported by this module.
	 */
	switch (key->ck_format) {
	case CRYPTO_KEY_RAW:
		if (key->ck_length < AES_MINBITS ||
		    key->ck_length > AES_MAXBITS) {
			return (CRYPTO_KEY_SIZE_RANGE);
		}

		/* key length (in bits) must be either 128, 192, or 256 */
		if ((key->ck_length & 63) != 0)
			return (CRYPTO_KEY_SIZE_RANGE);
		break;
	default:
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}
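
/*
 * Worked example for the checks above: a raw 192-bit key has
 * ck_length == 192, which lies within [AES_MINBITS, AES_MAXBITS] and
 * satisfies (192 & 63) == 0, so it is accepted; ck_length == 200 would
 * pass the range check but fail the multiple-of-64 test.
 */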

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}

/*
 * Common initialization for the KCF encrypt and decrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

/*
 * Copy a 16-byte AES block.  Use two 64-bit loads and stores when the
 * source is 8-byte aligned; otherwise fall back to a byte-wise copy.
 */
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, the plaintext length must be a multiple of the
	 * AES block size.  This test is only valid for ciphers whose block
	 * size is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * If the output buffer is too small, return only the length needed
	 * to store the output; the context must not be destroyed in that
	 * case.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any left-over
	 * unprocessed data and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() computes the MAC and appends it to the
		 * existing ciphertext, so point the output at the end of the
		 * data written so far and shrink its length to what remains.
		 * The order of the next two lines MUST NOT be reversed:
		 * cd_offset must take the old cd_length before cd_length is
		 * recomputed from it.
		 */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() computes the MAC and appends it to the
		 * existing ciphertext; adjust the offset and remaining length
		 * exactly as in the CCM case above (same ordering constraint).
		 */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, the ciphertext length must be a multiple of the
	 * AES block size.  This test is only valid for ciphers whose block
	 * size is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return the length needed to store the output.
	 * Do not destroy the context when the plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/*
		 * The order of the next two lines MUST NOT be reversed:
		 * cd_offset must take the old cd_length before cd_length is
		 * recomputed from it.
		 */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* Same ordering constraint as in the CCM case above. */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute the number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return the length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute the number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return the length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext; some remains
		 * whenever the total data length is not a multiple of
		 * the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext; some remains whenever
	 * the total ciphertext length is not a multiple of the AES block
	 * length.  CTR mode, being a stream cipher, is the exception.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.  (Renamed from "ctx"
		 * to avoid shadowing the function parameter.)
		 */
		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = gcm_ctx->gcm_processed_data_len -
		    gcm_ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final(gcm_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			/* go through out: so the key schedule is freed */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		/* FALLTHRU */
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	return (ret);
}

/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that ciphertext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
		return (ret);

	bzero(&aes_ctx, sizeof (aes_ctx_t));

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    crypto_kmflag(req), B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0) {
			/* go through out: so the key schedule is freed */
			ret = CRYPTO_ARGUMENTS_BAD;
			goto out;
		}
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
	    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
		gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			vmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size,
	    crypto_kmflag(req))) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize the key schedule.  Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		bzero(keysched, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}

static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}

static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size = 0;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize the key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			bzero(keysched, size);
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	/*
	 * On parameter errors, set rv and break (rather than return) so
	 * that the cleanup below frees a key schedule we allocated.
	 */
	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			rv = CRYPTO_MECHANISM_PARAM_INVALID;
			break;
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
		break;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}
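
/*
 * For reference (per the PKCS#11 definition of CK_AES_CTR_PARAMS used
 * above): pp->cb is the 16-byte initial counter block and
 * pp->ulCounterBits says how many low-order bits of that block form the
 * incrementing counter; e.g. ulCounterBits == 128 makes the whole block
 * the counter.
 */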

/*
 * Translate a GMAC mechanism and its data into the equivalent GCM
 * parameters.
 */
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}
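
/*
 * Sketch of the translation above: a GMAC request over AAD "data" with
 * IV params->pIv becomes a GCM request with the same IV, an IV length
 * of AES_GMAC_IV_LEN, a tag of AES_GMAC_TAG_BITS bits, and the AAD
 * taken from data->cd_raw.  The callers below then pass the empty
 * null_crypto_data as the GCM plaintext/ciphertext, so only the tag
 * (i.e. the MAC) is produced or verified.
 */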

static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
	    key, &null_crypto_data, mac, template, req));
}

static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
	    key, mac, &null_crypto_data, template, req));
}