/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 * Copyright 2015 by Saso Kiselkov. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/ddi.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/sysmacros.h>
#include <sys/strsun.h>
#include <modes/modes.h>
#define _AES_IMPL
#include <aes/aes_impl.h>

extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
    &mod_cryptoops,
    "AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
    MODREV_1,
    (void *)&modlcrypto,
    NULL
};

/*
 * Mechanism info structure passed to KCF during registration.
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
    /* AES_ECB */
    {SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
        CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
        CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
        AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
    /* AES_CBC */
    {SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
        CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
        CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
        AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
    /* AES_CTR */
    {SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
        CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
        CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
        AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
    /* AES_CCM */
    {SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
        CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
        CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
        AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
    /* AES_GCM */
    {SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
        CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
        CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
        AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
    /* AES_GMAC */
    {SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
        CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
        CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
        CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
        CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
        CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
        AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
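
/*
 * The key-size bounds above are expressed in bytes, per
 * CRYPTO_KEYSIZE_UNIT_IN_BYTES (16 through 32, i.e. AES-128 through
 * AES-256). AES_GMAC is the only mechanism also advertised through the
 * MAC and sign/verify function groups, since it produces an
 * authentication tag rather than ciphertext.
 */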

/* operations are in-place if the output buffer is NULL */
#define AES_ARG_INPLACE(input, output) \
    if ((output) == NULL) \
        (output) = (input);
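
/*
 * Note that AES_ARG_INPLACE expands to a bare if-statement; it is only
 * safe when used as a standalone statement, never directly before an
 * else clause. All uses below follow that pattern.
 */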

static void aes_provider_status(crypto_provider_handle_t, uint_t *);

static crypto_control_ops_t aes_control_ops = {
    aes_provider_status
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_cipher_ops_t aes_cipher_ops = {
    aes_encrypt_init,
    aes_encrypt,
    aes_encrypt_update,
    aes_encrypt_final,
    aes_encrypt_atomic,
    aes_decrypt_init,
    aes_decrypt,
    aes_decrypt_update,
    aes_decrypt_final,
    aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

static crypto_mac_ops_t aes_mac_ops = {
    NULL,
    NULL,
    NULL,
    NULL,
    aes_mac_atomic,
    aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

static crypto_ctx_ops_t aes_ctx_ops = {
    aes_create_ctx_template,
    aes_free_context
};

static crypto_ops_t aes_crypto_ops = {
    &aes_control_ops,
    NULL,
    &aes_cipher_ops,
    &aes_mac_ops,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    NULL,
    &aes_ctx_ops,
    NULL,
    NULL,
    NULL,
};
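
/*
 * The NULL slots above stand for the operation families (digest, sign,
 * verify, dual ops, random, session, object and key management, among
 * others) that this provider does not implement; KCF routes those
 * requests elsewhere.
 */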

static crypto_provider_info_t aes_prov_info = {
    CRYPTO_SPI_VERSION_4,
    "AES Software Provider",
    CRYPTO_SW_PROVIDER,
    {&modlinkage},
    NULL,
    &aes_crypto_ops,
    sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
    aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
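
/*
 * null_crypto_data is handed to aes_encrypt_atomic()/aes_decrypt_atomic()
 * as the zero-length payload when AES GMAC is driven through the MAC
 * entry points at the bottom of this file; GMAC authenticates data but
 * produces no ciphertext.
 */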

int
_init(void)
{
    int ret;

    if ((ret = mod_install(&modlinkage)) != 0)
        return (ret);

    /* Register with KCF. If the registration fails, remove the module. */
    if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
        (void) mod_remove(&modlinkage);
        return (EACCES);
    }

    return (0);
}

int
_fini(void)
{
    /* Unregister from KCF if module is registered */
    if (aes_prov_handle != NULL) {
        if (crypto_unregister_provider(aes_prov_handle))
            return (EBUSY);

        aes_prov_handle = NULL;
    }

    return (mod_remove(&modlinkage));
}

int
_info(struct modinfo *modinfop)
{
    return (mod_info(&modlinkage, modinfop));
}

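/*
 * Validate the mechanism parameter length for the requested AES mode
 * and, when ctx is non-NULL, allocate the matching mode context with
 * kmflag. ECB is the only mode that takes no parameter.
 */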
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
    void *p = NULL;
    boolean_t param_required = B_TRUE;
    size_t param_len;
    void *(*alloc_fun)(int);
    int rv = CRYPTO_SUCCESS;

    switch (mechanism->cm_type) {
    case AES_ECB_MECH_INFO_TYPE:
        param_required = B_FALSE;
        alloc_fun = ecb_alloc_ctx;
        break;
    case AES_CBC_MECH_INFO_TYPE:
        param_len = AES_BLOCK_LEN;
        alloc_fun = cbc_alloc_ctx;
        break;
    case AES_CTR_MECH_INFO_TYPE:
        param_len = sizeof (CK_AES_CTR_PARAMS);
        alloc_fun = ctr_alloc_ctx;
        break;
    case AES_CCM_MECH_INFO_TYPE:
        param_len = sizeof (CK_AES_CCM_PARAMS);
        alloc_fun = ccm_alloc_ctx;
        break;
    case AES_GCM_MECH_INFO_TYPE:
        param_len = sizeof (CK_AES_GCM_PARAMS);
        alloc_fun = gcm_alloc_ctx;
        break;
    case AES_GMAC_MECH_INFO_TYPE:
        param_len = sizeof (CK_AES_GMAC_PARAMS);
        alloc_fun = gmac_alloc_ctx;
        break;
    default:
        rv = CRYPTO_MECHANISM_INVALID;
        return (rv);
    }
    if (param_required && mechanism->cm_param != NULL &&
        mechanism->cm_param_len != param_len) {
        rv = CRYPTO_MECHANISM_PARAM_INVALID;
    }
    if (ctx != NULL) {
        p = (alloc_fun)(kmflag);
        *ctx = p;
    }
    return (rv);
}

/*
 * Initialize the key schedule for AES. Only raw (by-value) keys of
 * 128, 192, or 256 bits are accepted.
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
    /*
     * Only keys by value are supported by this module.
     */
    switch (key->ck_format) {
    case CRYPTO_KEY_RAW:
        if (key->ck_length < AES_MINBITS ||
            key->ck_length > AES_MAXBITS) {
            return (CRYPTO_KEY_SIZE_RANGE);
        }

        /* key length must be either 128, 192, or 256 */
        if ((key->ck_length & 63) != 0)
            return (CRYPTO_KEY_SIZE_RANGE);
        break;
    default:
        return (CRYPTO_KEY_TYPE_INCONSISTENT);
    }

    aes_init_keysched(key->ck_data, key->ck_length, newbie);
    return (CRYPTO_SUCCESS);
}

/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
    *status = CRYPTO_PROVIDER_READY;
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
    return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
    return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
    aes_ctx_t *aes_ctx;
    int rv;
    int kmflag;

    /*
     * Only keys by value are supported by this module.
     */
    if (key->ck_format != CRYPTO_KEY_RAW) {
        return (CRYPTO_KEY_TYPE_INCONSISTENT);
    }

    kmflag = crypto_kmflag(req);
    if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
        != CRYPTO_SUCCESS)
        return (rv);

    rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
        is_encrypt_init);
    if (rv != CRYPTO_SUCCESS) {
        crypto_free_mode_ctx(aes_ctx);
        return (rv);
    }

    ctx->cc_provider_private = aes_ctx;

    return (CRYPTO_SUCCESS);
}

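/*
 * Copy a 16-byte AES block into two 64-bit words, using two aligned
 * 64-bit loads when the source address permits it and falling back to a
 * byte-wise copy otherwise.
 */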
static void
aes_copy_block64(const uint8_t *in, uint64_t *out)
{
    if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
        /* LINTED: pointer alignment */
        out[0] = *(uint64_t *)&in[0];
        /* LINTED: pointer alignment */
        out[1] = *(uint64_t *)&in[8];
    } else {
        uint8_t *iv8 = (uint8_t *)&out[0];

        AES_COPY_BLOCK_UNALIGNED(in, iv8);
    }
}

static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
    int ret = CRYPTO_FAILED;

    aes_ctx_t *aes_ctx;
    off_t saved_offset;
    size_t saved_length, length_needed;

    ASSERT(ctx->cc_provider_private != NULL);
    aes_ctx = ctx->cc_provider_private;

    /*
     * For block ciphers, plaintext must be a multiple of the AES block
     * size. This test is only valid for ciphers whose blocksize is a
     * power of 2.
     */
    if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
        == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
        return (CRYPTO_DATA_LEN_RANGE);

    AES_ARG_INPLACE(plaintext, ciphertext);

    /*
     * If the output buffer is too small, only return the length needed
     * to store the output; the context must not be destroyed in that
     * case, so that the caller can retry with a larger buffer.
     */
    switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
    case CCM_MODE:
        length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
        break;
    case GCM_MODE:
        length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
        break;
    case GMAC_MODE:
        if (plaintext->cd_length != 0)
            return (CRYPTO_ARGUMENTS_BAD);

        length_needed = aes_ctx->ac_tag_len;
        break;
    default:
        length_needed = plaintext->cd_length;
    }

    if (ciphertext->cd_length < length_needed) {
        ciphertext->cd_length = length_needed;
        return (CRYPTO_BUFFER_TOO_SMALL);
    }

    saved_length = ciphertext->cd_length;
    saved_offset = ciphertext->cd_offset;

    /*
     * Do an update on the specified input data.
     */
    ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
    if (ret != CRYPTO_SUCCESS) {
        return (ret);
    }

    /*
     * For CCM mode, ccm_encrypt_final() will take care of any
     * left-over unprocessed data and compute the MAC.
     */
    if (aes_ctx->ac_flags & CCM_MODE) {
        /*
         * ccm_encrypt_final() will compute the MAC and append it
         * to the existing ciphertext, so the left-over length
         * value needs to be adjusted accordingly.
         */

        /* order of the following 2 lines MUST not be reversed */
        ciphertext->cd_offset = ciphertext->cd_length;
        ciphertext->cd_length = saved_length - ciphertext->cd_length;
        ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
            AES_BLOCK_LEN, aes_encrypt_block, AES_XOR_BLOCK);
        if (ret != CRYPTO_SUCCESS) {
            return (ret);
        }

        if (plaintext != ciphertext) {
            ciphertext->cd_length =
                ciphertext->cd_offset - saved_offset;
        }
        ciphertext->cd_offset = saved_offset;
    } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
        /*
         * gcm_encrypt_final() will compute the MAC and append it
         * to the existing ciphertext, so the left-over length
         * value needs to be adjusted accordingly.
         */

        /* order of the following 2 lines MUST not be reversed */
        ciphertext->cd_offset = ciphertext->cd_length;
        ciphertext->cd_length = saved_length - ciphertext->cd_length;
        ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK);
        if (ret != CRYPTO_SUCCESS) {
            return (ret);
        }

        if (plaintext != ciphertext) {
            ciphertext->cd_length =
                ciphertext->cd_offset - saved_offset;
        }
        ciphertext->cd_offset = saved_offset;
    }

    ASSERT(aes_ctx->ac_remainder_len == 0);
    (void) aes_free_context(ctx);

    return (ret);
}
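
/*
 * A minimal consumer-side sketch of the single-part path above, assuming
 * the standard KCF consumer API (crypto_mech2id()/crypto_encrypt()); the
 * key, IV and data buffers here are illustrative only:
 *
 *    crypto_mechanism_t mech;
 *
 *    mech.cm_type = crypto_mech2id(SUN_CKM_AES_CBC);
 *    mech.cm_param = (char *)iv;
 *    mech.cm_param_len = AES_BLOCK_LEN;
 *    ret = crypto_encrypt(&mech, &pt, &key, NULL, &ct, NULL);
 *
 * If this returns CRYPTO_BUFFER_TOO_SMALL, ct.cd_length has been set to
 * the length the caller must supply before retrying.
 */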

static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
    int ret = CRYPTO_FAILED;

    aes_ctx_t *aes_ctx;
    off_t saved_offset;
    size_t saved_length, length_needed;

    ASSERT(ctx->cc_provider_private != NULL);
    aes_ctx = ctx->cc_provider_private;

    /*
     * For block ciphers, ciphertext must be a multiple of the AES block
     * size. This test is only valid for ciphers whose blocksize is a
     * power of 2.
     */
    if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
        == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
        return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
    }

    AES_ARG_INPLACE(ciphertext, plaintext);

    /*
     * Return length needed to store the output.
     * Do not destroy context when plaintext buffer is too small.
     *
     * CCM:  plaintext is MAC len smaller than cipher text
     * GCM:  plaintext is TAG len smaller than cipher text
     * GMAC: plaintext length must be zero
     */
    switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
    case CCM_MODE:
        length_needed = aes_ctx->ac_processed_data_len;
        break;
    case GCM_MODE:
        length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
        break;
    case GMAC_MODE:
        if (plaintext->cd_length != 0)
            return (CRYPTO_ARGUMENTS_BAD);

        length_needed = 0;
        break;
    default:
        length_needed = ciphertext->cd_length;
    }

    if (plaintext->cd_length < length_needed) {
        plaintext->cd_length = length_needed;
        return (CRYPTO_BUFFER_TOO_SMALL);
    }

    saved_offset = plaintext->cd_offset;
    saved_length = plaintext->cd_length;

    /*
     * Do an update on the specified input data.
     */
    ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
    if (ret != CRYPTO_SUCCESS) {
        goto cleanup;
    }

    if (aes_ctx->ac_flags & CCM_MODE) {
        ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
        ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

        /* order of the following 2 lines MUST not be reversed */
        plaintext->cd_offset = plaintext->cd_length;
        plaintext->cd_length = saved_length - plaintext->cd_length;

        ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK);
        if (ret == CRYPTO_SUCCESS) {
            if (plaintext != ciphertext) {
                plaintext->cd_length =
                    plaintext->cd_offset - saved_offset;
            }
        } else {
            plaintext->cd_length = saved_length;
        }

        plaintext->cd_offset = saved_offset;
    } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
        /* order of the following 2 lines MUST not be reversed */
        plaintext->cd_offset = plaintext->cd_length;
        plaintext->cd_length = saved_length - plaintext->cd_length;

        ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK, aes_ctr_mode);
        if (ret == CRYPTO_SUCCESS) {
            if (plaintext != ciphertext) {
                plaintext->cd_length =
                    plaintext->cd_offset - saved_offset;
            }
        } else {
            plaintext->cd_length = saved_length;
        }

        plaintext->cd_offset = saved_offset;
    }

    ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
    (void) aes_free_context(ctx);

    return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
    off_t saved_offset;
    size_t saved_length, out_len;
    int ret = CRYPTO_SUCCESS;
    aes_ctx_t *aes_ctx;

    ASSERT(ctx->cc_provider_private != NULL);
    aes_ctx = ctx->cc_provider_private;

    AES_ARG_INPLACE(plaintext, ciphertext);

    /* compute number of bytes that will hold the ciphertext */
    out_len = aes_ctx->ac_remainder_len;
    out_len += plaintext->cd_length;
    out_len &= ~(AES_BLOCK_LEN - 1);
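    /*
     * For example, 9 bytes carried over in ac_remainder plus 25 new
     * bytes gives out_len = 34 & ~15 = 32: two full blocks are emitted
     * and the 2-byte tail stays buffered in the context (for CTR it is
     * flushed by ctr_mode_final() below).
     */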

    /* return length needed to store the output */
    if (ciphertext->cd_length < out_len) {
        ciphertext->cd_length = out_len;
        return (CRYPTO_BUFFER_TOO_SMALL);
    }

    saved_offset = ciphertext->cd_offset;
    saved_length = ciphertext->cd_length;

    /*
     * Do the AES update on the specified input data.
     */
    switch (plaintext->cd_format) {
    case CRYPTO_DATA_RAW:
        ret = crypto_update_iov(ctx->cc_provider_private,
            plaintext, ciphertext, aes_encrypt_contiguous_blocks,
            aes_copy_block64);
        break;
    case CRYPTO_DATA_UIO:
        ret = crypto_update_uio(ctx->cc_provider_private,
            plaintext, ciphertext, aes_encrypt_contiguous_blocks,
            aes_copy_block64);
        break;
    case CRYPTO_DATA_MBLK:
        ret = crypto_update_mp(ctx->cc_provider_private,
            plaintext, ciphertext, aes_encrypt_contiguous_blocks,
            aes_copy_block64);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    /*
     * Since AES counter mode is a stream cipher, we call
     * ctr_mode_final() to pick up any remaining bytes.
     * It is an internal function that does not destroy
     * the context like *normal* final routines.
     */
    if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
        ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
            ciphertext, aes_encrypt_block);
    }

    if (ret == CRYPTO_SUCCESS) {
        if (plaintext != ciphertext)
            ciphertext->cd_length =
                ciphertext->cd_offset - saved_offset;
    } else {
        ciphertext->cd_length = saved_length;
    }
    ciphertext->cd_offset = saved_offset;

    return (ret);
}

static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
    off_t saved_offset;
    size_t saved_length, out_len;
    int ret = CRYPTO_SUCCESS;
    aes_ctx_t *aes_ctx;

    ASSERT(ctx->cc_provider_private != NULL);
    aes_ctx = ctx->cc_provider_private;

    AES_ARG_INPLACE(ciphertext, plaintext);

    /*
     * Compute number of bytes that will hold the plaintext.
     * This is not necessary for CCM and GMAC since these
     * mechanisms never return plaintext for update operations.
     */
    if ((aes_ctx->ac_flags & (CCM_MODE|GMAC_MODE)) == 0) {
        out_len = aes_ctx->ac_remainder_len;
        out_len += ciphertext->cd_length;
        out_len &= ~(AES_BLOCK_LEN - 1);
        if (aes_ctx->ac_flags & GCM_MODE)
            out_len -= ((gcm_ctx_t *)aes_ctx)->gcm_tag_len;

        /* return length needed to store the output */
        if (plaintext->cd_length < out_len) {
            plaintext->cd_length = out_len;
            return (CRYPTO_BUFFER_TOO_SMALL);
        }
    }

    saved_offset = plaintext->cd_offset;
    saved_length = plaintext->cd_length;

    if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
        gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

    /*
     * Do the AES update on the specified input data.
     */
    switch (ciphertext->cd_format) {
    case CRYPTO_DATA_RAW:
        ret = crypto_update_iov(ctx->cc_provider_private,
            ciphertext, plaintext, aes_decrypt_contiguous_blocks,
            aes_copy_block64);
        break;
    case CRYPTO_DATA_UIO:
        ret = crypto_update_uio(ctx->cc_provider_private,
            ciphertext, plaintext, aes_decrypt_contiguous_blocks,
            aes_copy_block64);
        break;
    case CRYPTO_DATA_MBLK:
        ret = crypto_update_mp(ctx->cc_provider_private,
            ciphertext, plaintext, aes_decrypt_contiguous_blocks,
            aes_copy_block64);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    /*
     * Since AES counter mode is a stream cipher, we call
     * ctr_mode_final() to pick up any remaining bytes.
     * It is an internal function that does not destroy
     * the context like *normal* final routines.
     */
    if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
        ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
            aes_encrypt_block);
        if (ret == CRYPTO_DATA_LEN_RANGE)
            ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
    }

    if (ret == CRYPTO_SUCCESS) {
        if (ciphertext != plaintext)
            plaintext->cd_length =
                plaintext->cd_offset - saved_offset;
    } else {
        plaintext->cd_length = saved_length;
    }
    plaintext->cd_offset = saved_offset;

    return (ret);
}

/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
    aes_ctx_t *aes_ctx;
    int ret;

    ASSERT(ctx->cc_provider_private != NULL);
    aes_ctx = ctx->cc_provider_private;

    if (data->cd_format != CRYPTO_DATA_RAW &&
        data->cd_format != CRYPTO_DATA_UIO &&
        data->cd_format != CRYPTO_DATA_MBLK) {
        return (CRYPTO_ARGUMENTS_BAD);
    }

    if (aes_ctx->ac_flags & CTR_MODE) {
        if (aes_ctx->ac_remainder_len > 0) {
            ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
                aes_encrypt_block);
            if (ret != CRYPTO_SUCCESS)
                return (ret);
        }
    } else if (aes_ctx->ac_flags & CCM_MODE) {
        ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
            AES_BLOCK_LEN, aes_encrypt_block, AES_XOR_BLOCK);
        if (ret != CRYPTO_SUCCESS) {
            return (ret);
        }
    } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
        size_t saved_offset = data->cd_offset;

        ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK);
        if (ret != CRYPTO_SUCCESS) {
            return (ret);
        }
        data->cd_length = data->cd_offset - saved_offset;
        data->cd_offset = saved_offset;
    } else {
        /*
         * There must be no unprocessed plaintext.
         * This happens if the length of the last data is
         * not a multiple of the AES block length.
         */
        if (aes_ctx->ac_remainder_len > 0) {
            return (CRYPTO_DATA_LEN_RANGE);
        }
        data->cd_length = 0;
    }

    (void) aes_free_context(ctx);

    return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
    aes_ctx_t *aes_ctx;
    int ret;
    off_t saved_offset;
    size_t saved_length;

    ASSERT(ctx->cc_provider_private != NULL);
    aes_ctx = ctx->cc_provider_private;

    if (data->cd_format != CRYPTO_DATA_RAW &&
        data->cd_format != CRYPTO_DATA_UIO &&
        data->cd_format != CRYPTO_DATA_MBLK) {
        return (CRYPTO_ARGUMENTS_BAD);
    }

    /*
     * There must be no unprocessed ciphertext.
     * This happens if the length of the last ciphertext is
     * not a multiple of the AES block length.
     */
    if (aes_ctx->ac_remainder_len > 0) {
        if ((aes_ctx->ac_flags & CTR_MODE) == 0)
            return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
        else {
            ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
                aes_encrypt_block);
            if (ret == CRYPTO_DATA_LEN_RANGE)
                ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
            if (ret != CRYPTO_SUCCESS)
                return (ret);
        }
    }

    if (aes_ctx->ac_flags & CCM_MODE) {
        /*
         * This is where all the plaintext is returned; make sure
         * the plaintext buffer is big enough.
         */
        size_t pt_len = aes_ctx->ac_data_len;

        if (data->cd_length < pt_len) {
            data->cd_length = pt_len;
            return (CRYPTO_BUFFER_TOO_SMALL);
        }

        ASSERT(aes_ctx->ac_processed_data_len == pt_len);
        ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
        saved_offset = data->cd_offset;
        saved_length = data->cd_length;
        ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK);
        if (ret == CRYPTO_SUCCESS) {
            data->cd_length = data->cd_offset - saved_offset;
        } else {
            data->cd_length = saved_length;
        }

        data->cd_offset = saved_offset;
        if (ret != CRYPTO_SUCCESS) {
            return (ret);
        }
    } else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
        /*
         * Check to make sure there is enough space for the
         * remaining plaintext.
         */
        gcm_ctx_t *gctx = (gcm_ctx_t *)aes_ctx;
        size_t pt_len = gctx->gcm_last_input_fill - gctx->gcm_tag_len;

        if (data->cd_length < pt_len) {
            data->cd_length = pt_len;
            return (CRYPTO_BUFFER_TOO_SMALL);
        }
        saved_offset = data->cd_offset;
        saved_length = data->cd_length;
        ret = gcm_decrypt_final(gctx, data,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK, aes_ctr_mode);
        if (ret == CRYPTO_SUCCESS) {
            data->cd_length = data->cd_offset - saved_offset;
        } else {
            data->cd_length = saved_length;
        }

        data->cd_offset = saved_offset;
        if (ret != CRYPTO_SUCCESS) {
            return (ret);
        }
    }

    if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
        data->cd_length = 0;
    }

    (void) aes_free_context(ctx);

    return (CRYPTO_SUCCESS);
}

/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
    aes_ctx_t aes_ctx;    /* on the stack */
    off_t saved_offset;
    size_t saved_length;
    size_t length_needed;
    int ret;

    AES_ARG_INPLACE(plaintext, ciphertext);

    /*
     * CTR, CCM, GCM, and GMAC modes do not require that plaintext
     * be a multiple of the AES block size.
     */
    switch (mechanism->cm_type) {
    case AES_CTR_MECH_INFO_TYPE:
    case AES_CCM_MECH_INFO_TYPE:
    case AES_GCM_MECH_INFO_TYPE:
    case AES_GMAC_MECH_INFO_TYPE:
        break;
    default:
        if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
            return (CRYPTO_DATA_LEN_RANGE);
    }

    if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
        return (ret);

    bzero(&aes_ctx, sizeof (aes_ctx_t));

    ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
        crypto_kmflag(req), B_TRUE);
    if (ret != CRYPTO_SUCCESS)
        return (ret);

    switch (mechanism->cm_type) {
    case AES_CCM_MECH_INFO_TYPE:
        length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
        break;
    case AES_GMAC_MECH_INFO_TYPE:
        if (plaintext->cd_length != 0) {
            ret = CRYPTO_ARGUMENTS_BAD;
            goto out;
        }
        /* FALLTHRU */
    case AES_GCM_MECH_INFO_TYPE:
        length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
        break;
    default:
        length_needed = plaintext->cd_length;
    }

    /* return size of buffer needed to store output */
    if (ciphertext->cd_length < length_needed) {
        ciphertext->cd_length = length_needed;
        ret = CRYPTO_BUFFER_TOO_SMALL;
        goto out;
    }

    saved_offset = ciphertext->cd_offset;
    saved_length = ciphertext->cd_length;

    /*
     * Do an update on the specified input data.
     */
    switch (plaintext->cd_format) {
    case CRYPTO_DATA_RAW:
        ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
            aes_encrypt_contiguous_blocks, aes_copy_block64);
        break;
    case CRYPTO_DATA_UIO:
        ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
            aes_encrypt_contiguous_blocks, aes_copy_block64);
        break;
    case CRYPTO_DATA_MBLK:
        ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
            aes_encrypt_contiguous_blocks, aes_copy_block64);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    if (ret == CRYPTO_SUCCESS) {
        if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
            ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
                ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
                AES_XOR_BLOCK);
            if (ret != CRYPTO_SUCCESS)
                goto out;
            ASSERT(aes_ctx.ac_remainder_len == 0);
        } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
            mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
            ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
                ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
                AES_COPY_BLOCK, AES_XOR_BLOCK);
            if (ret != CRYPTO_SUCCESS)
                goto out;
            ASSERT(aes_ctx.ac_remainder_len == 0);
        } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
            if (aes_ctx.ac_remainder_len > 0) {
                ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
                    ciphertext, aes_encrypt_block);
                if (ret != CRYPTO_SUCCESS)
                    goto out;
            }
        } else {
            ASSERT(aes_ctx.ac_remainder_len == 0);
        }

        if (plaintext != ciphertext) {
            ciphertext->cd_length =
                ciphertext->cd_offset - saved_offset;
        }
    } else {
        ciphertext->cd_length = saved_length;
    }
    ciphertext->cd_offset = saved_offset;

out:
    if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
        bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
        kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
    }

    return (ret);
}

/* ARGSUSED */
static int
aes_decrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
    aes_ctx_t aes_ctx;    /* on the stack */
    off_t saved_offset;
    size_t saved_length;
    size_t length_needed;
    int ret;

    AES_ARG_INPLACE(ciphertext, plaintext);

    /*
     * CTR, CCM, GCM, and GMAC modes do not require that ciphertext
     * be a multiple of the AES block size.
     */
    switch (mechanism->cm_type) {
    case AES_CTR_MECH_INFO_TYPE:
    case AES_CCM_MECH_INFO_TYPE:
    case AES_GCM_MECH_INFO_TYPE:
    case AES_GMAC_MECH_INFO_TYPE:
        break;
    default:
        if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
            return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
    }

    if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
        return (ret);

    bzero(&aes_ctx, sizeof (aes_ctx_t));

    ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
        crypto_kmflag(req), B_FALSE);
    if (ret != CRYPTO_SUCCESS)
        return (ret);

    switch (mechanism->cm_type) {
    case AES_CCM_MECH_INFO_TYPE:
        length_needed = aes_ctx.ac_data_len;
        break;
    case AES_GCM_MECH_INFO_TYPE:
        length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
        break;
    case AES_GMAC_MECH_INFO_TYPE:
        if (plaintext->cd_length != 0) {
            ret = CRYPTO_ARGUMENTS_BAD;
            goto out;
        }
        length_needed = 0;
        break;
    default:
        length_needed = ciphertext->cd_length;
    }

    /* return size of buffer needed to store output */
    if (plaintext->cd_length < length_needed) {
        plaintext->cd_length = length_needed;
        ret = CRYPTO_BUFFER_TOO_SMALL;
        goto out;
    }

    saved_offset = plaintext->cd_offset;
    saved_length = plaintext->cd_length;

    if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
        mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
        gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));

    /*
     * Do an update on the specified input data.
     */
    switch (ciphertext->cd_format) {
    case CRYPTO_DATA_RAW:
        ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
            aes_decrypt_contiguous_blocks, aes_copy_block64);
        break;
    case CRYPTO_DATA_UIO:
        ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
            aes_decrypt_contiguous_blocks, aes_copy_block64);
        break;
    case CRYPTO_DATA_MBLK:
        ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
            aes_decrypt_contiguous_blocks, aes_copy_block64);
        break;
    default:
        ret = CRYPTO_ARGUMENTS_BAD;
    }

    if (ret == CRYPTO_SUCCESS) {
        if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
            ASSERT(aes_ctx.ac_processed_data_len
                == aes_ctx.ac_data_len);
            ASSERT(aes_ctx.ac_processed_mac_len
                == aes_ctx.ac_mac_len);
            ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
                plaintext, AES_BLOCK_LEN, aes_encrypt_block,
                AES_COPY_BLOCK, AES_XOR_BLOCK);
            ASSERT(aes_ctx.ac_remainder_len == 0);
            if ((ret == CRYPTO_SUCCESS) &&
                (ciphertext != plaintext)) {
                plaintext->cd_length =
                    plaintext->cd_offset - saved_offset;
            } else {
                plaintext->cd_length = saved_length;
            }
        } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
            mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
            ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
                plaintext, AES_BLOCK_LEN, aes_encrypt_block,
                AES_COPY_BLOCK, AES_XOR_BLOCK, aes_ctr_mode);
            ASSERT(aes_ctx.ac_remainder_len == 0);
            if ((ret == CRYPTO_SUCCESS) &&
                (ciphertext != plaintext)) {
                plaintext->cd_length =
                    plaintext->cd_offset - saved_offset;
            } else {
                plaintext->cd_length = saved_length;
            }
        } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
            ASSERT(aes_ctx.ac_remainder_len == 0);
            if (ciphertext != plaintext)
                plaintext->cd_length =
                    plaintext->cd_offset - saved_offset;
        } else {
            if (aes_ctx.ac_remainder_len > 0) {
                ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
                    plaintext, aes_encrypt_block);
                if (ret == CRYPTO_DATA_LEN_RANGE)
                    ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
                if (ret != CRYPTO_SUCCESS)
                    goto out;
            }
            if (ciphertext != plaintext)
                plaintext->cd_length =
                    plaintext->cd_offset - saved_offset;
        }
    } else {
        plaintext->cd_length = saved_length;
    }
    plaintext->cd_offset = saved_offset;

out:
    if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
        bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
        kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
    }

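    /*
     * CCM decryption accumulates the decrypted payload in an internal
     * buffer (ac_pt_buf) so that nothing is released to the caller
     * before the MAC verifies; that buffer is released here on the way
     * out.
     */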
    if (aes_ctx.ac_flags & CCM_MODE) {
        if (aes_ctx.ac_pt_buf != NULL) {
            kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
        }
    }

    return (ret);
}

/*
 * KCF software provider context template entry points.
 */
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
    void *keysched;
    size_t size;
    int rv;

    if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
        mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
        mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
        mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
        mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
        mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
        return (CRYPTO_MECHANISM_INVALID);

    if ((keysched = aes_alloc_keysched(&size,
        crypto_kmflag(req))) == NULL) {
        return (CRYPTO_HOST_MEMORY);
    }

    /*
     * Initialize key schedule. Key length information is stored
     * in the key.
     */
    if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
        bzero(keysched, size);
        kmem_free(keysched, size);
        return (rv);
    }

    *tmpl = keysched;
    *tmpl_size = size;

    return (CRYPTO_SUCCESS);
}

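/*
 * Release a per-request context. The key schedule is explicitly zeroed
 * before it is freed so that no key material lingers in freed kernel
 * memory.
 */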
static int
aes_free_context(crypto_ctx_t *ctx)
{
    aes_ctx_t *aes_ctx = ctx->cc_provider_private;

    if (aes_ctx != NULL) {
        if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
            ASSERT(aes_ctx->ac_keysched_len != 0);
            bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
            kmem_free(aes_ctx->ac_keysched,
                aes_ctx->ac_keysched_len);
        }
        crypto_free_mode_ctx(aes_ctx);
        ctx->cc_provider_private = NULL;
    }

    return (CRYPTO_SUCCESS);
}

static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
    int rv = CRYPTO_SUCCESS;
    void *keysched;
    size_t size;

    if (template == NULL) {
        if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
            return (CRYPTO_HOST_MEMORY);
        /*
         * Initialize key schedule.
         * Key length is stored in the key.
         */
        if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
            kmem_free(keysched, size);
            return (rv);
        }

        aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
        aes_ctx->ac_keysched_len = size;
    } else {
        keysched = template;
    }
    aes_ctx->ac_keysched = keysched;

    switch (mechanism->cm_type) {
    case AES_CBC_MECH_INFO_TYPE:
        rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
            mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
        break;
    case AES_CTR_MECH_INFO_TYPE: {
        CK_AES_CTR_PARAMS *pp;

        if (mechanism->cm_param == NULL ||
            mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
            rv = CRYPTO_MECHANISM_PARAM_INVALID;
            break;
        }
        pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
        rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
            pp->cb, AES_COPY_BLOCK);
        break;
    }
    case AES_CCM_MECH_INFO_TYPE:
        if (mechanism->cm_param == NULL ||
            mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
            rv = CRYPTO_MECHANISM_PARAM_INVALID;
            break;
        }
        rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
            kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
            AES_XOR_BLOCK);
        break;
    case AES_GCM_MECH_INFO_TYPE:
        if (mechanism->cm_param == NULL ||
            mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
            rv = CRYPTO_MECHANISM_PARAM_INVALID;
            break;
        }
        rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK);
        break;
    case AES_GMAC_MECH_INFO_TYPE:
        if (mechanism->cm_param == NULL ||
            mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
            rv = CRYPTO_MECHANISM_PARAM_INVALID;
            break;
        }
        rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
            AES_BLOCK_LEN, aes_encrypt_block, AES_COPY_BLOCK,
            AES_XOR_BLOCK);
        break;
    case AES_ECB_MECH_INFO_TYPE:
        aes_ctx->ac_flags |= ECB_MODE;
        break;
    }

    if (rv != CRYPTO_SUCCESS) {
        if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
            bzero(keysched, size);
            kmem_free(keysched, size);
        }
    }

    return (rv);
}

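/*
 * Translate an AES GMAC request into the equivalent GCM request: GMAC
 * is GCM with a 12-byte IV, a full 128-bit tag, no payload, and the
 * supplied data used purely as AAD.
 */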
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
    /* LINTED: pointer alignment */
    CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

    if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
        return (CRYPTO_MECHANISM_INVALID);

    if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
        return (CRYPTO_MECHANISM_PARAM_INVALID);

    if (params->pIv == NULL)
        return (CRYPTO_MECHANISM_PARAM_INVALID);

    gcm_params->pIv = params->pIv;
    gcm_params->ulIvLen = AES_GMAC_IV_LEN;
    gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

    if (data == NULL)
        return (CRYPTO_SUCCESS);

    if (data->cd_format != CRYPTO_DATA_RAW)
        return (CRYPTO_ARGUMENTS_BAD);

    gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
    gcm_params->ulAADLen = data->cd_length;
    return (CRYPTO_SUCCESS);
}

static int
aes_mac_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
    CK_AES_GCM_PARAMS gcm_params;
    crypto_mechanism_t gcm_mech;
    int rv;

    if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
        != CRYPTO_SUCCESS)
        return (rv);

    gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
    gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
    gcm_mech.cm_param = (char *)&gcm_params;

    return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
        key, &null_crypto_data, mac, template, req));
}

static int
aes_mac_verify_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
    CK_AES_GCM_PARAMS gcm_params;
    crypto_mechanism_t gcm_mech;
    int rv;

    if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
        != CRYPTO_SUCCESS)
        return (rv);

    gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
    gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
    gcm_mech.cm_param = (char *)&gcm_params;

    return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
        key, mac, &null_crypto_data, template, req));
}