 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/types.h>
#include <sys/systm.h>
#include <sys/modctl.h>
#include <sys/cmn_err.h>
#include <sys/ddi.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/sysmacros.h>
#include <sys/strsun.h>
#include <modes/modes.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>
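/*
 * Note: _AES_IMPL is defined before including aes_impl.h so that the
 * header exposes its implementation-private declarations (such as the
 * aes_mech_type_t mechanism numbers) to this provider.
 */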

	}

	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

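/*
 * Copy a 16-byte AES block from 'in' into two 64-bit words at 'out'.
 * When 'in' is 64-bit aligned the copy is done as two direct loads;
 * otherwise it falls back to the byte-wise AES_COPY_BLOCK() macro.
 * Illustrative use (hypothetical caller, not from this file):
 *
 *	uint64_t iv[2];
 *	aes_copy_block64((uint8_t *)mech->cm_param, iv);
 */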
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}


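/*
 * Single-part encryption entry point.  The update step below does the
 * bulk of the work; CCM and GCM/GMAC then need a final pass to emit
 * the authentication tag (MAC) after the ciphertext.
 */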
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of the AES block
	 * size.  This test is only valid for ciphers whose block size is a
	 * power of 2.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append it to
		 * the existing ciphertext, so the left-over length value
		 * must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append it to
		 * the existing ciphertext, so the left-over length value
		 * must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

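	/*
	 * All data has been processed; a single-part operation consumes
	 * the context, so release it before returning.
	 */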
	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}


static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}

	return (ret);
}


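/*
 * Multi-part decryption.  For CCM, GCM, and GMAC the input is only
 * accumulated here; no plaintext is revealed until the final call has
 * verified the MAC.
 */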
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
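		/* round down to a multiple of AES_BLOCK_LEN (a power of 2) */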
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
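		/*
		 * CTR is effectively a stream cipher, so a buffered
		 * partial block is flushed here as final keystream
		 * output rather than rejected.
		 */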
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext; a remainder is
		 * left behind only when the total data length is not a
		 * multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
			return (ret);
		}
	}

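	/*
	 * For the authenticated modes, the entire message has been
	 * buffered; plaintext is released to the caller only now, after
	 * the MAC can be verified.
	 */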
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * All of the plaintext is returned here; make sure the
		 * plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * All of the plaintext is returned here; make sure the
		 * plaintext buffer is big enough.  ('gctx' avoids
		 * shadowing the crypto_ctx_t argument 'ctx'.)
		 */
		gcm_ctx_t *gctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len =
		    gctx->gcm_processed_data_len - gctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}


	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

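/*
 * Atomic (single-call) entry points.  These perform the whole
 * operation with an aes_ctx_t on the stack instead of a persistent
 * KCF context; consumers typically reach them through the one-shot
 * crypto_encrypt(9F)/crypto_decrypt(9F) interfaces.
 */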
/* ARGSUSED */
static int
aes_encrypt_atomic(crypto_provider_handle_t provider,
    crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template, crypto_req_handle_t req)
{
	aes_ctx_t aes_ctx;	/* on the stack */
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks, aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
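/*
 * A context template lets a consumer precompute the AES key schedule
 * once and reuse it across many operations with the same key, instead
 * of re-expanding the key on every init.  Illustrative consumer flow
 * (see crypto_create_ctx_template(9F)):
 *
 *	crypto_ctx_template_t tmpl;
 *	if (crypto_create_ctx_template(&mech, &key, &tmpl, KM_SLEEP) ==
 *	    CRYPTO_SUCCESS) {
 *		... reuse tmpl across many encrypt/decrypt calls ...
 *	}
 */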
/* ARGSUSED */
static int
aes_create_ctx_template(crypto_provider_handle_t provider,
    crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
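		/* cast via (void *) to avoid a pointer-alignment warning */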
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
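	/* ECB needs no mechanism parameters; just record the mode. */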
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			bzero(keysched, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}

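/*
 * Translate an AES GMAC mechanism into the equivalent GCM parameter
 * block: the supplied data becomes AAD and the tag length is fixed,
 * so GMAC can be layered on the GCM implementation.
 */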
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */