Print this page
12390 AES: aes_mech_info_tab error
Reviewed by: Garrett D'Amore <garrett@damore.org>
Reviewed by: Matt Barden <mbarden@tintri.com>
Reviewed by: Toomas Soome <tsoome@me.com>
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/uts/common/crypto/io/aes.c
+++ new/usr/src/uts/common/crypto/io/aes.c
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23 23 * Copyright 2017 Nexenta Systems, Inc. All rights reserved.
24 24 * Copyright 2019 Joyent, Inc.
25 25 */
26 26
27 27 /*
28 28 * AES provider for the Kernel Cryptographic Framework (KCF)
29 29 */
30 30
31 31 #include <sys/types.h>
32 32 #include <sys/systm.h>
33 33 #include <sys/modctl.h>
34 34 #include <sys/cmn_err.h>
35 35 #include <sys/ddi.h>
36 36 #include <sys/crypto/common.h>
37 37 #include <sys/crypto/impl.h>
38 38 #include <sys/crypto/spi.h>
39 39 #include <sys/sysmacros.h>
40 40 #include <sys/strsun.h>
41 41 #include <modes/modes.h>
42 42 #define _AES_IMPL
43 43 #include <aes/aes_impl.h>
44 44
/* Kernel crypto module operations vector, provided by the KCF framework. */
extern struct mod_ops mod_cryptoops;

/*
 * Module linkage information for the kernel.
 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,
	"AES Kernel SW Provider"
};

static struct modlinkage modlinkage = {
	MODREV_1,
	(void *)&modlcrypto,
	NULL
};
60 60
61 61 /*
62 62 * Mechanism info structure passed to KCF during registration.
63 63 */
64 64 static crypto_mech_info_t aes_mech_info_tab[] = {
65 65 /* AES_ECB */
66 66 {SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
67 67 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
↓ open down ↓ |
67 lines elided |
↑ open up ↑ |
68 68 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
69 69 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
70 70 /* AES_CBC */
71 71 {SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
72 72 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
73 73 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
74 74 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
75 75 /* AES_CMAC */
76 76 {SUN_CKM_AES_CMAC, AES_CMAC_MECH_INFO_TYPE,
77 77 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
78 - CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
78 + CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
79 79 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
80 80 /* AES_CTR */
81 81 {SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
82 82 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
83 83 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
84 84 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
85 85 /* AES_CCM */
86 86 {SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
87 87 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
88 88 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
89 89 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
90 90 /* AES_GCM */
91 91 {SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
92 92 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
93 93 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
94 94 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
95 95 /* AES_GMAC */
96 96 {SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
97 97 CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
98 98 CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
99 99 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
100 100 CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
101 101 CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
102 102 AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
103 103 };
104 104
/*
 * Operations are in-place if the output buffer is NULL: point the output
 * argument at the input.  Wrapped in do/while (0) so the macro expands to
 * a single statement and cannot misbind in an unbraced if/else (the bare
 * `if' form was a dangling-else hazard).
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
109 109
/* Provider status callback (always reports ready; see definition below). */
static void aes_provider_status(crypto_provider_handle_t, uint_t *);

/* KCF control-ops vector: only the status entry point is supplied. */
static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};

/* Forward declarations for the cipher (encrypt/decrypt) entry points. */
static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *, crypto_req_handle_t);
static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);

/* KCF cipher-ops vector: init/update/final/atomic for both directions. */
static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};
157 157
/* Forward declarations for the MAC (CMAC/GMAC) entry points. */
static int aes_mac_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_mac_update(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_mac_final(crypto_ctx_t *, crypto_data_t *,
    crypto_req_handle_t);
static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);
static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
    crypto_spi_ctx_template_t, crypto_req_handle_t);

/* KCF MAC-ops vector. */
static crypto_mac_ops_t aes_mac_ops = {
	aes_mac_init,
	aes_mac,
	aes_mac_update,
	aes_mac_final,
	aes_mac_atomic,
	aes_mac_verify_atomic
};

/* Forward declarations for context-template management. */
static int aes_create_ctx_template(crypto_provider_handle_t,
    crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
    size_t *, crypto_req_handle_t);
static int aes_free_context(crypto_ctx_t *);

/* KCF context-ops vector. */
static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};
191 191
/*
 * Master operations vector handed to KCF.  Only control, cipher, MAC and
 * ctx ops are implemented; the remaining slots (digest, sign, verify,
 * session, object, key management, etc.) are NULL.
 */
static crypto_ops_t aes_crypto_ops = {
	&aes_control_ops,
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	NULL,
	&aes_ctx_ops,
	NULL,
	NULL,
	NULL,
};

/* Provider registration descriptor passed to crypto_register_provider(). */
static crypto_provider_info_t aes_prov_info = {
	CRYPTO_SPI_VERSION_4,
	"AES Software Provider",
	CRYPTO_SW_PROVIDER,
	{&modlinkage},
	NULL,
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

/* Handle returned by KCF at registration; 0 means "not registered". */
static crypto_kcf_provider_handle_t aes_prov_handle = 0;
/* Zero-length raw crypto_data_t used where an output arg is mandatory. */
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
225 225
/*
 * Module entry point: install the module, then register this provider
 * with KCF.  If registration fails the module is removed again and
 * EACCES is returned.
 */
int
_init(void)
{
	int ret;

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF. If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}
242 242
/*
 * Module exit point: unregister from KCF (failing with EBUSY if the
 * provider is still in use), then remove the module.
 */
int
_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (mod_remove(&modlinkage));
}
256 256
/* Module info entry point: delegate to mod_info(). */
int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}
262 262
263 263
/*
 * Validate the mechanism parameter length for an AES mechanism and, when
 * `ctx' is non-NULL, allocate the matching mode-specific context.
 *
 * Returns CRYPTO_SUCCESS, CRYPTO_MECHANISM_INVALID for an unknown
 * mechanism type, or CRYPTO_MECHANISM_PARAM_INVALID when a supplied
 * parameter has the wrong length.  Note that param_len is only read when
 * param_required is B_TRUE (&& short-circuit), so the ECB/CMAC cases may
 * safely leave it unset.
 *
 * NOTE(review): a NULL return from the alloc function is stored into
 * *ctx unchecked — presumably callers pass KM_SLEEP or handle NULL;
 * confirm against aes_common_init().
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		/* CBC takes a raw IV of one AES block. */
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CMAC_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = cmac_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
316 316
317 317 /*
318 318 * Initialize key schedules for AES
319 319 */
320 320 static int
321 321 init_keysched(crypto_key_t *key, void *newbie)
322 322 {
323 323 /*
324 324 * Only keys by value are supported by this module.
325 325 */
326 326 switch (key->ck_format) {
327 327 case CRYPTO_KEY_RAW:
328 328 if (key->ck_length < AES_MINBITS ||
329 329 key->ck_length > AES_MAXBITS) {
330 330 return (CRYPTO_KEY_SIZE_RANGE);
331 331 }
332 332
333 333 /* key length must be either 128, 192, or 256 */
334 334 if ((key->ck_length & 63) != 0)
335 335 return (CRYPTO_KEY_SIZE_RANGE);
336 336 break;
337 337 default:
338 338 return (CRYPTO_KEY_TYPE_INCONSISTENT);
339 339 }
340 340
341 341 aes_init_keysched(key->ck_data, key->ck_length, newbie);
342 342 return (CRYPTO_SUCCESS);
343 343 }
344 344
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	/* This software provider is always ready. */
	*status = CRYPTO_PROVIDER_READY;
}
354 354
/* KCF encrypt-init entry point: common init with is_encrypt_init = B_TRUE. */
static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
}
362 362
/* KCF decrypt-init entry point: common init with is_encrypt_init = B_FALSE. */
static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req)
{
	return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
}
370 370
371 371
372 372
/*
 * KCF software provider encrypt entry points.
 */
/*
 * Shared init path for encrypt and decrypt: validate the mechanism,
 * allocate a mode context, set up the key schedule via
 * aes_common_init_ctx(), and stash the context in the crypto_ctx_t.
 * On any failure after allocation the mode context is freed here.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	/* KM_SLEEP vs. KM_NOSLEEP is derived from the request handle. */
	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}
408 408
/*
 * KCF single-part encrypt entry point.
 *
 * Computes the output size needed for the mode in use and, if
 * `ciphertext' is too small, returns CRYPTO_BUFFER_TOO_SMALL with
 * cd_length set to the required size *without* destroying the context,
 * so the caller can retry.  Otherwise performs an implicit update plus
 * the mode-specific final step (CCM/GCM/GMAC append the MAC/tag; CMAC
 * emits one block), then frees the context.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CMAC_MODE|CTR_MODE|CCM_MODE|
	    GCM_MODE|GMAC_MODE)) == 0) &&
	    (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	switch (aes_ctx->ac_flags & (CMAC_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case CMAC_MODE:
		/* CMAC output is always exactly one AES block. */
		length_needed = AES_BLOCK_LEN;
		break;
	case GMAC_MODE:
		/* GMAC authenticates AAD only; no plaintext is allowed. */
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		/* ECB/CBC/CTR: output is the same size as the input. */
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & CMAC_MODE) {
		/* cmac_update doesn't store data */
		ciphertext->cd_length = saved_length;
		ret = cmac_mode_final((cbc_ctx_t *)aes_ctx, ciphertext,
		    aes_encrypt_block, aes_xor_block);
		aes_ctx->ac_remainder_len = 0;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}
532 532
533 533
/*
 * KCF single-part decrypt entry point.
 *
 * Mirrors aes_encrypt(): computes the plaintext size the mode will
 * produce and returns CRYPTO_BUFFER_TOO_SMALL (context preserved) if
 * `plaintext' cannot hold it; otherwise runs an implicit update plus the
 * mode-specific final step (CCM/GCM/GMAC also verify the MAC/tag) and
 * frees the context on all remaining paths via the cleanup label.
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* MAC mismatch: restore length, report failure. */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			/* Tag mismatch: restore length, report failure. */
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}
646 646
647 647
/*
 * KCF multi-part encrypt update.
 *
 * Dispatches on the crypto_data_t format (raw/uio/mblk) to
 * aes_encrypt_contiguous_blocks().  On success, cd_length is set to the
 * number of bytes actually produced; on failure the caller's original
 * cd_length is restored.  cd_offset is always restored.
 */
/* ARGSUSED */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * CTR mode does not accumulate plaintext across xx_update() calls --
	 * it always outputs the same number of bytes as the input (so
	 * ac_remainder_len is always 0).  Other modes _do_ accumulate
	 * plaintext, and output only full blocks. For non-CTR modes, adjust
	 * the output size to reflect this.
	 */
	out_len = plaintext->cd_length + aes_ctx->ac_remainder_len;
	if ((aes_ctx->ac_flags & CTR_MODE) == 0)
		out_len &= ~(AES_BLOCK_LEN - 1);

	/*
	 * return length needed to store the output.
	 * CMAC stores its output in a local buffer until *_final.
	 */
	if ((aes_ctx->ac_flags & CMAC_MODE) == 0 &&
	    ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}
722 722
723 723
/*
 * KCF multi-part decrypt update.
 *
 * Mirrors aes_encrypt_update() but for decryption; CCM/GCM/GMAC buffer
 * all ciphertext internally and emit nothing until *_final, so out_len
 * is forced to 0 for those modes.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Adjust the number of bytes that will hold the plaintext (out_len).
	 * CCM, GCM, and GMAC mechanisms never return plaintext for update
	 * operations, so we set out_len to 0 for those.
	 *
	 * CTR mode does not accumulate any ciphertext across xx_decrypt
	 * calls, and always outputs as many bytes of plaintext as
	 * ciphertext.
	 *
	 * The remaining mechanisms output full blocks of plaintext, so
	 * we round out_len down to the closest multiple of AES_BLOCK_LEN.
	 */
	out_len = aes_ctx->ac_remainder_len + ciphertext->cd_length;
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) != 0) {
		out_len = 0;
	} else if ((aes_ctx->ac_flags & CTR_MODE) == 0) {
		out_len &= ~(AES_BLOCK_LEN - 1);
	}

	/* return length needed to store the output */
	if (plaintext->cd_length < out_len) {
		plaintext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM/GMAC buffer internally; tell them which kmem flag to use. */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}
804 804
/*
 * KCF multi-part encrypt final.
 *
 * Emits the mode-specific trailer (CCM MAC, GCM/GMAC tag, CMAC block);
 * for plain block modes there must be no unprocessed remainder.  The
 * context is always destroyed on success.
 */
/* ARGSUSED */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & CMAC_MODE) {
		ret = cmac_mode_final((cbc_ctx_t *)aes_ctx, data,
		    aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS)
			return (ret);
		data->cd_length = AES_BLOCK_LEN;
	} else if ((aes_ctx->ac_flags & CTR_MODE) == 0) {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
861 861
/*
 * KCF multi-part decrypt final.
 *
 * For CCM/GCM/GMAC this is where all buffered plaintext is released and
 * the MAC/tag is verified; the output buffer must hold the full
 * plaintext, otherwise CRYPTO_BUFFER_TOO_SMALL is returned with the
 * context intact.  Plain block modes only require that no ciphertext
 * remainder is left.
 */
/* ARGSUSED */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 *
	 * For CTR mode, ac_remainder_len is always zero (we never
	 * accumulate ciphertext across update calls with CTR mode).
	 */
	if (aes_ctx->ac_remainder_len > 0 &&
	    (aes_ctx->ac_flags & CTR_MODE) == 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}


	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	/* `ctx' here is the outer crypto_ctx_t (the inner gcm ctx shadows). */
	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
960 960
961 961 /* ARGSUSED */
962 962 static int
963 963 aes_encrypt_atomic(crypto_provider_handle_t provider,
964 964 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
965 965 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
966 966 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
967 967 {
968 968 aes_ctx_t aes_ctx; /* on the stack */
969 969 off_t saved_offset;
970 970 size_t saved_length;
971 971 size_t length_needed;
972 972 int ret;
973 973
974 974 AES_ARG_INPLACE(plaintext, ciphertext);
975 975
976 976 /*
977 977 * CTR, CCM, CMAC, GCM, and GMAC modes do not require that plaintext
978 978 * be a multiple of AES block size.
979 979 */
980 980 switch (mechanism->cm_type) {
981 981 case AES_CTR_MECH_INFO_TYPE:
982 982 case AES_CCM_MECH_INFO_TYPE:
983 983 case AES_GCM_MECH_INFO_TYPE:
984 984 case AES_GMAC_MECH_INFO_TYPE:
985 985 case AES_CMAC_MECH_INFO_TYPE:
986 986 break;
987 987 default:
988 988 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
989 989 return (CRYPTO_DATA_LEN_RANGE);
990 990 }
991 991
992 992 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
993 993 return (ret);
994 994
995 995 bzero(&aes_ctx, sizeof (aes_ctx_t));
996 996
997 997 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
998 998 crypto_kmflag(req), B_TRUE);
999 999 if (ret != CRYPTO_SUCCESS)
1000 1000 return (ret);
1001 1001
1002 1002 switch (mechanism->cm_type) {
1003 1003 case AES_CCM_MECH_INFO_TYPE:
1004 1004 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1005 1005 break;
1006 1006 case AES_GMAC_MECH_INFO_TYPE:
1007 1007 if (plaintext->cd_length != 0)
1008 1008 return (CRYPTO_ARGUMENTS_BAD);
1009 1009 /* FALLTHRU */
1010 1010 case AES_GCM_MECH_INFO_TYPE:
1011 1011 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1012 1012 break;
1013 1013 case AES_CMAC_MECH_INFO_TYPE:
1014 1014 length_needed = AES_BLOCK_LEN;
1015 1015 break;
1016 1016 default:
1017 1017 length_needed = plaintext->cd_length;
1018 1018 }
1019 1019
1020 1020 /* return size of buffer needed to store output */
1021 1021 if (ciphertext->cd_length < length_needed) {
1022 1022 ciphertext->cd_length = length_needed;
1023 1023 ret = CRYPTO_BUFFER_TOO_SMALL;
1024 1024 goto out;
1025 1025 }
1026 1026
1027 1027 saved_offset = ciphertext->cd_offset;
1028 1028 saved_length = ciphertext->cd_length;
1029 1029
1030 1030 /*
1031 1031 * Do an update on the specified input data.
1032 1032 */
1033 1033 switch (plaintext->cd_format) {
1034 1034 case CRYPTO_DATA_RAW:
1035 1035 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1036 1036 aes_encrypt_contiguous_blocks, aes_copy_block64);
1037 1037 break;
1038 1038 case CRYPTO_DATA_UIO:
1039 1039 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1040 1040 aes_encrypt_contiguous_blocks, aes_copy_block64);
1041 1041 break;
1042 1042 case CRYPTO_DATA_MBLK:
1043 1043 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1044 1044 aes_encrypt_contiguous_blocks, aes_copy_block64);
1045 1045 break;
1046 1046 default:
1047 1047 ret = CRYPTO_ARGUMENTS_BAD;
1048 1048 }
1049 1049
1050 1050 if (ret == CRYPTO_SUCCESS) {
1051 1051 switch (mechanism->cm_type) {
1052 1052 case AES_CCM_MECH_INFO_TYPE:
1053 1053 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1054 1054 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1055 1055 aes_xor_block);
1056 1056 if (ret != CRYPTO_SUCCESS)
1057 1057 goto out;
1058 1058 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1059 1059 break;
1060 1060 case AES_GCM_MECH_INFO_TYPE:
1061 1061 case AES_GMAC_MECH_INFO_TYPE:
1062 1062 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1063 1063 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1064 1064 aes_copy_block, aes_xor_block);
1065 1065 if (ret != CRYPTO_SUCCESS)
1066 1066 goto out;
1067 1067 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1068 1068 break;
1069 1069 case AES_CTR_MECH_INFO_TYPE:
1070 1070 /*
1071 1071 * Note that this use of the ASSERT3U has a slightly
1072 1072 * different meaning than the other uses in the
1073 1073 * switch statement. The other uses are to ensure
1074 1074 * no unprocessed plaintext remains after encryption
1075 1075 * (and that the input plaintext was an exact multiple
1076 1076 * of AES_BLOCK_LEN).
1077 1077 *
1078 1078 * For CTR mode, it is ensuring that no input
1079 1079 * plaintext was ever segmented and buffered during
1080 1080 * processing (since it's a stream cipher).
1081 1081 */
1082 1082 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1083 1083 break;
1084 1084 case AES_CMAC_MECH_INFO_TYPE:
1085 1085 ret = cmac_mode_final((cbc_ctx_t *)&aes_ctx,
1086 1086 ciphertext, aes_encrypt_block,
1087 1087 aes_xor_block);
1088 1088 if (ret != CRYPTO_SUCCESS)
1089 1089 goto out;
1090 1090 break;
1091 1091 default:
1092 1092 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1093 1093 break;
1094 1094 }
1095 1095
1096 1096 if (plaintext != ciphertext) {
1097 1097 ciphertext->cd_length =
1098 1098 ciphertext->cd_offset - saved_offset;
1099 1099 }
1100 1100 } else {
1101 1101 ciphertext->cd_length = saved_length;
1102 1102 }
1103 1103 ciphertext->cd_offset = saved_offset;
1104 1104
1105 1105 out:
1106 1106 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1107 1107 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1108 1108 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1109 1109 }
1110 1110
1111 1111 return (ret);
1112 1112 }
1113 1113
1114 1114 /* ARGSUSED */
1115 1115 static int
1116 1116 aes_decrypt_atomic(crypto_provider_handle_t provider,
1117 1117 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1118 1118 crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1119 1119 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1120 1120 {
1121 1121 aes_ctx_t aes_ctx; /* on the stack */
1122 1122 off_t saved_offset;
1123 1123 size_t saved_length;
1124 1124 size_t length_needed;
1125 1125 int ret;
1126 1126
1127 1127 AES_ARG_INPLACE(ciphertext, plaintext);
1128 1128
1129 1129 /*
1130 1130 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1131 1131 * be a multiple of AES block size.
1132 1132 */
1133 1133 switch (mechanism->cm_type) {
1134 1134 case AES_CTR_MECH_INFO_TYPE:
1135 1135 case AES_CCM_MECH_INFO_TYPE:
1136 1136 case AES_GCM_MECH_INFO_TYPE:
1137 1137 case AES_GMAC_MECH_INFO_TYPE:
1138 1138 break;
1139 1139 default:
1140 1140 if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1141 1141 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1142 1142 }
1143 1143
1144 1144 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1145 1145 return (ret);
1146 1146
1147 1147 bzero(&aes_ctx, sizeof (aes_ctx_t));
1148 1148
1149 1149 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1150 1150 crypto_kmflag(req), B_FALSE);
1151 1151 if (ret != CRYPTO_SUCCESS)
1152 1152 return (ret);
1153 1153
1154 1154 switch (mechanism->cm_type) {
1155 1155 case AES_CCM_MECH_INFO_TYPE:
1156 1156 length_needed = aes_ctx.ac_data_len;
1157 1157 break;
1158 1158 case AES_GCM_MECH_INFO_TYPE:
1159 1159 length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1160 1160 break;
1161 1161 case AES_GMAC_MECH_INFO_TYPE:
1162 1162 if (plaintext->cd_length != 0)
1163 1163 return (CRYPTO_ARGUMENTS_BAD);
1164 1164 length_needed = 0;
1165 1165 break;
1166 1166 default:
1167 1167 length_needed = ciphertext->cd_length;
1168 1168 }
1169 1169
1170 1170 /* return size of buffer needed to store output */
1171 1171 if (plaintext->cd_length < length_needed) {
1172 1172 plaintext->cd_length = length_needed;
1173 1173 ret = CRYPTO_BUFFER_TOO_SMALL;
1174 1174 goto out;
1175 1175 }
1176 1176
1177 1177 saved_offset = plaintext->cd_offset;
1178 1178 saved_length = plaintext->cd_length;
1179 1179
1180 1180 if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1181 1181 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1182 1182 gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1183 1183
1184 1184 /*
1185 1185 * Do an update on the specified input data.
1186 1186 */
1187 1187 switch (ciphertext->cd_format) {
1188 1188 case CRYPTO_DATA_RAW:
1189 1189 ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1190 1190 aes_decrypt_contiguous_blocks, aes_copy_block64);
1191 1191 break;
1192 1192 case CRYPTO_DATA_UIO:
1193 1193 ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1194 1194 aes_decrypt_contiguous_blocks, aes_copy_block64);
1195 1195 break;
1196 1196 case CRYPTO_DATA_MBLK:
1197 1197 ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1198 1198 aes_decrypt_contiguous_blocks, aes_copy_block64);
1199 1199 break;
1200 1200 default:
1201 1201 ret = CRYPTO_ARGUMENTS_BAD;
1202 1202 }
1203 1203
1204 1204 if (ret == CRYPTO_SUCCESS) {
1205 1205 switch (mechanism->cm_type) {
1206 1206 case AES_CCM_MECH_INFO_TYPE:
1207 1207 ASSERT(aes_ctx.ac_processed_data_len
1208 1208 == aes_ctx.ac_data_len);
1209 1209 ASSERT(aes_ctx.ac_processed_mac_len
1210 1210 == aes_ctx.ac_mac_len);
1211 1211 ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1212 1212 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1213 1213 aes_copy_block, aes_xor_block);
1214 1214 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1215 1215 if ((ret == CRYPTO_SUCCESS) &&
1216 1216 (ciphertext != plaintext)) {
1217 1217 plaintext->cd_length =
1218 1218 plaintext->cd_offset - saved_offset;
1219 1219 } else {
1220 1220 plaintext->cd_length = saved_length;
1221 1221 }
1222 1222 break;
1223 1223 case AES_GCM_MECH_INFO_TYPE:
1224 1224 case AES_GMAC_MECH_INFO_TYPE:
1225 1225 ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1226 1226 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1227 1227 aes_xor_block);
1228 1228 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1229 1229 if ((ret == CRYPTO_SUCCESS) &&
1230 1230 (ciphertext != plaintext)) {
1231 1231 plaintext->cd_length =
1232 1232 plaintext->cd_offset - saved_offset;
1233 1233 } else {
1234 1234 plaintext->cd_length = saved_length;
1235 1235 }
1236 1236 break;
1237 1237 case AES_CTR_MECH_INFO_TYPE:
1238 1238 if (ciphertext != plaintext) {
1239 1239 plaintext->cd_length =
1240 1240 plaintext->cd_offset - saved_offset;
1241 1241 }
1242 1242 break;
1243 1243 default:
1244 1244 ASSERT3U(aes_ctx.ac_remainder_len, ==, 0);
1245 1245 if (ciphertext != plaintext) {
1246 1246 plaintext->cd_length =
1247 1247 plaintext->cd_offset - saved_offset;
1248 1248 }
1249 1249 break;
1250 1250 }
1251 1251 } else {
1252 1252 plaintext->cd_length = saved_length;
1253 1253 }
1254 1254 plaintext->cd_offset = saved_offset;
1255 1255
1256 1256 out:
1257 1257 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1258 1258 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1259 1259 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1260 1260 }
1261 1261
1262 1262 if (aes_ctx.ac_flags & CCM_MODE) {
1263 1263 if (aes_ctx.ac_pt_buf != NULL) {
1264 1264 kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1265 1265 }
1266 1266 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1267 1267 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1268 1268 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1269 1269 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1270 1270 }
1271 1271 }
1272 1272
1273 1273 return (ret);
1274 1274 }
1275 1275
1276 1276 /*
1277 1277 * KCF software provider context template entry points.
1278 1278 */
1279 1279 /* ARGSUSED */
1280 1280 static int
1281 1281 aes_create_ctx_template(crypto_provider_handle_t provider,
1282 1282 crypto_mechanism_t *mechanism, crypto_key_t *key,
1283 1283 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1284 1284 {
1285 1285 void *keysched;
1286 1286 size_t size;
1287 1287 int rv;
1288 1288
1289 1289 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1290 1290 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1291 1291 mechanism->cm_type != AES_CMAC_MECH_INFO_TYPE &&
1292 1292 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1293 1293 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1294 1294 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1295 1295 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1296 1296 return (CRYPTO_MECHANISM_INVALID);
1297 1297
1298 1298 if ((keysched = aes_alloc_keysched(&size,
1299 1299 crypto_kmflag(req))) == NULL) {
1300 1300 return (CRYPTO_HOST_MEMORY);
1301 1301 }
1302 1302
1303 1303 /*
1304 1304 * Initialize key schedule. Key length information is stored
1305 1305 * in the key.
1306 1306 */
1307 1307 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1308 1308 bzero(keysched, size);
1309 1309 kmem_free(keysched, size);
1310 1310 return (rv);
1311 1311 }
1312 1312
1313 1313 *tmpl = keysched;
1314 1314 *tmpl_size = size;
1315 1315
1316 1316 return (CRYPTO_SUCCESS);
1317 1317 }
1318 1318
1319 1319
1320 1320 static int
1321 1321 aes_free_context(crypto_ctx_t *ctx)
1322 1322 {
1323 1323 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1324 1324
1325 1325 if (aes_ctx != NULL) {
1326 1326 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1327 1327 ASSERT(aes_ctx->ac_keysched_len != 0);
1328 1328 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1329 1329 kmem_free(aes_ctx->ac_keysched,
1330 1330 aes_ctx->ac_keysched_len);
1331 1331 }
1332 1332 crypto_free_mode_ctx(aes_ctx);
1333 1333 ctx->cc_provider_private = NULL;
1334 1334 }
1335 1335
1336 1336 return (CRYPTO_SUCCESS);
1337 1337 }
1338 1338
1339 1339
1340 1340 static int
1341 1341 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1342 1342 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1343 1343 boolean_t is_encrypt_init)
1344 1344 {
1345 1345 int rv = CRYPTO_SUCCESS;
1346 1346 void *keysched;
1347 1347 size_t size;
1348 1348
1349 1349 if (template == NULL) {
1350 1350 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1351 1351 return (CRYPTO_HOST_MEMORY);
1352 1352 /*
1353 1353 * Initialize key schedule.
1354 1354 * Key length is stored in the key.
1355 1355 */
1356 1356 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1357 1357 kmem_free(keysched, size);
1358 1358 return (rv);
1359 1359 }
1360 1360
1361 1361 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1362 1362 aes_ctx->ac_keysched_len = size;
1363 1363 } else {
1364 1364 keysched = template;
1365 1365 }
1366 1366 aes_ctx->ac_keysched = keysched;
1367 1367
1368 1368 switch (mechanism->cm_type) {
1369 1369 case AES_CBC_MECH_INFO_TYPE:
1370 1370 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1371 1371 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1372 1372 break;
1373 1373 case AES_CMAC_MECH_INFO_TYPE:
1374 1374 rv = cmac_init_ctx((cbc_ctx_t *)aes_ctx, AES_BLOCK_LEN);
1375 1375 break;
1376 1376 case AES_CTR_MECH_INFO_TYPE: {
1377 1377 CK_AES_CTR_PARAMS *pp;
1378 1378
1379 1379 if (mechanism->cm_param == NULL ||
1380 1380 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1381 1381 return (CRYPTO_MECHANISM_PARAM_INVALID);
1382 1382 }
1383 1383 pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1384 1384 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1385 1385 pp->cb, aes_encrypt_block, aes_copy_block);
1386 1386 break;
1387 1387 }
1388 1388 case AES_CCM_MECH_INFO_TYPE:
1389 1389 if (mechanism->cm_param == NULL ||
1390 1390 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1391 1391 return (CRYPTO_MECHANISM_PARAM_INVALID);
1392 1392 }
1393 1393 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1394 1394 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1395 1395 aes_xor_block);
1396 1396 break;
1397 1397 case AES_GCM_MECH_INFO_TYPE:
1398 1398 if (mechanism->cm_param == NULL ||
1399 1399 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1400 1400 return (CRYPTO_MECHANISM_PARAM_INVALID);
1401 1401 }
1402 1402 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1403 1403 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1404 1404 aes_xor_block);
1405 1405 break;
1406 1406 case AES_GMAC_MECH_INFO_TYPE:
1407 1407 if (mechanism->cm_param == NULL ||
1408 1408 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1409 1409 return (CRYPTO_MECHANISM_PARAM_INVALID);
1410 1410 }
1411 1411 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1412 1412 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1413 1413 aes_xor_block);
1414 1414 break;
1415 1415 case AES_ECB_MECH_INFO_TYPE:
1416 1416 aes_ctx->ac_flags |= ECB_MODE;
1417 1417 }
1418 1418
1419 1419 if (rv != CRYPTO_SUCCESS) {
1420 1420 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1421 1421 bzero(keysched, size);
1422 1422 kmem_free(keysched, size);
1423 1423 }
1424 1424 }
1425 1425
1426 1426 return (rv);
1427 1427 }
1428 1428
1429 1429 static int
1430 1430 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1431 1431 CK_AES_GCM_PARAMS *gcm_params)
1432 1432 {
1433 1433 /* LINTED: pointer alignment */
1434 1434 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1435 1435
1436 1436 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1437 1437 return (CRYPTO_MECHANISM_INVALID);
1438 1438
1439 1439 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1440 1440 return (CRYPTO_MECHANISM_PARAM_INVALID);
1441 1441
1442 1442 if (params->pIv == NULL)
1443 1443 return (CRYPTO_MECHANISM_PARAM_INVALID);
1444 1444
1445 1445 gcm_params->pIv = params->pIv;
1446 1446 gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1447 1447 gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1448 1448
1449 1449 if (data == NULL)
1450 1450 return (CRYPTO_SUCCESS);
1451 1451
1452 1452 if (data->cd_format != CRYPTO_DATA_RAW)
1453 1453 return (CRYPTO_ARGUMENTS_BAD);
1454 1454
1455 1455 gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1456 1456 gcm_params->ulAADLen = data->cd_length;
1457 1457 return (CRYPTO_SUCCESS);
1458 1458 }
1459 1459
1460 1460 static int
1461 1461 aes_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
1462 1462 crypto_key_t *key, crypto_spi_ctx_template_t template,
1463 1463 crypto_req_handle_t req)
1464 1464 {
1465 1465 return (aes_encrypt_init(ctx, mechanism,
1466 1466 key, template, req));
1467 1467 }
1468 1468
1469 1469 static int
1470 1470 aes_mac(crypto_ctx_t *ctx, crypto_data_t *plaintext, crypto_data_t *ciphertext,
1471 1471 crypto_req_handle_t req)
1472 1472 {
1473 1473 return (aes_encrypt(ctx, plaintext, ciphertext, req));
1474 1474 }
1475 1475
1476 1476 static int
1477 1477 aes_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1478 1478 crypto_req_handle_t req)
1479 1479 {
1480 1480 crypto_data_t out;
1481 1481 uint8_t block[AES_BLOCK_LEN];
1482 1482 out.cd_format = CRYPTO_DATA_RAW;
1483 1483 out.cd_offset = 0;
1484 1484 out.cd_length = sizeof (block);
1485 1485 out.cd_miscdata = NULL;
1486 1486 out.cd_raw.iov_base = (void *)block;
1487 1487 out.cd_raw.iov_len = sizeof (block);
1488 1488
1489 1489 return (aes_encrypt_update(ctx, data, &out, req));
1490 1490 }
1491 1491
1492 1492 static int
1493 1493 aes_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1494 1494 {
1495 1495 return (aes_encrypt_final(ctx, mac, req));
1496 1496 }
1497 1497
1498 1498 static int
1499 1499 aes_mac_atomic(crypto_provider_handle_t provider,
1500 1500 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1501 1501 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1502 1502 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1503 1503 {
1504 1504 CK_AES_GCM_PARAMS gcm_params;
1505 1505 crypto_mechanism_t gcm_mech;
1506 1506 int rv;
1507 1507
1508 1508 if (mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1509 1509 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1510 1510 != CRYPTO_SUCCESS)
1511 1511 return (rv);
1512 1512
1513 1513 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1514 1514 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1515 1515 gcm_mech.cm_param = (char *)&gcm_params;
1516 1516
1517 1517 return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1518 1518 key, &null_crypto_data, mac, template, req));
1519 1519 }
1520 1520 /* CMAC */
1521 1521 return (aes_encrypt_atomic(provider, session_id, mechanism,
1522 1522 key, data, mac, template, req));
1523 1523 }
1524 1524
1525 1525 static int
1526 1526 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1527 1527 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1528 1528 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1529 1529 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1530 1530 {
1531 1531 CK_AES_GCM_PARAMS gcm_params;
1532 1532 crypto_mechanism_t gcm_mech;
1533 1533 crypto_data_t data_mac;
1534 1534 char buf[AES_BLOCK_LEN];
1535 1535 int rv;
1536 1536
1537 1537 if (mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1538 1538 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1539 1539 != CRYPTO_SUCCESS)
1540 1540 return (rv);
1541 1541
1542 1542 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1543 1543 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1544 1544 gcm_mech.cm_param = (char *)&gcm_params;
1545 1545
1546 1546 return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1547 1547 key, mac, &null_crypto_data, template, req));
1548 1548 }
1549 1549
1550 1550 /* CMAC */
1551 1551
1552 1552 data_mac.cd_format = CRYPTO_DATA_RAW;
1553 1553 data_mac.cd_offset = 0;
1554 1554 data_mac.cd_length = AES_BLOCK_LEN;
1555 1555 data_mac.cd_miscdata = NULL;
1556 1556 data_mac.cd_raw.iov_base = (void *) buf;
1557 1557 data_mac.cd_raw.iov_len = AES_BLOCK_LEN;
1558 1558
1559 1559 rv = aes_encrypt_atomic(provider, session_id, &gcm_mech,
1560 1560 key, data, &data_mac, template, req);
1561 1561
1562 1562 if (rv != CRYPTO_SUCCESS)
1563 1563 return (rv);
1564 1564
1565 1565 /* should use get_input_data for mac? */
1566 1566 if (bcmp(buf, mac->cd_raw.iov_base + mac->cd_offset,
1567 1567 AES_BLOCK_LEN) != 0)
1568 1568 return (CRYPTO_INVALID_MAC);
1569 1569
1570 1570 return (CRYPTO_SUCCESS);
1571 1571 }
↓ open down ↓ |
1483 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX