Print this page
fixup .text where possible
7127 remove -Wno-missing-braces from Makefile.uts
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/uts/common/crypto/io/aes.c
+++ new/usr/src/uts/common/crypto/io/aes.c
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
23 23 */
24 24
25 25 /*
26 26 * AES provider for the Kernel Cryptographic Framework (KCF)
27 27 */
28 28
29 29 #include <sys/types.h>
30 30 #include <sys/systm.h>
31 31 #include <sys/modctl.h>
32 32 #include <sys/cmn_err.h>
33 33 #include <sys/ddi.h>
34 34 #include <sys/crypto/common.h>
35 35 #include <sys/crypto/impl.h>
36 36 #include <sys/crypto/spi.h>
37 37 #include <sys/sysmacros.h>
38 38 #include <sys/strsun.h>
39 39 #include <modes/modes.h>
40 40 #define _AES_IMPL
41 41 #include <aes/aes_impl.h>
42 42
43 43 extern struct mod_ops mod_cryptoops;
44 44
↓ open down ↓ |
44 lines elided |
↑ open up ↑ |
45 45 /*
46 46 * Module linkage information for the kernel.
47 47 */
static struct modlcrypto modlcrypto = {
	&mod_cryptoops,			/* crypto-module operations vector */
	"AES Kernel SW Provider"	/* human-readable module name */
};
52 52
53 53 static struct modlinkage modlinkage = {
54 54 MODREV_1,
55 - (void *)&modlcrypto,
56 - NULL
55 + { (void *)&modlcrypto,
56 + NULL }
57 57 };
58 58
59 59 /*
60 60 * Mechanism info structure passed to KCF during registration.
61 61 */
/*
 * Each entry names a mechanism, its KCF type, the crypto function groups
 * it supports, and the legal key-size range (in bytes).
 */
static crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES},
	/* AES_GMAC: also usable as a MAC / sign / verify mechanism */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC |
	    CRYPTO_FG_SIGN | CRYPTO_FG_SIGN_ATOMIC |
	    CRYPTO_FG_VERIFY | CRYPTO_FG_VERIFY_ATOMIC,
	    AES_MIN_KEY_BYTES, AES_MAX_KEY_BYTES, CRYPTO_KEYSIZE_UNIT_IN_BYTES}
};
97 97
/*
 * Operations are in-place if the output buffer is NULL: alias the output
 * to the input.  Wrapped in do/while (0) so the macro behaves as a single
 * statement and is safe inside unbraced if/else bodies.
 */
#define	AES_ARG_INPLACE(input, output)				\
	do {							\
		if ((output) == NULL)				\
			(output) = (input);			\
	} while (0)
102 102
103 103 static void aes_provider_status(crypto_provider_handle_t, uint_t *);
104 104
/* Control entry points: only provider status is implemented. */
static crypto_control_ops_t aes_control_ops = {
	aes_provider_status
};
108 108
109 109 static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
110 110 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
111 111 static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
112 112 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
113 113 static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
114 114 crypto_key_t *, crypto_spi_ctx_template_t, crypto_req_handle_t, boolean_t);
115 115 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
116 116 crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
117 117 static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *,
118 118 crypto_req_handle_t);
119 119 static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *,
120 120 crypto_req_handle_t);
121 121
122 122 static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
123 123 crypto_req_handle_t);
124 124 static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
125 125 crypto_data_t *, crypto_req_handle_t);
126 126 static int aes_encrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
127 127 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
128 128 crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
129 129
130 130 static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
131 131 crypto_req_handle_t);
132 132 static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
133 133 crypto_data_t *, crypto_req_handle_t);
134 134 static int aes_decrypt_atomic(crypto_provider_handle_t, crypto_session_id_t,
135 135 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
136 136 crypto_data_t *, crypto_spi_ctx_template_t, crypto_req_handle_t);
137 137
/* Cipher entry points exported to KCF. */
static crypto_cipher_ops_t aes_cipher_ops = {
	aes_encrypt_init,
	aes_encrypt,
	aes_encrypt_update,
	aes_encrypt_final,
	aes_encrypt_atomic,
	aes_decrypt_init,
	aes_decrypt,
	aes_decrypt_update,
	aes_decrypt_final,
	aes_decrypt_atomic
};
150 150
151 151 static int aes_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
152 152 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
153 153 crypto_spi_ctx_template_t, crypto_req_handle_t);
154 154 static int aes_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
155 155 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
156 156 crypto_spi_ctx_template_t, crypto_req_handle_t);
157 157
/*
 * MAC entry points (for AES GMAC).  Only the atomic forms are provided;
 * the multi-part init/update/final slots are intentionally NULL.
 */
static crypto_mac_ops_t aes_mac_ops = {
	NULL,
	NULL,
	NULL,
	NULL,
	aes_mac_atomic,
	aes_mac_verify_atomic
};
166 166
↓ open down ↓ |
100 lines elided |
↑ open up ↑ |
167 167 static int aes_create_ctx_template(crypto_provider_handle_t,
168 168 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
169 169 size_t *, crypto_req_handle_t);
170 170 static int aes_free_context(crypto_ctx_t *);
171 171
/* Context management entry points: template creation and teardown. */
static crypto_ctx_ops_t aes_ctx_ops = {
	aes_create_ctx_template,
	aes_free_context
};
176 176
177 -static crypto_ops_t aes_crypto_ops = {
178 - &aes_control_ops,
179 - NULL,
180 - &aes_cipher_ops,
181 - &aes_mac_ops,
182 - NULL,
183 - NULL,
184 - NULL,
185 - NULL,
186 - NULL,
187 - NULL,
188 - NULL,
189 - NULL,
190 - NULL,
191 - &aes_ctx_ops,
192 - NULL,
193 - NULL,
194 - NULL,
195 -};
177 +static crypto_ops_t aes_crypto_ops = {{{{{{
178 + &aes_control_ops,
179 + NULL,
180 + &aes_cipher_ops,
181 + &aes_mac_ops,
182 + NULL,
183 + NULL,
184 + NULL,
185 + NULL,
186 + NULL,
187 + NULL,
188 + NULL,
189 + NULL,
190 + NULL,
191 + &aes_ctx_ops }, /* cou_v1 */
192 + NULL }, /* cou_v2 */
193 + NULL }, /* cou_v3 */
194 + NULL } /* cou_v4 */
195 +}};
196 196
197 -static crypto_provider_info_t aes_prov_info = {
197 +static crypto_provider_info_t aes_prov_info = {{{{
198 198 CRYPTO_SPI_VERSION_4,
199 199 "AES Software Provider",
200 200 CRYPTO_SW_PROVIDER,
201 201 {&modlinkage},
202 202 NULL,
203 203 &aes_crypto_ops,
204 204 sizeof (aes_mech_info_tab)/sizeof (crypto_mech_info_t),
205 205 aes_mech_info_tab
206 -};
206 +}}}};
207 207
/* KCF handle returned by crypto_register_provider(); NULL when unregistered */
static crypto_kcf_provider_handle_t aes_prov_handle = NULL;
/* empty RAW crypto_data, presumably used as a zero-length buffer — confirm */
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };
210 210
/*
 * Module load entry point: install the module linkage, then register the
 * provider with KCF.  If KCF registration fails the module is removed
 * again so the system is left unchanged.
 */
int
_init(void)
{
	int ret;

	if ((ret = mod_install(&modlinkage)) != 0)
		return (ret);

	/* Register with KCF.  If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle)) {
		(void) mod_remove(&modlinkage);
		return (EACCES);
	}

	return (0);
}
227 227
/*
 * Module unload entry point: unregister from KCF first (refusing the
 * unload with EBUSY if KCF still holds references), then remove the
 * module linkage.
 */
int
_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != NULL) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = NULL;
	}

	return (mod_remove(&modlinkage));
}
241 241
/* Module info entry point: report linkage information. */
int
_info(struct modinfo *modinfop)
{
	return (mod_info(&modlinkage, modinfop));
}
247 247
248 248
/*
 * Validate the mechanism and, when ctx is non-NULL, allocate the
 * mode-specific context for it.
 *
 * Notes:
 *  - param_len is only consulted for mechanisms that require a parameter;
 *    ECB sets param_required = B_FALSE and leaves param_len unset.
 *  - A missing parameter (cm_param == NULL) is not rejected here; only a
 *    supplied parameter with the wrong length is.  Presumably later init
 *    catches a missing required parameter — confirm.
 *  - The allocator may return NULL under KM_NOSLEEP; *ctx is stored
 *    unchecked, so callers must handle a NULL context.
 */
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx, int kmflag)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	/* Pick the parameter length and context allocator per mechanism. */
	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(kmflag);
		*ctx = p;
	}
	return (rv);
}
297 297
298 298 /*
299 299 * Initialize key schedules for AES
300 300 */
301 301 static int
302 302 init_keysched(crypto_key_t *key, void *newbie)
303 303 {
304 304 /*
305 305 * Only keys by value are supported by this module.
306 306 */
307 307 switch (key->ck_format) {
308 308 case CRYPTO_KEY_RAW:
309 309 if (key->ck_length < AES_MINBITS ||
310 310 key->ck_length > AES_MAXBITS) {
311 311 return (CRYPTO_KEY_SIZE_RANGE);
312 312 }
313 313
314 314 /* key length must be either 128, 192, or 256 */
315 315 if ((key->ck_length & 63) != 0)
316 316 return (CRYPTO_KEY_SIZE_RANGE);
317 317 break;
318 318 default:
319 319 return (CRYPTO_KEY_TYPE_INCONSISTENT);
320 320 }
321 321
322 322 aes_init_keysched(key->ck_data, key->ck_length, newbie);
323 323 return (CRYPTO_SUCCESS);
324 324 }
325 325
/*
 * KCF software provider control entry points.
 */
/* ARGSUSED */
/* Report provider status: a software provider is always ready. */
static void
aes_provider_status(crypto_provider_handle_t provider, uint_t *status)
{
	*status = CRYPTO_PROVIDER_READY;
}
335 335
336 336 static int
337 337 aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
338 338 crypto_key_t *key, crypto_spi_ctx_template_t template,
339 339 crypto_req_handle_t req) {
340 340 return (aes_common_init(ctx, mechanism, key, template, req, B_TRUE));
341 341 }
342 342
343 343 static int
344 344 aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
345 345 crypto_key_t *key, crypto_spi_ctx_template_t template,
346 346 crypto_req_handle_t req) {
347 347 return (aes_common_init(ctx, mechanism, key, template, req, B_FALSE));
348 348 }
349 349
350 350
351 351
352 352 /*
353 353 * KCF software provider encrypt entry points.
354 354 */
/*
 * Shared init for encrypt and decrypt: validate the key and mechanism,
 * allocate a mode context, initialize it, and stash it in the KCF ctx.
 * On any failure after allocation the mode context is freed.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    crypto_req_handle_t req, boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;
	int kmflag;

	/*
	 * Only keys by value are supported by this module.
	 */
	if (key->ck_format != CRYPTO_KEY_RAW) {
		return (CRYPTO_KEY_TYPE_INCONSISTENT);
	}

	/* sleep vs. no-sleep allocation depends on the request context */
	kmflag = crypto_kmflag(req);
	if ((rv = aes_check_mech_param(mechanism, &aes_ctx, kmflag))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, kmflag,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}
387 387
/*
 * Copy a 16-byte AES block from 'in' into two 64-bit words at 'out'.
 * Uses two 64-bit loads when the source is suitably aligned, otherwise
 * falls back to the byte-wise AES_COPY_BLOCK macro.
 */
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}
402 402
403 403
/*
 * Single-part encrypt entry point.  Validates lengths, runs an update over
 * all of the plaintext, then performs the mode-specific final step (CCM
 * and GCM/GMAC append the MAC/tag to the ciphertext).  The context is
 * freed at the end of the success path; note the early-return paths after
 * a failed final do not free it here — presumably KCF cleans up the ctx
 * on failure, confirm against the framework.
 */
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	AES_ARG_INPLACE(plaintext, ciphertext);

	/*
	 * We need to just return the length needed to store the output.
	 * We should not destroy the context for the following case.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		/* GMAC authenticates AAD only; no plaintext is allowed */
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext, req);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, aes_ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append
		 * it to existing ciphertext. So, need to adjust the left over
		 * length value accordingly
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}
517 517
518 518
/*
 * Single-part decrypt entry point.  Validates lengths, runs an update over
 * all of the ciphertext, then performs the mode-specific final step (CCM
 * and GCM/GMAC verify the MAC/tag).  Unlike aes_encrypt(), the cleanup
 * label guarantees the context is freed on both success and failure of
 * the update.
 */
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext, req);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/* all CCM input must have been consumed by the update */
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}
631 631
632 632
/* ARGSUSED */
/*
 * Multi-part encrypt update.  Processes as many whole AES blocks as are
 * available (remainder is carried in the context), dispatching on the
 * crypto_data format of the plaintext.  For CTR mode, any trailing
 * partial block is flushed through ctr_mode_final() since CTR is a
 * stream cipher.  On success cd_length is set to the bytes produced.
 */
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(plaintext, ciphertext);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}
707 707
708 708
/*
 * Multi-part decrypt update.  Mirrors aes_encrypt_update(): processes
 * whole blocks by crypto_data format and flushes any CTR-mode remainder.
 * The whole-block output-size pre-check is skipped for CCM/GCM/GMAC since
 * those modes buffer ciphertext and emit no plaintext during updates.
 */
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext, crypto_req_handle_t req)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	AES_ARG_INPLACE(ciphertext, plaintext);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/* GCM/GMAC buffer internally; tell them how they may allocate */
	if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE))
		gcm_set_kmflag((gcm_ctx_t *)aes_ctx, crypto_kmflag(req));

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	case CRYPTO_DATA_MBLK:
		ret = crypto_update_mp(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks,
		    aes_copy_block64);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;


	return (ret);
}
794 794
/* ARGSUSED */
/*
 * Multi-part encrypt final.  Flushes mode-specific leftover state:
 * CTR emits any remaining partial block, CCM/GCM/GMAC compute and append
 * the MAC/tag, and plain block modes require the input to have been an
 * exact multiple of the block size.  Frees the context on success; early
 * error returns leave it allocated (presumably KCF reclaims it — confirm).
 */
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * This happens if the length of the last data is
		 * not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
852 852
/* ARGSUSED */
/*
 * Multi-part decrypt final.  Handles any trailing CTR partial block, then
 * for CCM/GCM/GMAC releases the buffered plaintext after verifying the
 * MAC/tag; plain block modes require all input to have been block-aligned.
 * Frees the context on success; MAC-verification failures return early
 * without freeing it (presumably KCF reclaims the ctx — confirm).
 */
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data,
    crypto_req_handle_t req)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO &&
	    data->cd_format != CRYPTO_DATA_MBLK) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * This happens if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			/* CTR is a stream cipher: flush the partial block */
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned, make sure
		 * the plaintext buffer is big enough
		 */
		gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}


	/* plain block modes produce no output at final time */
	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}
956 956
957 957 /* ARGSUSED */
958 958 static int
959 959 aes_encrypt_atomic(crypto_provider_handle_t provider,
960 960 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
961 961 crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
962 962 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
963 963 {
964 964 aes_ctx_t aes_ctx; /* on the stack */
965 965 off_t saved_offset;
966 966 size_t saved_length;
967 967 size_t length_needed;
968 968 int ret;
969 969
970 970 AES_ARG_INPLACE(plaintext, ciphertext);
971 971
972 972 /*
973 973 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
974 974 * be a multiple of AES block size.
975 975 */
976 976 switch (mechanism->cm_type) {
977 977 case AES_CTR_MECH_INFO_TYPE:
978 978 case AES_CCM_MECH_INFO_TYPE:
979 979 case AES_GCM_MECH_INFO_TYPE:
980 980 case AES_GMAC_MECH_INFO_TYPE:
981 981 break;
982 982 default:
983 983 if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
984 984 return (CRYPTO_DATA_LEN_RANGE);
985 985 }
986 986
987 987 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
988 988 return (ret);
989 989
990 990 bzero(&aes_ctx, sizeof (aes_ctx_t));
991 991
992 992 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
993 993 crypto_kmflag(req), B_TRUE);
994 994 if (ret != CRYPTO_SUCCESS)
995 995 return (ret);
996 996
997 997 switch (mechanism->cm_type) {
998 998 case AES_CCM_MECH_INFO_TYPE:
999 999 length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
1000 1000 break;
1001 1001 case AES_GMAC_MECH_INFO_TYPE:
1002 1002 if (plaintext->cd_length != 0)
1003 1003 return (CRYPTO_ARGUMENTS_BAD);
1004 1004 /* FALLTHRU */
1005 1005 case AES_GCM_MECH_INFO_TYPE:
1006 1006 length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
1007 1007 break;
1008 1008 default:
1009 1009 length_needed = plaintext->cd_length;
1010 1010 }
1011 1011
1012 1012 /* return size of buffer needed to store output */
1013 1013 if (ciphertext->cd_length < length_needed) {
1014 1014 ciphertext->cd_length = length_needed;
1015 1015 ret = CRYPTO_BUFFER_TOO_SMALL;
1016 1016 goto out;
1017 1017 }
1018 1018
1019 1019 saved_offset = ciphertext->cd_offset;
1020 1020 saved_length = ciphertext->cd_length;
1021 1021
1022 1022 /*
1023 1023 * Do an update on the specified input data.
1024 1024 */
1025 1025 switch (plaintext->cd_format) {
1026 1026 case CRYPTO_DATA_RAW:
1027 1027 ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
1028 1028 aes_encrypt_contiguous_blocks, aes_copy_block64);
1029 1029 break;
1030 1030 case CRYPTO_DATA_UIO:
1031 1031 ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
1032 1032 aes_encrypt_contiguous_blocks, aes_copy_block64);
1033 1033 break;
1034 1034 case CRYPTO_DATA_MBLK:
1035 1035 ret = crypto_update_mp(&aes_ctx, plaintext, ciphertext,
1036 1036 aes_encrypt_contiguous_blocks, aes_copy_block64);
1037 1037 break;
1038 1038 default:
1039 1039 ret = CRYPTO_ARGUMENTS_BAD;
1040 1040 }
1041 1041
1042 1042 if (ret == CRYPTO_SUCCESS) {
1043 1043 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1044 1044 ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
1045 1045 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1046 1046 aes_xor_block);
1047 1047 if (ret != CRYPTO_SUCCESS)
1048 1048 goto out;
1049 1049 ASSERT(aes_ctx.ac_remainder_len == 0);
1050 1050 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1051 1051 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1052 1052 ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
1053 1053 ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
1054 1054 aes_copy_block, aes_xor_block);
1055 1055 if (ret != CRYPTO_SUCCESS)
1056 1056 goto out;
1057 1057 ASSERT(aes_ctx.ac_remainder_len == 0);
1058 1058 } else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
1059 1059 if (aes_ctx.ac_remainder_len > 0) {
1060 1060 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1061 1061 ciphertext, aes_encrypt_block);
1062 1062 if (ret != CRYPTO_SUCCESS)
1063 1063 goto out;
1064 1064 }
1065 1065 } else {
1066 1066 ASSERT(aes_ctx.ac_remainder_len == 0);
1067 1067 }
1068 1068
1069 1069 if (plaintext != ciphertext) {
1070 1070 ciphertext->cd_length =
1071 1071 ciphertext->cd_offset - saved_offset;
1072 1072 }
1073 1073 } else {
1074 1074 ciphertext->cd_length = saved_length;
1075 1075 }
1076 1076 ciphertext->cd_offset = saved_offset;
1077 1077
1078 1078 out:
1079 1079 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1080 1080 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1081 1081 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1082 1082 }
1083 1083
1084 1084 return (ret);
1085 1085 }
1086 1086
1087 1087 /* ARGSUSED */
1088 1088 static int
1089 1089 aes_decrypt_atomic(crypto_provider_handle_t provider,
1090 1090 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1091 1091 crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
1092 1092 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1093 1093 {
1094 1094 aes_ctx_t aes_ctx; /* on the stack */
1095 1095 off_t saved_offset;
1096 1096 size_t saved_length;
1097 1097 size_t length_needed;
1098 1098 int ret;
1099 1099
1100 1100 AES_ARG_INPLACE(ciphertext, plaintext);
1101 1101
1102 1102 /*
1103 1103 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
1104 1104 * be a multiple of AES block size.
1105 1105 */
1106 1106 switch (mechanism->cm_type) {
1107 1107 case AES_CTR_MECH_INFO_TYPE:
1108 1108 case AES_CCM_MECH_INFO_TYPE:
1109 1109 case AES_GCM_MECH_INFO_TYPE:
1110 1110 case AES_GMAC_MECH_INFO_TYPE:
1111 1111 break;
1112 1112 default:
1113 1113 if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
1114 1114 return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
1115 1115 }
1116 1116
1117 1117 if ((ret = aes_check_mech_param(mechanism, NULL, 0)) != CRYPTO_SUCCESS)
1118 1118 return (ret);
1119 1119
1120 1120 bzero(&aes_ctx, sizeof (aes_ctx_t));
1121 1121
1122 1122 ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
1123 1123 crypto_kmflag(req), B_FALSE);
1124 1124 if (ret != CRYPTO_SUCCESS)
1125 1125 return (ret);
1126 1126
1127 1127 switch (mechanism->cm_type) {
1128 1128 case AES_CCM_MECH_INFO_TYPE:
1129 1129 length_needed = aes_ctx.ac_data_len;
1130 1130 break;
1131 1131 case AES_GCM_MECH_INFO_TYPE:
1132 1132 length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
1133 1133 break;
1134 1134 case AES_GMAC_MECH_INFO_TYPE:
1135 1135 if (plaintext->cd_length != 0)
1136 1136 return (CRYPTO_ARGUMENTS_BAD);
1137 1137 length_needed = 0;
1138 1138 break;
1139 1139 default:
1140 1140 length_needed = ciphertext->cd_length;
1141 1141 }
1142 1142
1143 1143 /* return size of buffer needed to store output */
1144 1144 if (plaintext->cd_length < length_needed) {
1145 1145 plaintext->cd_length = length_needed;
1146 1146 ret = CRYPTO_BUFFER_TOO_SMALL;
1147 1147 goto out;
1148 1148 }
1149 1149
1150 1150 saved_offset = plaintext->cd_offset;
1151 1151 saved_length = plaintext->cd_length;
1152 1152
1153 1153 if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1154 1154 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE)
1155 1155 gcm_set_kmflag((gcm_ctx_t *)&aes_ctx, crypto_kmflag(req));
1156 1156
1157 1157 /*
1158 1158 * Do an update on the specified input data.
1159 1159 */
1160 1160 switch (ciphertext->cd_format) {
1161 1161 case CRYPTO_DATA_RAW:
1162 1162 ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
1163 1163 aes_decrypt_contiguous_blocks, aes_copy_block64);
1164 1164 break;
1165 1165 case CRYPTO_DATA_UIO:
1166 1166 ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
1167 1167 aes_decrypt_contiguous_blocks, aes_copy_block64);
1168 1168 break;
1169 1169 case CRYPTO_DATA_MBLK:
1170 1170 ret = crypto_update_mp(&aes_ctx, ciphertext, plaintext,
1171 1171 aes_decrypt_contiguous_blocks, aes_copy_block64);
1172 1172 break;
1173 1173 default:
1174 1174 ret = CRYPTO_ARGUMENTS_BAD;
1175 1175 }
1176 1176
1177 1177 if (ret == CRYPTO_SUCCESS) {
1178 1178 if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
1179 1179 ASSERT(aes_ctx.ac_processed_data_len
1180 1180 == aes_ctx.ac_data_len);
1181 1181 ASSERT(aes_ctx.ac_processed_mac_len
1182 1182 == aes_ctx.ac_mac_len);
1183 1183 ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
1184 1184 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1185 1185 aes_copy_block, aes_xor_block);
1186 1186 ASSERT(aes_ctx.ac_remainder_len == 0);
1187 1187 if ((ret == CRYPTO_SUCCESS) &&
1188 1188 (ciphertext != plaintext)) {
1189 1189 plaintext->cd_length =
1190 1190 plaintext->cd_offset - saved_offset;
1191 1191 } else {
1192 1192 plaintext->cd_length = saved_length;
1193 1193 }
1194 1194 } else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
1195 1195 mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
1196 1196 ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
1197 1197 plaintext, AES_BLOCK_LEN, aes_encrypt_block,
1198 1198 aes_xor_block);
1199 1199 ASSERT(aes_ctx.ac_remainder_len == 0);
1200 1200 if ((ret == CRYPTO_SUCCESS) &&
1201 1201 (ciphertext != plaintext)) {
1202 1202 plaintext->cd_length =
1203 1203 plaintext->cd_offset - saved_offset;
1204 1204 } else {
1205 1205 plaintext->cd_length = saved_length;
1206 1206 }
1207 1207 } else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
1208 1208 ASSERT(aes_ctx.ac_remainder_len == 0);
1209 1209 if (ciphertext != plaintext)
1210 1210 plaintext->cd_length =
1211 1211 plaintext->cd_offset - saved_offset;
1212 1212 } else {
1213 1213 if (aes_ctx.ac_remainder_len > 0) {
1214 1214 ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
1215 1215 plaintext, aes_encrypt_block);
1216 1216 if (ret == CRYPTO_DATA_LEN_RANGE)
1217 1217 ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
1218 1218 if (ret != CRYPTO_SUCCESS)
1219 1219 goto out;
1220 1220 }
1221 1221 if (ciphertext != plaintext)
1222 1222 plaintext->cd_length =
1223 1223 plaintext->cd_offset - saved_offset;
1224 1224 }
1225 1225 } else {
1226 1226 plaintext->cd_length = saved_length;
1227 1227 }
1228 1228 plaintext->cd_offset = saved_offset;
1229 1229
1230 1230 out:
1231 1231 if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1232 1232 bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1233 1233 kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
1234 1234 }
1235 1235
1236 1236 if (aes_ctx.ac_flags & CCM_MODE) {
1237 1237 if (aes_ctx.ac_pt_buf != NULL) {
1238 1238 kmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
1239 1239 }
1240 1240 } else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
1241 1241 if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
1242 1242 kmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
1243 1243 ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
1244 1244 }
1245 1245 }
1246 1246
1247 1247 return (ret);
1248 1248 }
1249 1249
1250 1250 /*
1251 1251 * KCF software provider context template entry points.
1252 1252 */
1253 1253 /* ARGSUSED */
1254 1254 static int
1255 1255 aes_create_ctx_template(crypto_provider_handle_t provider,
1256 1256 crypto_mechanism_t *mechanism, crypto_key_t *key,
1257 1257 crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size, crypto_req_handle_t req)
1258 1258 {
1259 1259 void *keysched;
1260 1260 size_t size;
1261 1261 int rv;
1262 1262
1263 1263 if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
1264 1264 mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
1265 1265 mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
1266 1266 mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
1267 1267 mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
1268 1268 mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
1269 1269 return (CRYPTO_MECHANISM_INVALID);
1270 1270
1271 1271 if ((keysched = aes_alloc_keysched(&size,
1272 1272 crypto_kmflag(req))) == NULL) {
1273 1273 return (CRYPTO_HOST_MEMORY);
1274 1274 }
1275 1275
1276 1276 /*
1277 1277 * Initialize key schedule. Key length information is stored
1278 1278 * in the key.
1279 1279 */
1280 1280 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1281 1281 bzero(keysched, size);
1282 1282 kmem_free(keysched, size);
1283 1283 return (rv);
1284 1284 }
1285 1285
1286 1286 *tmpl = keysched;
1287 1287 *tmpl_size = size;
1288 1288
1289 1289 return (CRYPTO_SUCCESS);
1290 1290 }
1291 1291
1292 1292
1293 1293 static int
1294 1294 aes_free_context(crypto_ctx_t *ctx)
1295 1295 {
1296 1296 aes_ctx_t *aes_ctx = ctx->cc_provider_private;
1297 1297
1298 1298 if (aes_ctx != NULL) {
1299 1299 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1300 1300 ASSERT(aes_ctx->ac_keysched_len != 0);
1301 1301 bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
1302 1302 kmem_free(aes_ctx->ac_keysched,
1303 1303 aes_ctx->ac_keysched_len);
1304 1304 }
1305 1305 crypto_free_mode_ctx(aes_ctx);
1306 1306 ctx->cc_provider_private = NULL;
1307 1307 }
1308 1308
1309 1309 return (CRYPTO_SUCCESS);
1310 1310 }
1311 1311
1312 1312
1313 1313 static int
1314 1314 aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
1315 1315 crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
1316 1316 boolean_t is_encrypt_init)
1317 1317 {
1318 1318 int rv = CRYPTO_SUCCESS;
1319 1319 void *keysched;
1320 1320 size_t size;
1321 1321
1322 1322 if (template == NULL) {
1323 1323 if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
1324 1324 return (CRYPTO_HOST_MEMORY);
1325 1325 /*
1326 1326 * Initialize key schedule.
1327 1327 * Key length is stored in the key.
1328 1328 */
1329 1329 if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
1330 1330 kmem_free(keysched, size);
1331 1331 return (rv);
1332 1332 }
1333 1333
1334 1334 aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
1335 1335 aes_ctx->ac_keysched_len = size;
1336 1336 } else {
1337 1337 keysched = template;
1338 1338 }
1339 1339 aes_ctx->ac_keysched = keysched;
1340 1340
1341 1341 switch (mechanism->cm_type) {
1342 1342 case AES_CBC_MECH_INFO_TYPE:
1343 1343 rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
1344 1344 mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
1345 1345 break;
1346 1346 case AES_CTR_MECH_INFO_TYPE: {
1347 1347 CK_AES_CTR_PARAMS *pp;
1348 1348
1349 1349 if (mechanism->cm_param == NULL ||
1350 1350 mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
1351 1351 return (CRYPTO_MECHANISM_PARAM_INVALID);
1352 1352 }
1353 1353 pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
1354 1354 rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
1355 1355 pp->cb, aes_copy_block);
1356 1356 break;
1357 1357 }
1358 1358 case AES_CCM_MECH_INFO_TYPE:
1359 1359 if (mechanism->cm_param == NULL ||
1360 1360 mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
1361 1361 return (CRYPTO_MECHANISM_PARAM_INVALID);
1362 1362 }
1363 1363 rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
1364 1364 kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
1365 1365 aes_xor_block);
1366 1366 break;
1367 1367 case AES_GCM_MECH_INFO_TYPE:
1368 1368 if (mechanism->cm_param == NULL ||
1369 1369 mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
1370 1370 return (CRYPTO_MECHANISM_PARAM_INVALID);
1371 1371 }
1372 1372 rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1373 1373 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1374 1374 aes_xor_block);
1375 1375 break;
1376 1376 case AES_GMAC_MECH_INFO_TYPE:
1377 1377 if (mechanism->cm_param == NULL ||
1378 1378 mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
1379 1379 return (CRYPTO_MECHANISM_PARAM_INVALID);
1380 1380 }
1381 1381 rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
1382 1382 AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
1383 1383 aes_xor_block);
1384 1384 break;
1385 1385 case AES_ECB_MECH_INFO_TYPE:
1386 1386 aes_ctx->ac_flags |= ECB_MODE;
1387 1387 }
1388 1388
1389 1389 if (rv != CRYPTO_SUCCESS) {
1390 1390 if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
1391 1391 bzero(keysched, size);
1392 1392 kmem_free(keysched, size);
1393 1393 }
1394 1394 }
1395 1395
1396 1396 return (rv);
1397 1397 }
1398 1398
1399 1399 static int
1400 1400 process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
1401 1401 CK_AES_GCM_PARAMS *gcm_params)
1402 1402 {
1403 1403 /* LINTED: pointer alignment */
1404 1404 CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;
1405 1405
1406 1406 if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
1407 1407 return (CRYPTO_MECHANISM_INVALID);
1408 1408
1409 1409 if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
1410 1410 return (CRYPTO_MECHANISM_PARAM_INVALID);
1411 1411
1412 1412 if (params->pIv == NULL)
1413 1413 return (CRYPTO_MECHANISM_PARAM_INVALID);
1414 1414
1415 1415 gcm_params->pIv = params->pIv;
1416 1416 gcm_params->ulIvLen = AES_GMAC_IV_LEN;
1417 1417 gcm_params->ulTagBits = AES_GMAC_TAG_BITS;
1418 1418
1419 1419 if (data == NULL)
1420 1420 return (CRYPTO_SUCCESS);
1421 1421
1422 1422 if (data->cd_format != CRYPTO_DATA_RAW)
1423 1423 return (CRYPTO_ARGUMENTS_BAD);
1424 1424
1425 1425 gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
1426 1426 gcm_params->ulAADLen = data->cd_length;
1427 1427 return (CRYPTO_SUCCESS);
1428 1428 }
1429 1429
1430 1430 static int
1431 1431 aes_mac_atomic(crypto_provider_handle_t provider,
1432 1432 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1433 1433 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1434 1434 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1435 1435 {
1436 1436 CK_AES_GCM_PARAMS gcm_params;
1437 1437 crypto_mechanism_t gcm_mech;
1438 1438 int rv;
1439 1439
1440 1440 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1441 1441 != CRYPTO_SUCCESS)
1442 1442 return (rv);
1443 1443
1444 1444 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1445 1445 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1446 1446 gcm_mech.cm_param = (char *)&gcm_params;
1447 1447
1448 1448 return (aes_encrypt_atomic(provider, session_id, &gcm_mech,
1449 1449 key, &null_crypto_data, mac, template, req));
1450 1450 }
1451 1451
1452 1452 static int
1453 1453 aes_mac_verify_atomic(crypto_provider_handle_t provider,
1454 1454 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1455 1455 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1456 1456 crypto_spi_ctx_template_t template, crypto_req_handle_t req)
1457 1457 {
1458 1458 CK_AES_GCM_PARAMS gcm_params;
1459 1459 crypto_mechanism_t gcm_mech;
1460 1460 int rv;
1461 1461
1462 1462 if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
1463 1463 != CRYPTO_SUCCESS)
1464 1464 return (rv);
1465 1465
1466 1466 gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
1467 1467 gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
1468 1468 gcm_mech.cm_param = (char *)&gcm_params;
1469 1469
1470 1470 return (aes_decrypt_atomic(provider, session_id, &gcm_mech,
1471 1471 key, mac, &null_crypto_data, template, req));
1472 1472 }
↓ open down ↓ |
1256 lines elided |
↑ open up ↑ |
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX