fixup .text where possible
7127 remove -Wno-missing-braces from Makefile.uts
--- old/usr/src/uts/common/crypto/io/sha2_mod.c
+++ new/usr/src/uts/common/crypto/io/sha2_mod.c
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21
22 22 /*
23 23 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
24 24 * Use is subject to license terms.
25 25 */
26 26
27 27 #include <sys/modctl.h>
28 28 #include <sys/cmn_err.h>
29 29 #include <sys/crypto/common.h>
30 30 #include <sys/crypto/spi.h>
31 31 #include <sys/strsun.h>
32 32 #include <sys/systm.h>
33 33 #include <sys/sysmacros.h>
34 34 #define _SHA2_IMPL
35 35 #include <sys/sha2.h>
36 36 #include <sha2/sha2_impl.h>
37 37
38 38 /*
39 39 * The sha2 module is created with two modlinkages:
40 40 * - a modlmisc that allows consumers to directly call the entry points
41 41 * SHA2Init, SHA2Update, and SHA2Final.
42 42 * - a modlcrypto that allows the module to register with the Kernel
43 43 * Cryptographic Framework (KCF) as a software provider for the SHA2
44 44 * mechanisms.
45 45 */
46 46
47 47 static struct modlmisc modlmisc = {
48 48 &mod_miscops,
49 49 "SHA2 Message-Digest Algorithm"
50 50 };
51 51
52 52 static struct modlcrypto modlcrypto = {
53 53 &mod_cryptoops,
54 54 "SHA2 Kernel SW Provider"
55 55 };
56 56
57 57 static struct modlinkage modlinkage = {
58 - MODREV_1, &modlmisc, &modlcrypto, NULL
58 + MODREV_1, { &modlmisc, &modlcrypto, NULL }
59 59 };
60 60
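The hunk above is the substance of this change for sha2_mod.c: the linkage pointers inside struct modlinkage live in an array member, so the old flat initializer relied on C brace elision, which gcc flags with -Wmissing-braces once that warning is no longer suppressed in Makefile.uts. A minimal standalone sketch of the warning and of the fix applied here (the struct below is a stand-in, not the real <sys/modctl.h> definition):

#include <stddef.h>

/*
 * Stand-in for struct modlinkage: a revision word followed by an
 * array of pointers to the individual linkage structures.
 */
struct fake_linkage {
	int	fl_rev;
	void	*fl_linkage[3];
};

static int misc_stub, crypto_stub;

/* relies on brace elision; gcc -Wmissing-braces warns about this form */
static struct fake_linkage old_style = {
	1, &misc_stub, &crypto_stub, NULL
};

/* the form the hunk switches to: the inner array gets its own braces */
static struct fake_linkage new_style = {
	1, { &misc_stub, &crypto_stub, NULL }
};

The four opening braces added to sha2_prov_info further down follow the same rule: with the warning enabled, every nested aggregate level of the initialized object has to be braced explicitly.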
61 61 /*
62 62 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
63 63 * by KCF to one of the entry points.
64 64 */
65 65
66 66 #define PROV_SHA2_CTX(ctx) ((sha2_ctx_t *)(ctx)->cc_provider_private)
67 67 #define PROV_SHA2_HMAC_CTX(ctx) ((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
68 68
69 69 /* to extract the digest length passed as mechanism parameter */
70 70 #define PROV_SHA2_GET_DIGEST_LEN(m, len) { \
71 71 if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t))) \
72 72 (len) = (uint32_t)*((ulong_t *)(void *)(m)->cm_param); \
73 73 else { \
74 74 ulong_t tmp_ulong; \
75 75 bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t)); \
76 76 (len) = (uint32_t)tmp_ulong; \
77 77 } \
78 78 }
79 79
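For context on the macro above: the *_HMAC_GENERAL mechanisms carry the requested MAC length as a ulong_t reachable through the mechanism's cm_param, and since cm_param is an opaque byte pointer it may not be ulong_t-aligned, hence the bcopy fallback. A hypothetical illustration (the helper is not part of the module; the field names are those used elsewhere in this file) of how such a mechanism parameter would be populated:

/*
 * Hypothetical helper, for illustration only: ask for a MAC truncated
 * to "len" bytes by pointing cm_param at a ulong_t holding the length.
 */
static void
sha2_set_general_param(crypto_mechanism_t *mech, ulong_t *param, ulong_t len)
{
	*param = len;			/* e.g. 16 for a 128-bit MAC */
	mech->cm_param = (char *)param;
	mech->cm_param_len = sizeof (ulong_t);
}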
80 80 #define PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) { \
81 81 SHA2Init(mech, ctx); \
82 82 SHA2Update(ctx, key, len); \
83 83 SHA2Final(digest, ctx); \
84 84 }
85 85
86 86 /*
87 87 * Mechanism info structure passed to KCF during registration.
88 88 */
89 89 static crypto_mech_info_t sha2_mech_info_tab[] = {
90 90 /* SHA256 */
91 91 {SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
92 92 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
93 93 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
94 94 /* SHA256-HMAC */
95 95 {SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
96 96 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
97 97 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
98 98 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 99 /* SHA256-HMAC GENERAL */
100 100 {SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
101 101 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
102 102 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
103 103 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
104 104 /* SHA384 */
105 105 {SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
106 106 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
107 107 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
108 108 /* SHA384-HMAC */
109 109 {SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
110 110 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
111 111 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
112 112 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
113 113 /* SHA384-HMAC GENERAL */
114 114 {SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
115 115 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
116 116 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
117 117 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
118 118 /* SHA512 */
119 119 {SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
120 120 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
121 121 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 122 /* SHA512-HMAC */
123 123 {SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
124 124 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 125 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 126 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
127 127 /* SHA512-HMAC GENERAL */
128 128 {SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
129 129 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
130 130 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
131 131 CRYPTO_KEYSIZE_UNIT_IN_BYTES}
132 132 };
133 133
134 134 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
135 135
136 136 static crypto_control_ops_t sha2_control_ops = {
137 137 sha2_provider_status
138 138 };
139 139
140 140 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
141 141 crypto_req_handle_t);
142 142 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
143 143 crypto_req_handle_t);
144 144 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
145 145 crypto_req_handle_t);
146 146 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
147 147 crypto_req_handle_t);
148 148 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
149 149 crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
150 150 crypto_req_handle_t);
151 151
152 152 static crypto_digest_ops_t sha2_digest_ops = {
153 153 sha2_digest_init,
154 154 sha2_digest,
155 155 sha2_digest_update,
156 156 NULL,
157 157 sha2_digest_final,
158 158 sha2_digest_atomic
159 159 };
160 160
161 161 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
162 162 crypto_spi_ctx_template_t, crypto_req_handle_t);
163 163 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
164 164 crypto_req_handle_t);
165 165 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
166 166 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
167 167 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
168 168 crypto_spi_ctx_template_t, crypto_req_handle_t);
169 169 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
170 170 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
171 171 crypto_spi_ctx_template_t, crypto_req_handle_t);
172 172
173 173 static crypto_mac_ops_t sha2_mac_ops = {
174 174 sha2_mac_init,
175 175 NULL,
176 176 sha2_mac_update,
177 177 sha2_mac_final,
178 178 sha2_mac_atomic,
179 179 sha2_mac_verify_atomic
180 180 };
181 181
182 182 static int sha2_create_ctx_template(crypto_provider_handle_t,
183 183 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
184 184 size_t *, crypto_req_handle_t);
185 185 static int sha2_free_context(crypto_ctx_t *);
186 186
187 187 static crypto_ctx_ops_t sha2_ctx_ops = {
188 188 sha2_create_ctx_template,
189 189 sha2_free_context
190 190 };
191 191
192 192 static crypto_ops_t sha2_crypto_ops = {
193 - &sha2_control_ops,
194 - &sha2_digest_ops,
195 - NULL,
196 - &sha2_mac_ops,
197 - NULL,
198 - NULL,
199 - NULL,
200 - NULL,
201 - NULL,
202 - NULL,
203 - NULL,
204 - NULL,
205 - NULL,
206 - &sha2_ctx_ops,
207 - NULL,
208 - NULL,
209 - NULL,
193 + .co_control_ops = &sha2_control_ops,
194 + .co_digest_ops = &sha2_digest_ops,
195 + .co_mac_ops = &sha2_mac_ops,
196 + .co_ctx_ops = &sha2_ctx_ops
210 197 };
211 198
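The rewrite of sha2_crypto_ops above leans on C99 designated initializers: any member not named in the initializer is implicitly zero (NULL for pointers), so the long positional run of NULL slots disappears and the table keeps working if new members are ever appended to crypto_ops_t. A small self-contained sketch of the idiom (the struct is a stand-in, not the real crypto_ops_t):

/* stand-in ops vector; the real crypto_ops_t has many more members */
struct ops_vec {
	void	*co_control_ops;
	void	*co_digest_ops;
	void	*co_cipher_ops;		/* not provided by sha2 */
	void	*co_mac_ops;
	void	*co_ctx_ops;
};

static int control_impl, digest_impl, mac_impl, ctx_impl;

/*
 * Only the members that matter are named; co_cipher_ops, and anything
 * added to the struct later, starts out as NULL automatically.
 */
static struct ops_vec example_ops = {
	.co_control_ops	= &control_impl,
	.co_digest_ops	= &digest_impl,
	.co_mac_ops	= &mac_impl,
	.co_ctx_ops	= &ctx_impl
};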
212 -static crypto_provider_info_t sha2_prov_info = {
199 +static crypto_provider_info_t sha2_prov_info = {{{{
213 200 CRYPTO_SPI_VERSION_4,
214 201 "SHA2 Software Provider",
215 202 CRYPTO_SW_PROVIDER,
216 203 {&modlinkage},
217 204 NULL,
218 205 &sha2_crypto_ops,
219 206 sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
220 207 sha2_mech_info_tab
221 -};
208 +}}}};
222 209
223 210 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
224 211
225 212 int
226 213 _init()
227 214 {
228 215 int ret;
229 216
230 217 if ((ret = mod_install(&modlinkage)) != 0)
231 218 return (ret);
232 219
233 220 /*
234 221 * Register with KCF. If the registration fails, do not uninstall the
235 222 * module, since the functionality provided by misc/sha2 should still
236 223 * be available.
237 224 */
238 225 (void) crypto_register_provider(&sha2_prov_info, &sha2_prov_handle);
239 226
240 227 return (0);
241 228 }
242 229
243 230 int
244 231 _info(struct modinfo *modinfop)
245 232 {
246 233 return (mod_info(&modlinkage, modinfop));
247 234 }
248 235
249 236 /*
250 237 * KCF software provider control entry points.
251 238 */
252 239 /* ARGSUSED */
253 240 static void
254 241 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
255 242 {
256 243 *status = CRYPTO_PROVIDER_READY;
257 244 }
258 245
259 246 /*
260 247 * KCF software provider digest entry points.
261 248 */
262 249
263 250 static int
264 251 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
265 252 crypto_req_handle_t req)
266 253 {
267 254
268 255 /*
269 256 * Allocate and initialize SHA2 context.
270 257 */
271 258 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
272 259 crypto_kmflag(req));
273 260 if (ctx->cc_provider_private == NULL)
274 261 return (CRYPTO_HOST_MEMORY);
275 262
276 263 PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
277 264 SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
278 265
279 266 return (CRYPTO_SUCCESS);
280 267 }
281 268
282 269 /*
283 270 * Helper SHA2 digest update function for uio data.
284 271 */
285 272 static int
286 273 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
287 274 {
288 275 off_t offset = data->cd_offset;
289 276 size_t length = data->cd_length;
290 277 uint_t vec_idx;
291 278 size_t cur_len;
292 279
293 280 /* we support only kernel buffer */
294 281 if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
295 282 return (CRYPTO_ARGUMENTS_BAD);
296 283
297 284 /*
298 285 * Jump to the first iovec containing data to be
299 286 * digested.
300 287 */
301 288 for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
302 289 offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
303 290 offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
304 291 ;
305 292 if (vec_idx == data->cd_uio->uio_iovcnt) {
306 293 /*
307 294 * The caller specified an offset that is larger than the
308 295 * total size of the buffers it provided.
309 296 */
310 297 return (CRYPTO_DATA_LEN_RANGE);
311 298 }
312 299
313 300 /*
314 301 * Now do the digesting on the iovecs.
315 302 */
316 303 while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
317 304 cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
318 305 offset, length);
319 306
320 307 SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
321 308 uio_iov[vec_idx].iov_base + offset, cur_len);
322 309 length -= cur_len;
323 310 vec_idx++;
324 311 offset = 0;
325 312 }
326 313
327 314 if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
328 315 /*
329 316 * The end of the specified iovec's was reached but
330 317 * the length requested could not be processed, i.e.
331 318 * The caller requested to digest more data than it provided.
332 319 */
333 320 return (CRYPTO_DATA_LEN_RANGE);
334 321 }
335 322
336 323 return (CRYPTO_SUCCESS);
337 324 }
338 325
339 326 /*
340 327 * Helper SHA2 digest final function for uio data.
341 328 * digest_len is the length of the desired digest. If digest_len
342 329 * is smaller than the default SHA2 digest length, the caller
343 330 * must pass a scratch buffer, digest_scratch, which must
344 331 * be at least the algorithm's digest length bytes.
345 332 */
346 333 static int
347 334 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
348 335 ulong_t digest_len, uchar_t *digest_scratch)
349 336 {
350 337 off_t offset = digest->cd_offset;
351 338 uint_t vec_idx;
352 339
353 340 /* we support only kernel buffer */
354 341 if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
355 342 return (CRYPTO_ARGUMENTS_BAD);
356 343
357 344 /*
358 345 * Jump to the first iovec containing ptr to the digest to
359 346 * be returned.
360 347 */
361 348 for (vec_idx = 0; offset >= digest->cd_uio->uio_iov[vec_idx].iov_len &&
362 349 vec_idx < digest->cd_uio->uio_iovcnt;
363 350 offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
364 351 ;
365 352 if (vec_idx == digest->cd_uio->uio_iovcnt) {
366 353 /*
367 354 * The caller specified an offset that is
368 355 * larger than the total size of the buffers
369 356 * it provided.
370 357 */
371 358 return (CRYPTO_DATA_LEN_RANGE);
372 359 }
373 360
374 361 if (offset + digest_len <=
375 362 digest->cd_uio->uio_iov[vec_idx].iov_len) {
376 363 /*
377 364 * The computed SHA2 digest will fit in the current
378 365 * iovec.
379 366 */
380 367 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
381 368 (digest_len != SHA256_DIGEST_LENGTH)) ||
382 369 ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
383 370 (digest_len != SHA512_DIGEST_LENGTH))) {
384 371 /*
385 372 * The caller requested a short digest. Digest
386 373 * into a scratch buffer and return to
387 374 * the user only what was requested.
388 375 */
389 376 SHA2Final(digest_scratch, sha2_ctx);
390 377
391 378 bcopy(digest_scratch, (uchar_t *)digest->
392 379 cd_uio->uio_iov[vec_idx].iov_base + offset,
393 380 digest_len);
394 381 } else {
395 382 SHA2Final((uchar_t *)digest->
396 383 cd_uio->uio_iov[vec_idx].iov_base + offset,
397 384 sha2_ctx);
398 385
399 386 }
400 387 } else {
401 388 /*
402 389 * The computed digest will be crossing one or more iovec's.
403 390 * This is bad performance-wise but we need to support it.
404 391 * Allocate a small scratch buffer on the stack and
405 392 * copy it piecemeal to the specified digest iovec's.
406 393 */
407 394 uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
408 395 off_t scratch_offset = 0;
409 396 size_t length = digest_len;
410 397 size_t cur_len;
411 398
412 399 SHA2Final(digest_tmp, sha2_ctx);
413 400
414 401 while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
415 402 cur_len =
416 403 MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
417 404 offset, length);
418 405 bcopy(digest_tmp + scratch_offset,
419 406 digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
420 407 cur_len);
421 408
422 409 length -= cur_len;
423 410 vec_idx++;
424 411 scratch_offset += cur_len;
425 412 offset = 0;
426 413 }
427 414
428 415 if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
429 416 /*
430 417 * The end of the specified iovec's was reached but
431 418 * the length requested could not be processed, i.e.
432 419 * The caller requested to digest more data than it
433 420 * provided.
434 421 */
435 422 return (CRYPTO_DATA_LEN_RANGE);
436 423 }
437 424 }
438 425
439 426 return (CRYPTO_SUCCESS);
440 427 }
441 428
442 429 /*
443 430 * Helper SHA2 digest update for mblk's.
444 431 */
445 432 static int
446 433 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
447 434 {
448 435 off_t offset = data->cd_offset;
449 436 size_t length = data->cd_length;
450 437 mblk_t *mp;
451 438 size_t cur_len;
452 439
453 440 /*
454 441 * Jump to the first mblk_t containing data to be digested.
455 442 */
456 443 for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
457 444 offset -= MBLKL(mp), mp = mp->b_cont)
458 445 ;
459 446 if (mp == NULL) {
460 447 /*
461 448 * The caller specified an offset that is larger than the
462 449 * total size of the buffers it provided.
463 450 */
464 451 return (CRYPTO_DATA_LEN_RANGE);
465 452 }
466 453
467 454 /*
468 455 * Now do the digesting on the mblk chain.
469 456 */
470 457 while (mp != NULL && length > 0) {
471 458 cur_len = MIN(MBLKL(mp) - offset, length);
472 459 SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
473 460 length -= cur_len;
474 461 offset = 0;
475 462 mp = mp->b_cont;
476 463 }
477 464
478 465 if (mp == NULL && length > 0) {
479 466 /*
480 467 * The end of the mblk was reached but the length requested
481 468 * could not be processed, i.e. The caller requested
482 469 * to digest more data than it provided.
483 470 */
484 471 return (CRYPTO_DATA_LEN_RANGE);
485 472 }
486 473
487 474 return (CRYPTO_SUCCESS);
488 475 }
489 476
490 477 /*
491 478 * Helper SHA2 digest final for mblk's.
492 479 * digest_len is the length of the desired digest. If digest_len
493 480 * is smaller than the default SHA2 digest length, the caller
494 481 * must pass a scratch buffer, digest_scratch, which must
495 482 * be at least the algorithm's digest length bytes.
496 483 */
497 484 static int
498 485 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
499 486 ulong_t digest_len, uchar_t *digest_scratch)
500 487 {
501 488 off_t offset = digest->cd_offset;
502 489 mblk_t *mp;
503 490
504 491 /*
505 492 * Jump to the first mblk_t that will be used to store the digest.
506 493 */
507 494 for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
508 495 offset -= MBLKL(mp), mp = mp->b_cont)
509 496 ;
510 497 if (mp == NULL) {
511 498 /*
512 499 * The caller specified an offset that is larger than the
513 500 * total size of the buffers it provided.
514 501 */
515 502 return (CRYPTO_DATA_LEN_RANGE);
516 503 }
517 504
518 505 if (offset + digest_len <= MBLKL(mp)) {
519 506 /*
520 507 * The computed SHA2 digest will fit in the current mblk.
521 508 * Do the SHA2Final() in-place.
522 509 */
523 510 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
524 511 (digest_len != SHA256_DIGEST_LENGTH)) ||
525 512 ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
526 513 (digest_len != SHA512_DIGEST_LENGTH))) {
527 514 /*
528 515 * The caller requested a short digest. Digest
529 516 * into a scratch buffer and return to
530 517 * the user only what was requested.
531 518 */
532 519 SHA2Final(digest_scratch, sha2_ctx);
533 520 bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
534 521 } else {
535 522 SHA2Final(mp->b_rptr + offset, sha2_ctx);
536 523 }
537 524 } else {
538 525 /*
539 526 * The computed digest will be crossing one or more mblk's.
540 527 * This is bad performance-wise but we need to support it.
541 528 * Allocate a small scratch buffer on the stack and
542 529 * copy it piecemeal to the specified digest mblk's.
543 530 */
544 531 uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
545 532 off_t scratch_offset = 0;
546 533 size_t length = digest_len;
547 534 size_t cur_len;
548 535
549 536 SHA2Final(digest_tmp, sha2_ctx);
550 537
551 538 while (mp != NULL && length > 0) {
552 539 cur_len = MIN(MBLKL(mp) - offset, length);
553 540 bcopy(digest_tmp + scratch_offset,
554 541 mp->b_rptr + offset, cur_len);
555 542
556 543 length -= cur_len;
557 544 mp = mp->b_cont;
558 545 scratch_offset += cur_len;
559 546 offset = 0;
560 547 }
561 548
562 549 if (mp == NULL && length > 0) {
563 550 /*
564 551 * The end of the specified mblk was reached but
565 552 * the length requested could not be processed, i.e.
566 553 * The caller requested to digest more data than it
567 554 * provided.
568 555 */
569 556 return (CRYPTO_DATA_LEN_RANGE);
570 557 }
571 558 }
572 559
573 560 return (CRYPTO_SUCCESS);
574 561 }
575 562
576 563 /* ARGSUSED */
577 564 static int
578 565 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
579 566 crypto_req_handle_t req)
580 567 {
581 568 int ret = CRYPTO_SUCCESS;
582 569 uint_t sha_digest_len;
583 570
584 571 ASSERT(ctx->cc_provider_private != NULL);
585 572
586 573 switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
587 574 case SHA256_MECH_INFO_TYPE:
588 575 sha_digest_len = SHA256_DIGEST_LENGTH;
589 576 break;
590 577 case SHA384_MECH_INFO_TYPE:
591 578 sha_digest_len = SHA384_DIGEST_LENGTH;
592 579 break;
593 580 case SHA512_MECH_INFO_TYPE:
594 581 sha_digest_len = SHA512_DIGEST_LENGTH;
595 582 break;
596 583 default:
597 584 return (CRYPTO_MECHANISM_INVALID);
598 585 }
599 586
600 587 /*
601 588 * We need to just return the length needed to store the output.
602 589 * We should not destroy the context for the following cases.
603 590 */
604 591 if ((digest->cd_length == 0) ||
605 592 (digest->cd_length < sha_digest_len)) {
606 593 digest->cd_length = sha_digest_len;
607 594 return (CRYPTO_BUFFER_TOO_SMALL);
608 595 }
609 596
610 597 /*
611 598 * Do the SHA2 update on the specified input data.
612 599 */
613 600 switch (data->cd_format) {
614 601 case CRYPTO_DATA_RAW:
615 602 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
616 603 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
617 604 data->cd_length);
618 605 break;
619 606 case CRYPTO_DATA_UIO:
620 607 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
621 608 data);
622 609 break;
623 610 case CRYPTO_DATA_MBLK:
624 611 ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
625 612 data);
626 613 break;
627 614 default:
628 615 ret = CRYPTO_ARGUMENTS_BAD;
629 616 }
630 617
631 618 if (ret != CRYPTO_SUCCESS) {
632 619 /* the update failed, free context and bail */
633 620 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
634 621 ctx->cc_provider_private = NULL;
635 622 digest->cd_length = 0;
636 623 return (ret);
637 624 }
638 625
639 626 /*
640 627 * Do a SHA2 final, must be done separately since the digest
641 628 * type can be different than the input data type.
642 629 */
643 630 switch (digest->cd_format) {
644 631 case CRYPTO_DATA_RAW:
645 632 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
646 633 digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
647 634 break;
648 635 case CRYPTO_DATA_UIO:
649 636 ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
650 637 digest, sha_digest_len, NULL);
651 638 break;
652 639 case CRYPTO_DATA_MBLK:
653 640 ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
654 641 digest, sha_digest_len, NULL);
655 642 break;
656 643 default:
657 644 ret = CRYPTO_ARGUMENTS_BAD;
658 645 }
659 646
660 647 /* all done, free context and return */
661 648
662 649 if (ret == CRYPTO_SUCCESS)
663 650 digest->cd_length = sha_digest_len;
664 651 else
665 652 digest->cd_length = 0;
666 653
667 654 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
668 655 ctx->cc_provider_private = NULL;
669 656 return (ret);
670 657 }
671 658
672 659 /* ARGSUSED */
673 660 static int
674 661 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
675 662 crypto_req_handle_t req)
676 663 {
677 664 int ret = CRYPTO_SUCCESS;
678 665
679 666 ASSERT(ctx->cc_provider_private != NULL);
680 667
681 668 /*
682 669 * Do the SHA2 update on the specified input data.
683 670 */
684 671 switch (data->cd_format) {
685 672 case CRYPTO_DATA_RAW:
686 673 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
687 674 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
688 675 data->cd_length);
689 676 break;
690 677 case CRYPTO_DATA_UIO:
691 678 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
692 679 data);
693 680 break;
694 681 case CRYPTO_DATA_MBLK:
695 682 ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
696 683 data);
697 684 break;
698 685 default:
699 686 ret = CRYPTO_ARGUMENTS_BAD;
700 687 }
701 688
702 689 return (ret);
703 690 }
704 691
705 692 /* ARGSUSED */
706 693 static int
707 694 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
708 695 crypto_req_handle_t req)
709 696 {
710 697 int ret = CRYPTO_SUCCESS;
711 698 uint_t sha_digest_len;
712 699
713 700 ASSERT(ctx->cc_provider_private != NULL);
714 701
715 702 switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
716 703 case SHA256_MECH_INFO_TYPE:
717 704 sha_digest_len = SHA256_DIGEST_LENGTH;
718 705 break;
719 706 case SHA384_MECH_INFO_TYPE:
720 707 sha_digest_len = SHA384_DIGEST_LENGTH;
721 708 break;
722 709 case SHA512_MECH_INFO_TYPE:
723 710 sha_digest_len = SHA512_DIGEST_LENGTH;
724 711 break;
725 712 default:
726 713 return (CRYPTO_MECHANISM_INVALID);
727 714 }
728 715
729 716 /*
730 717 * We need to just return the length needed to store the output.
731 718 * We should not destroy the context for the following cases.
732 719 */
733 720 if ((digest->cd_length == 0) ||
734 721 (digest->cd_length < sha_digest_len)) {
735 722 digest->cd_length = sha_digest_len;
736 723 return (CRYPTO_BUFFER_TOO_SMALL);
737 724 }
738 725
739 726 /*
740 727 * Do a SHA2 final.
741 728 */
742 729 switch (digest->cd_format) {
743 730 case CRYPTO_DATA_RAW:
744 731 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
745 732 digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
746 733 break;
747 734 case CRYPTO_DATA_UIO:
748 735 ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
749 736 digest, sha_digest_len, NULL);
750 737 break;
751 738 case CRYPTO_DATA_MBLK:
752 739 ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
753 740 digest, sha_digest_len, NULL);
754 741 break;
755 742 default:
756 743 ret = CRYPTO_ARGUMENTS_BAD;
757 744 }
758 745
759 746 /* all done, free context and return */
760 747
761 748 if (ret == CRYPTO_SUCCESS)
762 749 digest->cd_length = sha_digest_len;
763 750 else
764 751 digest->cd_length = 0;
765 752
766 753 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
767 754 ctx->cc_provider_private = NULL;
768 755
769 756 return (ret);
770 757 }
771 758
772 759 /* ARGSUSED */
773 760 static int
774 761 sha2_digest_atomic(crypto_provider_handle_t provider,
775 762 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
776 763 crypto_data_t *data, crypto_data_t *digest,
777 764 crypto_req_handle_t req)
778 765 {
779 766 int ret = CRYPTO_SUCCESS;
780 767 SHA2_CTX sha2_ctx;
781 768 uint32_t sha_digest_len;
782 769
783 770 /*
784 771 * Do the SHA inits.
785 772 */
786 773
787 774 SHA2Init(mechanism->cm_type, &sha2_ctx);
788 775
789 776 switch (data->cd_format) {
790 777 case CRYPTO_DATA_RAW:
791 778 SHA2Update(&sha2_ctx, (uint8_t *)data->
792 779 cd_raw.iov_base + data->cd_offset, data->cd_length);
793 780 break;
794 781 case CRYPTO_DATA_UIO:
795 782 ret = sha2_digest_update_uio(&sha2_ctx, data);
796 783 break;
797 784 case CRYPTO_DATA_MBLK:
798 785 ret = sha2_digest_update_mblk(&sha2_ctx, data);
799 786 break;
800 787 default:
801 788 ret = CRYPTO_ARGUMENTS_BAD;
802 789 }
803 790
804 791 /*
805 792 * Do the SHA updates on the specified input data.
806 793 */
807 794
808 795 if (ret != CRYPTO_SUCCESS) {
809 796 /* the update failed, bail */
810 797 digest->cd_length = 0;
811 798 return (ret);
812 799 }
813 800
814 801 if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
815 802 sha_digest_len = SHA256_DIGEST_LENGTH;
816 803 else
817 804 sha_digest_len = SHA512_DIGEST_LENGTH;
818 805
819 806 /*
820 807 * Do a SHA2 final, must be done separately since the digest
821 808 * type can be different than the input data type.
822 809 */
823 810 switch (digest->cd_format) {
824 811 case CRYPTO_DATA_RAW:
825 812 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
826 813 digest->cd_offset, &sha2_ctx);
827 814 break;
828 815 case CRYPTO_DATA_UIO:
829 816 ret = sha2_digest_final_uio(&sha2_ctx, digest,
830 817 sha_digest_len, NULL);
831 818 break;
832 819 case CRYPTO_DATA_MBLK:
833 820 ret = sha2_digest_final_mblk(&sha2_ctx, digest,
834 821 sha_digest_len, NULL);
835 822 break;
836 823 default:
837 824 ret = CRYPTO_ARGUMENTS_BAD;
838 825 }
839 826
840 827 if (ret == CRYPTO_SUCCESS)
841 828 digest->cd_length = sha_digest_len;
842 829 else
843 830 digest->cd_length = 0;
844 831
845 832 return (ret);
846 833 }
847 834
848 835 /*
849 836 * KCF software provider mac entry points.
850 837 *
851 838 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
852 839 *
853 840 * Init:
854 841 * The initialization routine initializes what we denote
855 842 * as the inner and outer contexts by doing
856 843 * - for inner context: SHA2(key XOR ipad)
857 844 * - for outer context: SHA2(key XOR opad)
858 845 *
859 846 * Update:
860 847 * Each subsequent SHA2 HMAC update will result in an
861 848 * update of the inner context with the specified data.
862 849 *
863 850 * Final:
864 851 * The SHA2 HMAC final will do a SHA2 final operation on the
865 852 * inner context, and the resulting digest will be used
866 853 * as the data for an update on the outer context. Last
867 854 * but not least, a SHA2 final on the outer context will
868 855 * be performed to obtain the SHA2 HMAC digest to return
869 856 * to the user.
870 857 */
871 858
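Pulling the scheme just described together, here is a self-contained illustration (not part of the module) of HMAC-SHA256 over a flat buffer using the misc/sha2 entry points directly. It assumes the key has already been reduced to at most SHA256_HMAC_BLOCK_SIZE bytes, which the init code below guarantees by first digesting longer keys.

/*
 * Illustration only:
 * HMAC-SHA256 = SHA256((K ^ opad) || SHA256((K ^ ipad) || msg)),
 * with K zero-padded to the block size and already <= block size here.
 */
static void
hmac_sha256_sketch(const uint8_t *key, size_t keylen,
    const uint8_t *msg, size_t msglen, uint8_t mac[SHA256_DIGEST_LENGTH])
{
	uint8_t ipad[SHA256_HMAC_BLOCK_SIZE];
	uint8_t opad[SHA256_HMAC_BLOCK_SIZE];
	uint8_t inner[SHA256_DIGEST_LENGTH];
	SHA2_CTX ctx;
	int i;

	/* key XOR ipad / key XOR opad, zero-padded to the block size */
	bzero(ipad, sizeof (ipad));
	bzero(opad, sizeof (opad));
	bcopy(key, ipad, keylen);
	bcopy(key, opad, keylen);
	for (i = 0; i < SHA256_HMAC_BLOCK_SIZE; i++) {
		ipad[i] ^= 0x36;
		opad[i] ^= 0x5c;
	}

	/* inner context: SHA256(key XOR ipad || msg) */
	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);
	SHA2Update(&ctx, ipad, sizeof (ipad));
	SHA2Update(&ctx, msg, msglen);
	SHA2Final(inner, &ctx);

	/* outer context: SHA256(key XOR opad || inner digest) */
	SHA2Init(SHA256_MECH_INFO_TYPE, &ctx);
	SHA2Update(&ctx, opad, sizeof (opad));
	SHA2Update(&ctx, inner, sizeof (inner));
	SHA2Final(mac, &ctx);
}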
872 859 /*
873 860 * Initialize a SHA2-HMAC context.
874 861 */
875 862 static void
876 863 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
877 864 {
878 865 uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
879 866 uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
880 867 int i, block_size, blocks_per_int64;
881 868
882 869 /* Determine the block size */
883 870 if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
884 871 block_size = SHA256_HMAC_BLOCK_SIZE;
885 872 blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
886 873 } else {
887 874 block_size = SHA512_HMAC_BLOCK_SIZE;
888 875 blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
889 876 }
890 877
891 878 (void) bzero(ipad, block_size);
892 879 (void) bzero(opad, block_size);
893 880 (void) bcopy(keyval, ipad, length_in_bytes);
894 881 (void) bcopy(keyval, opad, length_in_bytes);
895 882
896 883 /* XOR key with ipad (0x36) and opad (0x5c) */
897 884 for (i = 0; i < blocks_per_int64; i ++) {
898 885 ipad[i] ^= 0x3636363636363636;
899 886 opad[i] ^= 0x5c5c5c5c5c5c5c5c;
900 887 }
901 888
902 889 /* perform SHA2 on ipad */
903 890 SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
904 891 SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
905 892
906 893 /* perform SHA2 on opad */
907 894 SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
908 895 SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
909 896
910 897 }
911 898
912 899 /*
913 900 */
914 901 static int
915 902 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
916 903 crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
917 904 crypto_req_handle_t req)
918 905 {
919 906 int ret = CRYPTO_SUCCESS;
920 907 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
921 908 uint_t sha_digest_len, sha_hmac_block_size;
922 909
923 910 /*
924 911 * Set the digest length and block size to values appropriate to the
925 912 * mechanism
926 913 */
927 914 switch (mechanism->cm_type) {
928 915 case SHA256_HMAC_MECH_INFO_TYPE:
929 916 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
930 917 sha_digest_len = SHA256_DIGEST_LENGTH;
931 918 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
932 919 break;
933 920 case SHA384_HMAC_MECH_INFO_TYPE:
934 921 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
935 922 case SHA512_HMAC_MECH_INFO_TYPE:
936 923 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
937 924 sha_digest_len = SHA512_DIGEST_LENGTH;
938 925 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
939 926 break;
940 927 default:
941 928 return (CRYPTO_MECHANISM_INVALID);
942 929 }
943 930
944 931 if (key->ck_format != CRYPTO_KEY_RAW)
945 932 return (CRYPTO_ARGUMENTS_BAD);
946 933
947 934 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
948 935 crypto_kmflag(req));
949 936 if (ctx->cc_provider_private == NULL)
950 937 return (CRYPTO_HOST_MEMORY);
951 938
952 939 PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
953 940 if (ctx_template != NULL) {
954 941 /* reuse context template */
955 942 bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
956 943 sizeof (sha2_hmac_ctx_t));
957 944 } else {
958 945 /* no context template, compute context */
959 946 if (keylen_in_bytes > sha_hmac_block_size) {
960 947 uchar_t digested_key[SHA512_DIGEST_LENGTH];
961 948 sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
962 949
963 950 /*
964 951 * Hash the passed-in key to get a smaller key.
965 952 * The inner context is used since it hasn't been
966 953 * initialized yet.
967 954 */
968 955 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
969 956 &hmac_ctx->hc_icontext,
970 957 key->ck_data, keylen_in_bytes, digested_key);
971 958 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
972 959 digested_key, sha_digest_len);
973 960 } else {
974 961 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
975 962 key->ck_data, keylen_in_bytes);
976 963 }
977 964 }
978 965
979 966 /*
980 967 * Get the mechanism parameters, if applicable.
981 968 */
982 969 if (mechanism->cm_type % 3 == 2) {
983 970 if (mechanism->cm_param == NULL ||
984 971 mechanism->cm_param_len != sizeof (ulong_t))
985 972 ret = CRYPTO_MECHANISM_PARAM_INVALID;
986 973 PROV_SHA2_GET_DIGEST_LEN(mechanism,
987 974 PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
988 975 if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len > sha_digest_len)
989 976 ret = CRYPTO_MECHANISM_PARAM_INVALID;
990 977 }
991 978
992 979 if (ret != CRYPTO_SUCCESS) {
993 980 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
994 981 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
995 982 ctx->cc_provider_private = NULL;
996 983 }
997 984
998 985 return (ret);
999 986 }
1000 987
1001 988 /* ARGSUSED */
1002 989 static int
1003 990 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
1004 991 crypto_req_handle_t req)
1005 992 {
1006 993 int ret = CRYPTO_SUCCESS;
1007 994
1008 995 ASSERT(ctx->cc_provider_private != NULL);
1009 996
1010 997 /*
1011 998 * Do a SHA2 update of the inner context using the specified
1012 999 * data.
1013 1000 */
1014 1001 switch (data->cd_format) {
1015 1002 case CRYPTO_DATA_RAW:
1016 1003 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1017 1004 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1018 1005 data->cd_length);
1019 1006 break;
1020 1007 case CRYPTO_DATA_UIO:
1021 1008 ret = sha2_digest_update_uio(
1022 1009 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1023 1010 break;
1024 1011 case CRYPTO_DATA_MBLK:
1025 1012 ret = sha2_digest_update_mblk(
1026 1013 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1027 1014 break;
1028 1015 default:
1029 1016 ret = CRYPTO_ARGUMENTS_BAD;
1030 1017 }
1031 1018
1032 1019 return (ret);
1033 1020 }
1034 1021
1035 1022 /* ARGSUSED */
1036 1023 static int
1037 1024 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1038 1025 {
1039 1026 int ret = CRYPTO_SUCCESS;
1040 1027 uchar_t digest[SHA512_DIGEST_LENGTH];
1041 1028 uint32_t digest_len, sha_digest_len;
1042 1029
1043 1030 ASSERT(ctx->cc_provider_private != NULL);
1044 1031
1045 1032 /* Set the digest lengths to values appropriate to the mechanism */
1046 1033 switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1047 1034 case SHA256_HMAC_MECH_INFO_TYPE:
1048 1035 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1049 1036 break;
1050 1037 case SHA384_HMAC_MECH_INFO_TYPE:
1051 1038 sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1052 1039 break;
1053 1040 case SHA512_HMAC_MECH_INFO_TYPE:
1054 1041 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1055 1042 break;
1056 1043 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1057 1044 sha_digest_len = SHA256_DIGEST_LENGTH;
1058 1045 digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1059 1046 break;
1060 1047 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1061 1048 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1062 1049 sha_digest_len = SHA512_DIGEST_LENGTH;
1063 1050 digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1064 1051 break;
1065 1052 }
1066 1053
1067 1054 /*
1068 1055 * We need to just return the length needed to store the output.
1069 1056 * We should not destroy the context for the following cases.
1070 1057 */
1071 1058 if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1072 1059 mac->cd_length = digest_len;
1073 1060 return (CRYPTO_BUFFER_TOO_SMALL);
1074 1061 }
1075 1062
1076 1063 /*
1077 1064 * Do a SHA2 final on the inner context.
1078 1065 */
1079 1066 SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1080 1067
1081 1068 /*
1082 1069 * Do a SHA2 update on the outer context, feeding the inner
1083 1070 * digest as data.
1084 1071 */
1085 1072 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
1086 1073 sha_digest_len);
1087 1074
1088 1075 /*
1089 1076 * Do a SHA2 final on the outer context, storing the computed
1090 1077 * digest in the users buffer.
1091 1078 */
1092 1079 switch (mac->cd_format) {
1093 1080 case CRYPTO_DATA_RAW:
1094 1081 if (digest_len != sha_digest_len) {
1095 1082 /*
1096 1083 * The caller requested a short digest. Digest
1097 1084 * into a scratch buffer and return to
1098 1085 * the user only what was requested.
1099 1086 */
1100 1087 SHA2Final(digest,
1101 1088 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1102 1089 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1103 1090 mac->cd_offset, digest_len);
1104 1091 } else {
1105 1092 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1106 1093 mac->cd_offset,
1107 1094 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1108 1095 }
1109 1096 break;
1110 1097 case CRYPTO_DATA_UIO:
1111 1098 ret = sha2_digest_final_uio(
1112 1099 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1113 1100 digest_len, digest);
1114 1101 break;
1115 1102 case CRYPTO_DATA_MBLK:
1116 1103 ret = sha2_digest_final_mblk(
1117 1104 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1118 1105 digest_len, digest);
1119 1106 break;
1120 1107 default:
1121 1108 ret = CRYPTO_ARGUMENTS_BAD;
1122 1109 }
1123 1110
1124 1111 if (ret == CRYPTO_SUCCESS)
1125 1112 mac->cd_length = digest_len;
1126 1113 else
1127 1114 mac->cd_length = 0;
1128 1115
1129 1116 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1130 1117 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1131 1118 ctx->cc_provider_private = NULL;
1132 1119
1133 1120 return (ret);
1134 1121 }
1135 1122
1136 1123 #define SHA2_MAC_UPDATE(data, ctx, ret) { \
1137 1124 switch (data->cd_format) { \
1138 1125 case CRYPTO_DATA_RAW: \
1139 1126 SHA2Update(&(ctx).hc_icontext, \
1140 1127 (uint8_t *)data->cd_raw.iov_base + \
1141 1128 data->cd_offset, data->cd_length); \
1142 1129 break; \
1143 1130 case CRYPTO_DATA_UIO: \
1144 1131 ret = sha2_digest_update_uio(&(ctx).hc_icontext, data); \
1145 1132 break; \
1146 1133 case CRYPTO_DATA_MBLK: \
1147 1134 ret = sha2_digest_update_mblk(&(ctx).hc_icontext, \
1148 1135 data); \
1149 1136 break; \
1150 1137 default: \
1151 1138 ret = CRYPTO_ARGUMENTS_BAD; \
1152 1139 } \
1153 1140 }
1154 1141
1155 1142 /* ARGSUSED */
1156 1143 static int
1157 1144 sha2_mac_atomic(crypto_provider_handle_t provider,
1158 1145 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1159 1146 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1160 1147 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1161 1148 {
1162 1149 int ret = CRYPTO_SUCCESS;
1163 1150 uchar_t digest[SHA512_DIGEST_LENGTH];
1164 1151 sha2_hmac_ctx_t sha2_hmac_ctx;
1165 1152 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1166 1153 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1167 1154
1168 1155 /*
1169 1156 * Set the digest length and block size to values appropriate to the
1170 1157 * mechanism
1171 1158 */
1172 1159 switch (mechanism->cm_type) {
1173 1160 case SHA256_HMAC_MECH_INFO_TYPE:
1174 1161 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1175 1162 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1176 1163 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1177 1164 break;
1178 1165 case SHA384_HMAC_MECH_INFO_TYPE:
1179 1166 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1180 1167 case SHA512_HMAC_MECH_INFO_TYPE:
1181 1168 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1182 1169 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1183 1170 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1184 1171 break;
1185 1172 default:
1186 1173 return (CRYPTO_MECHANISM_INVALID);
1187 1174 }
1188 1175
1189 1176 /* Add support for key by attributes (RFE 4706552) */
1190 1177 if (key->ck_format != CRYPTO_KEY_RAW)
1191 1178 return (CRYPTO_ARGUMENTS_BAD);
1192 1179
1193 1180 if (ctx_template != NULL) {
1194 1181 /* reuse context template */
1195 1182 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1196 1183 } else {
1197 1184 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1198 1185 /* no context template, initialize context */
1199 1186 if (keylen_in_bytes > sha_hmac_block_size) {
1200 1187 /*
1201 1188 * Hash the passed-in key to get a smaller key.
1202 1189 * The inner context is used since it hasn't been
1203 1190 * initialized yet.
1204 1191 */
1205 1192 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1206 1193 &sha2_hmac_ctx.hc_icontext,
1207 1194 key->ck_data, keylen_in_bytes, digest);
1208 1195 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1209 1196 sha_digest_len);
1210 1197 } else {
1211 1198 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1212 1199 keylen_in_bytes);
1213 1200 }
1214 1201 }
1215 1202
1216 1203 /* get the mechanism parameters, if applicable */
1217 1204 if ((mechanism->cm_type % 3) == 2) {
1218 1205 if (mechanism->cm_param == NULL ||
1219 1206 mechanism->cm_param_len != sizeof (ulong_t)) {
1220 1207 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1221 1208 goto bail;
1222 1209 }
1223 1210 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1224 1211 if (digest_len > sha_digest_len) {
1225 1212 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1226 1213 goto bail;
1227 1214 }
1228 1215 }
1229 1216
1230 1217 /* do a SHA2 update of the inner context using the specified data */
1231 1218 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1232 1219 if (ret != CRYPTO_SUCCESS)
1233 1220 /* the update failed, free context and bail */
1234 1221 goto bail;
1235 1222
1236 1223 /*
1237 1224 * Do a SHA2 final on the inner context.
1238 1225 */
1239 1226 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1240 1227
1241 1228 /*
1242 1229 * Do an SHA2 update on the outer context, feeding the inner
1243 1230 * digest as data.
1244 1231 *
1245 1232 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1246 1233 * bytes of the inner hash value.
1247 1234 */
1248 1235 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1249 1236 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1250 1237 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1251 1238 SHA384_DIGEST_LENGTH);
1252 1239 else
1253 1240 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1254 1241
1255 1242 /*
1256 1243 * Do a SHA2 final on the outer context, storing the computed
1257 1244 * digest in the users buffer.
1258 1245 */
1259 1246 switch (mac->cd_format) {
1260 1247 case CRYPTO_DATA_RAW:
1261 1248 if (digest_len != sha_digest_len) {
1262 1249 /*
1263 1250 * The caller requested a short digest. Digest
1264 1251 * into a scratch buffer and return to
1265 1252 * the user only what was requested.
1266 1253 */
1267 1254 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1268 1255 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1269 1256 mac->cd_offset, digest_len);
1270 1257 } else {
1271 1258 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1272 1259 mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1273 1260 }
1274 1261 break;
1275 1262 case CRYPTO_DATA_UIO:
1276 1263 ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1277 1264 digest_len, digest);
1278 1265 break;
1279 1266 case CRYPTO_DATA_MBLK:
1280 1267 ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1281 1268 digest_len, digest);
1282 1269 break;
1283 1270 default:
1284 1271 ret = CRYPTO_ARGUMENTS_BAD;
1285 1272 }
1286 1273
1287 1274 if (ret == CRYPTO_SUCCESS) {
1288 1275 mac->cd_length = digest_len;
1289 1276 return (CRYPTO_SUCCESS);
1290 1277 }
1291 1278 bail:
1292 1279 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1293 1280 mac->cd_length = 0;
1294 1281 return (ret);
1295 1282 }
1296 1283
1297 1284 /* ARGSUSED */
1298 1285 static int
1299 1286 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1300 1287 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1301 1288 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1302 1289 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1303 1290 {
1304 1291 int ret = CRYPTO_SUCCESS;
1305 1292 uchar_t digest[SHA512_DIGEST_LENGTH];
1306 1293 sha2_hmac_ctx_t sha2_hmac_ctx;
1307 1294 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1308 1295 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1309 1296
1310 1297 /*
1311 1298 * Set the digest length and block size to values appropriate to the
1312 1299 * mechanism
1313 1300 */
1314 1301 switch (mechanism->cm_type) {
1315 1302 case SHA256_HMAC_MECH_INFO_TYPE:
1316 1303 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1317 1304 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1318 1305 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1319 1306 break;
1320 1307 case SHA384_HMAC_MECH_INFO_TYPE:
1321 1308 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1322 1309 case SHA512_HMAC_MECH_INFO_TYPE:
1323 1310 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1324 1311 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1325 1312 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1326 1313 break;
1327 1314 default:
1328 1315 return (CRYPTO_MECHANISM_INVALID);
1329 1316 }
1330 1317
1331 1318 /* Add support for key by attributes (RFE 4706552) */
1332 1319 if (key->ck_format != CRYPTO_KEY_RAW)
1333 1320 return (CRYPTO_ARGUMENTS_BAD);
1334 1321
1335 1322 if (ctx_template != NULL) {
1336 1323 /* reuse context template */
1337 1324 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1338 1325 } else {
1339 1326 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1340 1327 /* no context template, initialize context */
1341 1328 if (keylen_in_bytes > sha_hmac_block_size) {
1342 1329 /*
1343 1330 * Hash the passed-in key to get a smaller key.
1344 1331 * The inner context is used since it hasn't been
1345 1332 * initialized yet.
1346 1333 */
1347 1334 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1348 1335 &sha2_hmac_ctx.hc_icontext,
1349 1336 key->ck_data, keylen_in_bytes, digest);
1350 1337 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1351 1338 sha_digest_len);
1352 1339 } else {
1353 1340 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1354 1341 keylen_in_bytes);
1355 1342 }
1356 1343 }
1357 1344
1358 1345 /* get the mechanism parameters, if applicable */
1359 1346 if (mechanism->cm_type % 3 == 2) {
1360 1347 if (mechanism->cm_param == NULL ||
1361 1348 mechanism->cm_param_len != sizeof (ulong_t)) {
1362 1349 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1363 1350 goto bail;
1364 1351 }
1365 1352 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1366 1353 if (digest_len > sha_digest_len) {
1367 1354 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1368 1355 goto bail;
1369 1356 }
1370 1357 }
1371 1358
1372 1359 if (mac->cd_length != digest_len) {
1373 1360 ret = CRYPTO_INVALID_MAC;
1374 1361 goto bail;
1375 1362 }
1376 1363
1377 1364 /* do a SHA2 update of the inner context using the specified data */
1378 1365 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1379 1366 if (ret != CRYPTO_SUCCESS)
1380 1367 /* the update failed, free context and bail */
1381 1368 goto bail;
1382 1369
1383 1370 /* do a SHA2 final on the inner context */
1384 1371 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1385 1372
1386 1373 /*
1387 1374 * Do an SHA2 update on the outer context, feeding the inner
1388 1375 * digest as data.
1389 1376 *
1390 1377 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1391 1378 * bytes of the inner hash value.
1392 1379 */
1393 1380 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1394 1381 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1395 1382 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1396 1383 SHA384_DIGEST_LENGTH);
1397 1384 else
1398 1385 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1399 1386
1400 1387 /*
1401 1388 * Do a SHA2 final on the outer context, storing the computed
1402 1389 * digest in the users buffer.
1403 1390 */
1404 1391 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1405 1392
1406 1393 /*
1407 1394 * Compare the computed digest against the expected digest passed
1408 1395 * as argument.
1409 1396 */
1410 1397
1411 1398 switch (mac->cd_format) {
1412 1399
1413 1400 case CRYPTO_DATA_RAW:
1414 1401 if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1415 1402 mac->cd_offset, digest_len) != 0)
1416 1403 ret = CRYPTO_INVALID_MAC;
1417 1404 break;
1418 1405
1419 1406 case CRYPTO_DATA_UIO: {
1420 1407 off_t offset = mac->cd_offset;
1421 1408 uint_t vec_idx;
1422 1409 off_t scratch_offset = 0;
1423 1410 size_t length = digest_len;
1424 1411 size_t cur_len;
1425 1412
1426 1413 /* we support only kernel buffer */
1427 1414 if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1428 1415 return (CRYPTO_ARGUMENTS_BAD);
1429 1416
1430 1417 /* jump to the first iovec containing the expected digest */
1431 1418 for (vec_idx = 0;
1432 1419 offset >= mac->cd_uio->uio_iov[vec_idx].iov_len &&
1433 1420 vec_idx < mac->cd_uio->uio_iovcnt;
1434 1421 offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1435 1422 ;
1436 1423 if (vec_idx == mac->cd_uio->uio_iovcnt) {
1437 1424 /*
1438 1425 * The caller specified an offset that is
1439 1426 * larger than the total size of the buffers
1440 1427 * it provided.
1441 1428 */
1442 1429 ret = CRYPTO_DATA_LEN_RANGE;
1443 1430 break;
1444 1431 }
1445 1432
1446 1433 /* do the comparison of computed digest vs specified one */
1447 1434 while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1448 1435 cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1449 1436 offset, length);
1450 1437
1451 1438 if (bcmp(digest + scratch_offset,
1452 1439 mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1453 1440 cur_len) != 0) {
1454 1441 ret = CRYPTO_INVALID_MAC;
1455 1442 break;
1456 1443 }
1457 1444
1458 1445 length -= cur_len;
1459 1446 vec_idx++;
1460 1447 scratch_offset += cur_len;
1461 1448 offset = 0;
1462 1449 }
1463 1450 break;
1464 1451 }
1465 1452
1466 1453 case CRYPTO_DATA_MBLK: {
1467 1454 off_t offset = mac->cd_offset;
1468 1455 mblk_t *mp;
1469 1456 off_t scratch_offset = 0;
1470 1457 size_t length = digest_len;
1471 1458 size_t cur_len;
1472 1459
1473 1460 /* jump to the first mblk_t containing the expected digest */
1474 1461 for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1475 1462 offset -= MBLKL(mp), mp = mp->b_cont)
1476 1463 ;
1477 1464 if (mp == NULL) {
1478 1465 /*
1479 1466 * The caller specified an offset that is larger than
1480 1467 * the total size of the buffers it provided.
1481 1468 */
1482 1469 ret = CRYPTO_DATA_LEN_RANGE;
1483 1470 break;
1484 1471 }
1485 1472
1486 1473 while (mp != NULL && length > 0) {
1487 1474 cur_len = MIN(MBLKL(mp) - offset, length);
1488 1475 if (bcmp(digest + scratch_offset,
1489 1476 mp->b_rptr + offset, cur_len) != 0) {
1490 1477 ret = CRYPTO_INVALID_MAC;
1491 1478 break;
1492 1479 }
1493 1480
1494 1481 length -= cur_len;
1495 1482 mp = mp->b_cont;
1496 1483 scratch_offset += cur_len;
1497 1484 offset = 0;
1498 1485 }
1499 1486 break;
1500 1487 }
1501 1488
1502 1489 default:
1503 1490 ret = CRYPTO_ARGUMENTS_BAD;
1504 1491 }
1505 1492
1506 1493 return (ret);
1507 1494 bail:
1508 1495 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1509 1496 mac->cd_length = 0;
1510 1497 return (ret);
1511 1498 }
1512 1499
1513 1500 /*
1514 1501 * KCF software provider context management entry points.
1515 1502 */
1516 1503
1517 1504 /* ARGSUSED */
1518 1505 static int
1519 1506 sha2_create_ctx_template(crypto_provider_handle_t provider,
1520 1507 crypto_mechanism_t *mechanism, crypto_key_t *key,
1521 1508 crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1522 1509 crypto_req_handle_t req)
1523 1510 {
1524 1511 sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1525 1512 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1526 1513 uint32_t sha_digest_len, sha_hmac_block_size;
1527 1514
1528 1515 /*
1529 1516 * Set the digest length and block size to values appropriate to the
1530 1517 * mechanism
1531 1518 */
1532 1519 switch (mechanism->cm_type) {
1533 1520 case SHA256_HMAC_MECH_INFO_TYPE:
1534 1521 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1535 1522 sha_digest_len = SHA256_DIGEST_LENGTH;
1536 1523 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1537 1524 break;
1538 1525 case SHA384_HMAC_MECH_INFO_TYPE:
1539 1526 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1540 1527 case SHA512_HMAC_MECH_INFO_TYPE:
1541 1528 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1542 1529 sha_digest_len = SHA512_DIGEST_LENGTH;
1543 1530 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1544 1531 break;
1545 1532 default:
1546 1533 return (CRYPTO_MECHANISM_INVALID);
1547 1534 }
1548 1535
1549 1536 /* Add support for key by attributes (RFE 4706552) */
1550 1537 if (key->ck_format != CRYPTO_KEY_RAW)
1551 1538 return (CRYPTO_ARGUMENTS_BAD);
1552 1539
1553 1540 /*
1554 1541 * Allocate and initialize SHA2 context.
1555 1542 */
1556 1543 sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1557 1544 crypto_kmflag(req));
1558 1545 if (sha2_hmac_ctx_tmpl == NULL)
1559 1546 return (CRYPTO_HOST_MEMORY);
1560 1547
1561 1548 sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1562 1549
1563 1550 if (keylen_in_bytes > sha_hmac_block_size) {
1564 1551 uchar_t digested_key[SHA512_DIGEST_LENGTH];
1565 1552
1566 1553 /*
1567 1554 * Hash the passed-in key to get a smaller key.
1568 1555 * The inner context is used since it hasn't been
1569 1556 * initialized yet.
1570 1557 */
1571 1558 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1572 1559 &sha2_hmac_ctx_tmpl->hc_icontext,
1573 1560 key->ck_data, keylen_in_bytes, digested_key);
1574 1561 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1575 1562 sha_digest_len);
1576 1563 } else {
1577 1564 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1578 1565 keylen_in_bytes);
1579 1566 }
1580 1567
1581 1568 *ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1582 1569 *ctx_template_size = sizeof (sha2_hmac_ctx_t);
1583 1570
1584 1571 return (CRYPTO_SUCCESS);
1585 1572 }
1586 1573
1587 1574 static int
1588 1575 sha2_free_context(crypto_ctx_t *ctx)
1589 1576 {
1590 1577 uint_t ctx_len;
1591 1578
1592 1579 if (ctx->cc_provider_private == NULL)
1593 1580 return (CRYPTO_SUCCESS);
1594 1581
1595 1582 /*
1596 1583 * We have to free either SHA2 or SHA2-HMAC contexts, which
1597 1584 * have different lengths.
1598 1585 *
1599 1586 * Note: Below is dependent on the mechanism ordering.
1600 1587 */
1601 1588
1602 1589 if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1603 1590 ctx_len = sizeof (sha2_ctx_t);
1604 1591 else
1605 1592 ctx_len = sizeof (sha2_hmac_ctx_t);
1606 1593
1607 1594 bzero(ctx->cc_provider_private, ctx_len);
1608 1595 kmem_free(ctx->cc_provider_private, ctx_len);
1609 1596 ctx->cc_provider_private = NULL;
1610 1597
1611 1598 return (CRYPTO_SUCCESS);
1612 1599 }