1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21
22 /*
23 * Copyright 2010 Sun Microsystems, Inc. All rights reserved.
24 * Use is subject to license terms.
25 */
26
27 #include <sys/modctl.h>
28 #include <sys/cmn_err.h>
29 #include <sys/crypto/common.h>
30 #include <sys/crypto/spi.h>
31 #include <sys/strsun.h>
32 #include <sys/systm.h>
33 #include <sys/sysmacros.h>
34 #define _SHA2_IMPL
35 #include <sys/sha2.h>
36 #include <sha2/sha2_impl.h>
37
38 /*
39 * The sha2 module is created with two modlinkages:
40 * - a modlmisc that allows consumers to directly call the entry points
41 * SHA2Init, SHA2Update, and SHA2Final.
42 * - a modlcrypto that allows the module to register with the Kernel
43 * Cryptographic Framework (KCF) as a software provider for the SHA2
44 * mechanisms.
45 */
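
/*
 * Illustrative sketch (not part of this module): a kernel consumer that
 * depends on misc/sha2 can call the direct entry points to compute a
 * one-shot SHA-256 digest roughly as follows, where `buf' and `len'
 * stand for the caller's data:
 *
 *	SHA2_CTX ctx;
 *	uint8_t md[SHA256_DIGEST_LENGTH];
 *
 *	SHA2Init(SHA256, &ctx);
 *	SHA2Update(&ctx, buf, len);
 *	SHA2Final(md, &ctx);
 *
 * The KCF provider entry points below wrap these same primitives.
 */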
46
47 static struct modlmisc modlmisc = {
48 &mod_miscops,
49 "SHA2 Message-Digest Algorithm"
50 };
51
52 static struct modlcrypto modlcrypto = {
53 &mod_cryptoops,
54 "SHA2 Kernel SW Provider"
55 };
56
57 static struct modlinkage modlinkage = {
58 MODREV_1, { &modlmisc, &modlcrypto, NULL }
59 };
60
61 /*
62 * Macros to access the SHA2 or SHA2-HMAC contexts from a context passed
63 * by KCF to one of the entry points.
64 */
65
66 #define PROV_SHA2_CTX(ctx) ((sha2_ctx_t *)(ctx)->cc_provider_private)
67 #define PROV_SHA2_HMAC_CTX(ctx) ((sha2_hmac_ctx_t *)(ctx)->cc_provider_private)
68
/* Extract the digest length passed as a mechanism parameter. */
70 #define PROV_SHA2_GET_DIGEST_LEN(m, len) { \
71 if (IS_P2ALIGNED((m)->cm_param, sizeof (ulong_t))) \
72 (len) = (uint32_t)*((ulong_t *)(void *)(m)->cm_param); \
73 else { \
74 ulong_t tmp_ulong; \
75 bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t)); \
76 (len) = (uint32_t)tmp_ulong; \
77 } \
78 }
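
/*
 * The SUN_CKM_SHA*_HMAC_GENERAL mechanisms carry the desired MAC length
 * as a ulong_t in cm_param; since cm_param is not guaranteed to be
 * suitably aligned, the macro above falls back to bcopy() for misaligned
 * parameters.  A caller-side sketch (for illustration only, hypothetical
 * variable names):
 *
 *	ulong_t mac_len = 16;
 *	crypto_mechanism_t mech;
 *
 *	mech.cm_type = crypto_mech2id(SUN_CKM_SHA256_HMAC_GENERAL);
 *	mech.cm_param = (caddr_t)&mac_len;
 *	mech.cm_param_len = sizeof (mac_len);
 */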
79
80 #define PROV_SHA2_DIGEST_KEY(mech, ctx, key, len, digest) { \
81 SHA2Init(mech, ctx); \
82 SHA2Update(ctx, key, len); \
83 SHA2Final(digest, ctx); \
84 }
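
/*
 * Per the HMAC construction (RFC 2104), a key longer than the hash block
 * size must first be shortened by digesting it; the macro above is used
 * for that purpose by the init, atomic, and context template entry
 * points below.
 */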
85
86 /*
87 * Mechanism info structure passed to KCF during registration.
88 */
89 static crypto_mech_info_t sha2_mech_info_tab[] = {
90 /* SHA256 */
91 {SUN_CKM_SHA256, SHA256_MECH_INFO_TYPE,
92 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
93 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
94 /* SHA256-HMAC */
95 {SUN_CKM_SHA256_HMAC, SHA256_HMAC_MECH_INFO_TYPE,
96 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
97 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
98 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
99 /* SHA256-HMAC GENERAL */
100 {SUN_CKM_SHA256_HMAC_GENERAL, SHA256_HMAC_GEN_MECH_INFO_TYPE,
101 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
102 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
103 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
104 /* SHA384 */
105 {SUN_CKM_SHA384, SHA384_MECH_INFO_TYPE,
106 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
107 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
108 /* SHA384-HMAC */
109 {SUN_CKM_SHA384_HMAC, SHA384_HMAC_MECH_INFO_TYPE,
110 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
111 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
112 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
113 /* SHA384-HMAC GENERAL */
114 {SUN_CKM_SHA384_HMAC_GENERAL, SHA384_HMAC_GEN_MECH_INFO_TYPE,
115 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
116 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
117 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
118 /* SHA512 */
119 {SUN_CKM_SHA512, SHA512_MECH_INFO_TYPE,
120 CRYPTO_FG_DIGEST | CRYPTO_FG_DIGEST_ATOMIC,
121 0, 0, CRYPTO_KEYSIZE_UNIT_IN_BITS},
122 /* SHA512-HMAC */
123 {SUN_CKM_SHA512_HMAC, SHA512_HMAC_MECH_INFO_TYPE,
124 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
125 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
126 CRYPTO_KEYSIZE_UNIT_IN_BYTES},
127 /* SHA512-HMAC GENERAL */
128 {SUN_CKM_SHA512_HMAC_GENERAL, SHA512_HMAC_GEN_MECH_INFO_TYPE,
129 CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC,
130 SHA2_HMAC_MIN_KEY_LEN, SHA2_HMAC_MAX_KEY_LEN,
131 CRYPTO_KEYSIZE_UNIT_IN_BYTES}
132 };
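
/*
 * Several entry points below infer properties from the mechanism number
 * itself, assuming the SHA2 mechanism numbers are laid out in the same
 * order as this table, i.e. three consecutive numbers per digest size
 * with the flavor given by cm_type % 3 (0 = plain digest, 1 = HMAC,
 * 2 = general-length HMAC).  See, e.g., the cm_type % 3 tests in the
 * HMAC entry points and sha2_free_context().
 */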
133
134 static void sha2_provider_status(crypto_provider_handle_t, uint_t *);
135
136 static crypto_control_ops_t sha2_control_ops = {
137 sha2_provider_status
138 };
139
140 static int sha2_digest_init(crypto_ctx_t *, crypto_mechanism_t *,
141 crypto_req_handle_t);
142 static int sha2_digest(crypto_ctx_t *, crypto_data_t *, crypto_data_t *,
143 crypto_req_handle_t);
144 static int sha2_digest_update(crypto_ctx_t *, crypto_data_t *,
145 crypto_req_handle_t);
146 static int sha2_digest_final(crypto_ctx_t *, crypto_data_t *,
147 crypto_req_handle_t);
148 static int sha2_digest_atomic(crypto_provider_handle_t, crypto_session_id_t,
149 crypto_mechanism_t *, crypto_data_t *, crypto_data_t *,
150 crypto_req_handle_t);
151
152 static crypto_digest_ops_t sha2_digest_ops = {
153 sha2_digest_init,
154 sha2_digest,
155 sha2_digest_update,
156 NULL,
157 sha2_digest_final,
158 sha2_digest_atomic
159 };
160
161 static int sha2_mac_init(crypto_ctx_t *, crypto_mechanism_t *, crypto_key_t *,
162 crypto_spi_ctx_template_t, crypto_req_handle_t);
163 static int sha2_mac_update(crypto_ctx_t *, crypto_data_t *,
164 crypto_req_handle_t);
165 static int sha2_mac_final(crypto_ctx_t *, crypto_data_t *, crypto_req_handle_t);
166 static int sha2_mac_atomic(crypto_provider_handle_t, crypto_session_id_t,
167 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
168 crypto_spi_ctx_template_t, crypto_req_handle_t);
169 static int sha2_mac_verify_atomic(crypto_provider_handle_t, crypto_session_id_t,
170 crypto_mechanism_t *, crypto_key_t *, crypto_data_t *, crypto_data_t *,
171 crypto_spi_ctx_template_t, crypto_req_handle_t);
172
173 static crypto_mac_ops_t sha2_mac_ops = {
174 sha2_mac_init,
175 NULL,
176 sha2_mac_update,
177 sha2_mac_final,
178 sha2_mac_atomic,
179 sha2_mac_verify_atomic
180 };
181
182 static int sha2_create_ctx_template(crypto_provider_handle_t,
183 crypto_mechanism_t *, crypto_key_t *, crypto_spi_ctx_template_t *,
184 size_t *, crypto_req_handle_t);
185 static int sha2_free_context(crypto_ctx_t *);
186
187 static crypto_ctx_ops_t sha2_ctx_ops = {
188 sha2_create_ctx_template,
189 sha2_free_context
190 };
191
192 static crypto_ops_t sha2_crypto_ops = {
193 .co_control_ops = &sha2_control_ops,
194 .co_digest_ops = &sha2_digest_ops,
195 .co_mac_ops = &sha2_mac_ops,
196 .co_ctx_ops = &sha2_ctx_ops
197 };
198
199 static crypto_provider_info_t sha2_prov_info = {{{{
200 CRYPTO_SPI_VERSION_4,
201 "SHA2 Software Provider",
202 CRYPTO_SW_PROVIDER,
203 {&modlinkage},
204 NULL,
205 &sha2_crypto_ops,
206 sizeof (sha2_mech_info_tab)/sizeof (crypto_mech_info_t),
207 sha2_mech_info_tab
208 }}}};
209
210 static crypto_kcf_provider_handle_t sha2_prov_handle = NULL;
211
212 int
213 _init()
214 {
215 int ret;
216
217 if ((ret = mod_install(&modlinkage)) != 0)
218 return (ret);
219
220 /*
221 * Register with KCF. If the registration fails, do not uninstall the
222 * module, since the functionality provided by misc/sha2 should still
223 * be available.
224 */
225 (void) crypto_register_provider(&sha2_prov_info, &sha2_prov_handle);
226
227 return (0);
228 }
229
230 int
231 _info(struct modinfo *modinfop)
232 {
233 return (mod_info(&modlinkage, modinfop));
234 }
235
236 /*
237 * KCF software provider control entry points.
238 */
239 /* ARGSUSED */
240 static void
241 sha2_provider_status(crypto_provider_handle_t provider, uint_t *status)
242 {
243 *status = CRYPTO_PROVIDER_READY;
244 }
245
246 /*
247 * KCF software provider digest entry points.
248 */
249
250 static int
251 sha2_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
252 crypto_req_handle_t req)
253 {
254
255 /*
256 * Allocate and initialize SHA2 context.
257 */
258 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_ctx_t),
259 crypto_kmflag(req));
260 if (ctx->cc_provider_private == NULL)
261 return (CRYPTO_HOST_MEMORY);
262
263 PROV_SHA2_CTX(ctx)->sc_mech_type = mechanism->cm_type;
264 SHA2Init(mechanism->cm_type, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
265
266 return (CRYPTO_SUCCESS);
267 }
268
269 /*
270 * Helper SHA2 digest update function for uio data.
271 */
272 static int
273 sha2_digest_update_uio(SHA2_CTX *sha2_ctx, crypto_data_t *data)
274 {
275 off_t offset = data->cd_offset;
276 size_t length = data->cd_length;
277 uint_t vec_idx;
278 size_t cur_len;
279
280 /* we support only kernel buffer */
281 if (data->cd_uio->uio_segflg != UIO_SYSSPACE)
282 return (CRYPTO_ARGUMENTS_BAD);
283
284 /*
285 * Jump to the first iovec containing data to be
286 * digested.
287 */
288 for (vec_idx = 0; vec_idx < data->cd_uio->uio_iovcnt &&
289 offset >= data->cd_uio->uio_iov[vec_idx].iov_len;
290 offset -= data->cd_uio->uio_iov[vec_idx++].iov_len)
291 ;
292 if (vec_idx == data->cd_uio->uio_iovcnt) {
293 /*
294 * The caller specified an offset that is larger than the
295 * total size of the buffers it provided.
296 */
297 return (CRYPTO_DATA_LEN_RANGE);
298 }
299
300 /*
301 * Now do the digesting on the iovecs.
302 */
303 while (vec_idx < data->cd_uio->uio_iovcnt && length > 0) {
304 cur_len = MIN(data->cd_uio->uio_iov[vec_idx].iov_len -
305 offset, length);
306
307 SHA2Update(sha2_ctx, (uint8_t *)data->cd_uio->
308 uio_iov[vec_idx].iov_base + offset, cur_len);
309 length -= cur_len;
310 vec_idx++;
311 offset = 0;
312 }
313
314 if (vec_idx == data->cd_uio->uio_iovcnt && length > 0) {
315 /*
		 * The end of the specified iovecs was reached but the
		 * requested length could not be processed; i.e., the
		 * caller requested to digest more data than it provided.
319 */
320 return (CRYPTO_DATA_LEN_RANGE);
321 }
322
323 return (CRYPTO_SUCCESS);
324 }
325
326 /*
327 * Helper SHA2 digest final function for uio data.
328 * digest_len is the length of the desired digest. If digest_len
329 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must be at
 * least as large as the algorithm's digest length.
332 */
333 static int
334 sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
335 ulong_t digest_len, uchar_t *digest_scratch)
336 {
337 off_t offset = digest->cd_offset;
338 uint_t vec_idx;
339
340 /* we support only kernel buffer */
341 if (digest->cd_uio->uio_segflg != UIO_SYSSPACE)
342 return (CRYPTO_ARGUMENTS_BAD);
343
344 /*
345 * Jump to the first iovec containing ptr to the digest to
346 * be returned.
347 */
	for (vec_idx = 0; vec_idx < digest->cd_uio->uio_iovcnt &&
	    offset >= digest->cd_uio->uio_iov[vec_idx].iov_len;
350 offset -= digest->cd_uio->uio_iov[vec_idx++].iov_len)
351 ;
352 if (vec_idx == digest->cd_uio->uio_iovcnt) {
353 /*
354 * The caller specified an offset that is
355 * larger than the total size of the buffers
356 * it provided.
357 */
358 return (CRYPTO_DATA_LEN_RANGE);
359 }
360
361 if (offset + digest_len <=
362 digest->cd_uio->uio_iov[vec_idx].iov_len) {
363 /*
364 * The computed SHA2 digest will fit in the current
365 * iovec.
366 */
367 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
368 (digest_len != SHA256_DIGEST_LENGTH)) ||
369 ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
370 (digest_len != SHA512_DIGEST_LENGTH))) {
371 /*
372 * The caller requested a short digest. Digest
373 * into a scratch buffer and return to
374 * the user only what was requested.
375 */
376 SHA2Final(digest_scratch, sha2_ctx);
377
378 bcopy(digest_scratch, (uchar_t *)digest->
379 cd_uio->uio_iov[vec_idx].iov_base + offset,
380 digest_len);
381 } else {
382 SHA2Final((uchar_t *)digest->
383 cd_uio->uio_iov[vec_idx].iov_base + offset,
384 sha2_ctx);
385
386 }
387 } else {
388 /*
389 * The computed digest will be crossing one or more iovec's.
390 * This is bad performance-wise but we need to support it.
391 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest iovecs.
393 */
394 uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
395 off_t scratch_offset = 0;
396 size_t length = digest_len;
397 size_t cur_len;
398
399 SHA2Final(digest_tmp, sha2_ctx);
400
401 while (vec_idx < digest->cd_uio->uio_iovcnt && length > 0) {
402 cur_len =
403 MIN(digest->cd_uio->uio_iov[vec_idx].iov_len -
404 offset, length);
405 bcopy(digest_tmp + scratch_offset,
406 digest->cd_uio->uio_iov[vec_idx].iov_base + offset,
407 cur_len);
408
409 length -= cur_len;
410 vec_idx++;
411 scratch_offset += cur_len;
412 offset = 0;
413 }
414
415 if (vec_idx == digest->cd_uio->uio_iovcnt && length > 0) {
416 /*
			 * The end of the specified iovecs was reached but
			 * the requested length could not be processed; i.e.,
			 * the caller requested to digest more data than it
			 * provided.
421 */
422 return (CRYPTO_DATA_LEN_RANGE);
423 }
424 }
425
426 return (CRYPTO_SUCCESS);
427 }
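
/*
 * Worked example for the helper above (and its mblk counterpart below):
 * a SHA256-HMAC-GENERAL request for a 16-byte MAC still requires
 * SHA2Final() to produce the full 32-byte digest, so the caller passes
 * digest_len = 16 along with a scratch buffer of at least
 * SHA256_DIGEST_LENGTH bytes, and only the first 16 bytes are copied to
 * the destination.
 */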
428
429 /*
430 * Helper SHA2 digest update for mblk's.
431 */
432 static int
433 sha2_digest_update_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *data)
434 {
435 off_t offset = data->cd_offset;
436 size_t length = data->cd_length;
437 mblk_t *mp;
438 size_t cur_len;
439
440 /*
441 * Jump to the first mblk_t containing data to be digested.
442 */
443 for (mp = data->cd_mp; mp != NULL && offset >= MBLKL(mp);
444 offset -= MBLKL(mp), mp = mp->b_cont)
445 ;
446 if (mp == NULL) {
447 /*
448 * The caller specified an offset that is larger than the
449 * total size of the buffers it provided.
450 */
451 return (CRYPTO_DATA_LEN_RANGE);
452 }
453
454 /*
455 * Now do the digesting on the mblk chain.
456 */
457 while (mp != NULL && length > 0) {
458 cur_len = MIN(MBLKL(mp) - offset, length);
459 SHA2Update(sha2_ctx, mp->b_rptr + offset, cur_len);
460 length -= cur_len;
461 offset = 0;
462 mp = mp->b_cont;
463 }
464
465 if (mp == NULL && length > 0) {
466 /*
		 * The end of the mblk chain was reached but the requested
		 * length could not be processed; i.e., the caller requested
		 * to digest more data than it provided.
470 */
471 return (CRYPTO_DATA_LEN_RANGE);
472 }
473
474 return (CRYPTO_SUCCESS);
475 }
476
477 /*
478 * Helper SHA2 digest final for mblk's.
479 * digest_len is the length of the desired digest. If digest_len
480 * is smaller than the default SHA2 digest length, the caller
 * must pass a scratch buffer, digest_scratch, which must be at
 * least as large as the algorithm's digest length.
483 */
484 static int
485 sha2_digest_final_mblk(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
486 ulong_t digest_len, uchar_t *digest_scratch)
487 {
488 off_t offset = digest->cd_offset;
489 mblk_t *mp;
490
491 /*
492 * Jump to the first mblk_t that will be used to store the digest.
493 */
494 for (mp = digest->cd_mp; mp != NULL && offset >= MBLKL(mp);
495 offset -= MBLKL(mp), mp = mp->b_cont)
496 ;
497 if (mp == NULL) {
498 /*
499 * The caller specified an offset that is larger than the
500 * total size of the buffers it provided.
501 */
502 return (CRYPTO_DATA_LEN_RANGE);
503 }
504
505 if (offset + digest_len <= MBLKL(mp)) {
506 /*
507 * The computed SHA2 digest will fit in the current mblk.
508 * Do the SHA2Final() in-place.
509 */
510 if (((sha2_ctx->algotype <= SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
511 (digest_len != SHA256_DIGEST_LENGTH)) ||
512 ((sha2_ctx->algotype > SHA256_HMAC_GEN_MECH_INFO_TYPE) &&
513 (digest_len != SHA512_DIGEST_LENGTH))) {
514 /*
515 * The caller requested a short digest. Digest
516 * into a scratch buffer and return to
517 * the user only what was requested.
518 */
519 SHA2Final(digest_scratch, sha2_ctx);
520 bcopy(digest_scratch, mp->b_rptr + offset, digest_len);
521 } else {
522 SHA2Final(mp->b_rptr + offset, sha2_ctx);
523 }
524 } else {
525 /*
526 * The computed digest will be crossing one or more mblk's.
527 * This is bad performance-wise but we need to support it.
528 * Allocate a small scratch buffer on the stack and
		 * copy it piecemeal to the specified digest mblks.
530 */
531 uchar_t digest_tmp[SHA512_DIGEST_LENGTH];
532 off_t scratch_offset = 0;
533 size_t length = digest_len;
534 size_t cur_len;
535
536 SHA2Final(digest_tmp, sha2_ctx);
537
538 while (mp != NULL && length > 0) {
539 cur_len = MIN(MBLKL(mp) - offset, length);
540 bcopy(digest_tmp + scratch_offset,
541 mp->b_rptr + offset, cur_len);
542
543 length -= cur_len;
544 mp = mp->b_cont;
545 scratch_offset += cur_len;
546 offset = 0;
547 }
548
549 if (mp == NULL && length > 0) {
550 /*
			 * The end of the specified mblk chain was reached
			 * but the requested length could not be processed;
			 * i.e., the caller requested to digest more data
			 * than it provided.
555 */
556 return (CRYPTO_DATA_LEN_RANGE);
557 }
558 }
559
560 return (CRYPTO_SUCCESS);
561 }
562
563 /* ARGSUSED */
564 static int
565 sha2_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest,
566 crypto_req_handle_t req)
567 {
568 int ret = CRYPTO_SUCCESS;
569 uint_t sha_digest_len;
570
571 ASSERT(ctx->cc_provider_private != NULL);
572
573 switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
574 case SHA256_MECH_INFO_TYPE:
575 sha_digest_len = SHA256_DIGEST_LENGTH;
576 break;
577 case SHA384_MECH_INFO_TYPE:
578 sha_digest_len = SHA384_DIGEST_LENGTH;
579 break;
580 case SHA512_MECH_INFO_TYPE:
581 sha_digest_len = SHA512_DIGEST_LENGTH;
582 break;
583 default:
584 return (CRYPTO_MECHANISM_INVALID);
585 }
586
587 /*
	 * If the output buffer is missing or too small, just return the
	 * length needed to store the output; do not destroy the context
	 * in these cases.
590 */
591 if ((digest->cd_length == 0) ||
592 (digest->cd_length < sha_digest_len)) {
593 digest->cd_length = sha_digest_len;
594 return (CRYPTO_BUFFER_TOO_SMALL);
595 }
596
597 /*
598 * Do the SHA2 update on the specified input data.
599 */
600 switch (data->cd_format) {
601 case CRYPTO_DATA_RAW:
602 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
603 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
604 data->cd_length);
605 break;
606 case CRYPTO_DATA_UIO:
607 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
608 data);
609 break;
610 case CRYPTO_DATA_MBLK:
611 ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
612 data);
613 break;
614 default:
615 ret = CRYPTO_ARGUMENTS_BAD;
616 }
617
618 if (ret != CRYPTO_SUCCESS) {
619 /* the update failed, free context and bail */
620 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
621 ctx->cc_provider_private = NULL;
622 digest->cd_length = 0;
623 return (ret);
624 }
625
626 /*
627 * Do a SHA2 final, must be done separately since the digest
628 * type can be different than the input data type.
629 */
630 switch (digest->cd_format) {
631 case CRYPTO_DATA_RAW:
632 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
633 digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
634 break;
635 case CRYPTO_DATA_UIO:
636 ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
637 digest, sha_digest_len, NULL);
638 break;
639 case CRYPTO_DATA_MBLK:
640 ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
641 digest, sha_digest_len, NULL);
642 break;
643 default:
644 ret = CRYPTO_ARGUMENTS_BAD;
645 }
646
647 /* all done, free context and return */
648
649 if (ret == CRYPTO_SUCCESS)
650 digest->cd_length = sha_digest_len;
651 else
652 digest->cd_length = 0;
653
654 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
655 ctx->cc_provider_private = NULL;
656 return (ret);
657 }
658
659 /* ARGSUSED */
660 static int
661 sha2_digest_update(crypto_ctx_t *ctx, crypto_data_t *data,
662 crypto_req_handle_t req)
663 {
664 int ret = CRYPTO_SUCCESS;
665
666 ASSERT(ctx->cc_provider_private != NULL);
667
668 /*
669 * Do the SHA2 update on the specified input data.
670 */
671 switch (data->cd_format) {
672 case CRYPTO_DATA_RAW:
673 SHA2Update(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
674 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
675 data->cd_length);
676 break;
677 case CRYPTO_DATA_UIO:
678 ret = sha2_digest_update_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
679 data);
680 break;
681 case CRYPTO_DATA_MBLK:
682 ret = sha2_digest_update_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
683 data);
684 break;
685 default:
686 ret = CRYPTO_ARGUMENTS_BAD;
687 }
688
689 return (ret);
690 }
691
692 /* ARGSUSED */
693 static int
694 sha2_digest_final(crypto_ctx_t *ctx, crypto_data_t *digest,
695 crypto_req_handle_t req)
696 {
697 int ret = CRYPTO_SUCCESS;
698 uint_t sha_digest_len;
699
700 ASSERT(ctx->cc_provider_private != NULL);
701
702 switch (PROV_SHA2_CTX(ctx)->sc_mech_type) {
703 case SHA256_MECH_INFO_TYPE:
704 sha_digest_len = SHA256_DIGEST_LENGTH;
705 break;
706 case SHA384_MECH_INFO_TYPE:
707 sha_digest_len = SHA384_DIGEST_LENGTH;
708 break;
709 case SHA512_MECH_INFO_TYPE:
710 sha_digest_len = SHA512_DIGEST_LENGTH;
711 break;
712 default:
713 return (CRYPTO_MECHANISM_INVALID);
714 }
715
716 /*
	 * If the output buffer is missing or too small, just return the
	 * length needed to store the output; do not destroy the context
	 * in these cases.
719 */
720 if ((digest->cd_length == 0) ||
721 (digest->cd_length < sha_digest_len)) {
722 digest->cd_length = sha_digest_len;
723 return (CRYPTO_BUFFER_TOO_SMALL);
724 }
725
726 /*
727 * Do a SHA2 final.
728 */
729 switch (digest->cd_format) {
730 case CRYPTO_DATA_RAW:
731 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
732 digest->cd_offset, &PROV_SHA2_CTX(ctx)->sc_sha2_ctx);
733 break;
734 case CRYPTO_DATA_UIO:
735 ret = sha2_digest_final_uio(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
736 digest, sha_digest_len, NULL);
737 break;
738 case CRYPTO_DATA_MBLK:
739 ret = sha2_digest_final_mblk(&PROV_SHA2_CTX(ctx)->sc_sha2_ctx,
740 digest, sha_digest_len, NULL);
741 break;
742 default:
743 ret = CRYPTO_ARGUMENTS_BAD;
744 }
745
746 /* all done, free context and return */
747
748 if (ret == CRYPTO_SUCCESS)
749 digest->cd_length = sha_digest_len;
750 else
751 digest->cd_length = 0;
752
753 kmem_free(ctx->cc_provider_private, sizeof (sha2_ctx_t));
754 ctx->cc_provider_private = NULL;
755
756 return (ret);
757 }
758
759 /* ARGSUSED */
760 static int
761 sha2_digest_atomic(crypto_provider_handle_t provider,
762 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
763 crypto_data_t *data, crypto_data_t *digest,
764 crypto_req_handle_t req)
765 {
766 int ret = CRYPTO_SUCCESS;
767 SHA2_CTX sha2_ctx;
768 uint32_t sha_digest_len;
769
770 /*
771 * Do the SHA inits.
772 */
773
774 SHA2Init(mechanism->cm_type, &sha2_ctx);
775
776 switch (data->cd_format) {
777 case CRYPTO_DATA_RAW:
778 SHA2Update(&sha2_ctx, (uint8_t *)data->
779 cd_raw.iov_base + data->cd_offset, data->cd_length);
780 break;
781 case CRYPTO_DATA_UIO:
782 ret = sha2_digest_update_uio(&sha2_ctx, data);
783 break;
784 case CRYPTO_DATA_MBLK:
785 ret = sha2_digest_update_mblk(&sha2_ctx, data);
786 break;
787 default:
788 ret = CRYPTO_ARGUMENTS_BAD;
789 }
790
791 /*
792 * Do the SHA updates on the specified input data.
793 */
794
795 if (ret != CRYPTO_SUCCESS) {
796 /* the update failed, bail */
797 digest->cd_length = 0;
798 return (ret);
799 }
800
801 if (mechanism->cm_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE)
802 sha_digest_len = SHA256_DIGEST_LENGTH;
803 else
804 sha_digest_len = SHA512_DIGEST_LENGTH;
805
806 /*
807 * Do a SHA2 final, must be done separately since the digest
808 * type can be different than the input data type.
809 */
810 switch (digest->cd_format) {
811 case CRYPTO_DATA_RAW:
812 SHA2Final((unsigned char *)digest->cd_raw.iov_base +
813 digest->cd_offset, &sha2_ctx);
814 break;
815 case CRYPTO_DATA_UIO:
816 ret = sha2_digest_final_uio(&sha2_ctx, digest,
817 sha_digest_len, NULL);
818 break;
819 case CRYPTO_DATA_MBLK:
820 ret = sha2_digest_final_mblk(&sha2_ctx, digest,
821 sha_digest_len, NULL);
822 break;
823 default:
824 ret = CRYPTO_ARGUMENTS_BAD;
825 }
826
827 if (ret == CRYPTO_SUCCESS)
828 digest->cd_length = sha_digest_len;
829 else
830 digest->cd_length = 0;
831
832 return (ret);
833 }
834
835 /*
836 * KCF software provider mac entry points.
837 *
838 * SHA2 HMAC is: SHA2(key XOR opad, SHA2(key XOR ipad, text))
839 *
840 * Init:
841 * The initialization routine initializes what we denote
842 * as the inner and outer contexts by doing
843 * - for inner context: SHA2(key XOR ipad)
844 * - for outer context: SHA2(key XOR opad)
845 *
846 * Update:
847 * Each subsequent SHA2 HMAC update will result in an
848 * update of the inner context with the specified data.
849 *
850 * Final:
851 * The SHA2 HMAC final will do a SHA2 final operation on the
852 * inner context, and the resulting digest will be used
853 * as the data for an update on the outer context. Last
854 * but not least, a SHA2 final on the outer context will
855 * be performed to obtain the SHA2 HMAC digest to return
856 * to the user.
857 */
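
/*
 * Equivalently, for a key K already padded or digested to the block size
 * B and a message m (a sketch of the standard HMAC construction, not
 * additional code in this module):
 *
 *	inner = SHA2((K ^ ipad) || m)		ipad = 0x36 repeated B times
 *	HMAC  = SHA2((K ^ opad) || inner)	opad = 0x5c repeated B times
 *
 * sha2_mac_init_ctx() precomputes the two partial contexts after
 * absorbing (K ^ ipad) and (K ^ opad), update feeds m into the inner
 * context, and final chains the inner digest into the outer context.
 */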
858
859 /*
860 * Initialize a SHA2-HMAC context.
861 */
862 static void
863 sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
864 {
865 uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
866 uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
867 int i, block_size, blocks_per_int64;
868
869 /* Determine the block size */
870 if (ctx->hc_mech_type <= SHA256_HMAC_GEN_MECH_INFO_TYPE) {
871 block_size = SHA256_HMAC_BLOCK_SIZE;
872 blocks_per_int64 = SHA256_HMAC_BLOCK_SIZE / sizeof (uint64_t);
873 } else {
874 block_size = SHA512_HMAC_BLOCK_SIZE;
875 blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
876 }
877
878 (void) bzero(ipad, block_size);
879 (void) bzero(opad, block_size);
880 (void) bcopy(keyval, ipad, length_in_bytes);
881 (void) bcopy(keyval, opad, length_in_bytes);
882
883 /* XOR key with ipad (0x36) and opad (0x5c) */
	for (i = 0; i < blocks_per_int64; i++) {
885 ipad[i] ^= 0x3636363636363636;
886 opad[i] ^= 0x5c5c5c5c5c5c5c5c;
887 }
888
889 /* perform SHA2 on ipad */
890 SHA2Init(ctx->hc_mech_type, &ctx->hc_icontext);
891 SHA2Update(&ctx->hc_icontext, (uint8_t *)ipad, block_size);
892
893 /* perform SHA2 on opad */
894 SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
895 SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
896
897 }
898
/*
 * KCF entry point: initialize a SHA2-HMAC context for a multi-part MAC
 * operation.
 */
901 static int
902 sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
903 crypto_key_t *key, crypto_spi_ctx_template_t ctx_template,
904 crypto_req_handle_t req)
905 {
906 int ret = CRYPTO_SUCCESS;
907 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
908 uint_t sha_digest_len, sha_hmac_block_size;
909
910 /*
911 * Set the digest length and block size to values appropriate to the
912 * mechanism
913 */
914 switch (mechanism->cm_type) {
915 case SHA256_HMAC_MECH_INFO_TYPE:
916 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
917 sha_digest_len = SHA256_DIGEST_LENGTH;
918 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
919 break;
920 case SHA384_HMAC_MECH_INFO_TYPE:
921 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
922 case SHA512_HMAC_MECH_INFO_TYPE:
923 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
924 sha_digest_len = SHA512_DIGEST_LENGTH;
925 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
926 break;
927 default:
928 return (CRYPTO_MECHANISM_INVALID);
929 }
930
931 if (key->ck_format != CRYPTO_KEY_RAW)
932 return (CRYPTO_ARGUMENTS_BAD);
933
934 ctx->cc_provider_private = kmem_alloc(sizeof (sha2_hmac_ctx_t),
935 crypto_kmflag(req));
936 if (ctx->cc_provider_private == NULL)
937 return (CRYPTO_HOST_MEMORY);
938
939 PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
940 if (ctx_template != NULL) {
941 /* reuse context template */
942 bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
943 sizeof (sha2_hmac_ctx_t));
944 } else {
945 /* no context template, compute context */
946 if (keylen_in_bytes > sha_hmac_block_size) {
947 uchar_t digested_key[SHA512_DIGEST_LENGTH];
948 sha2_hmac_ctx_t *hmac_ctx = ctx->cc_provider_private;
949
950 /*
951 * Hash the passed-in key to get a smaller key.
952 * The inner context is used since it hasn't been
953 * initialized yet.
954 */
955 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
956 &hmac_ctx->hc_icontext,
957 key->ck_data, keylen_in_bytes, digested_key);
958 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
959 digested_key, sha_digest_len);
960 } else {
961 sha2_mac_init_ctx(PROV_SHA2_HMAC_CTX(ctx),
962 key->ck_data, keylen_in_bytes);
963 }
964 }
965
966 /*
967 * Get the mechanism parameters, if applicable.
968 */
	if (mechanism->cm_type % 3 == 2) {
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (ulong_t)) {
			/* don't dereference a missing or mis-sized param */
			ret = CRYPTO_MECHANISM_PARAM_INVALID;
		} else {
			PROV_SHA2_GET_DIGEST_LEN(mechanism,
			    PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len);
			if (PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len >
			    sha_digest_len)
				ret = CRYPTO_MECHANISM_PARAM_INVALID;
		}
	}
978
979 if (ret != CRYPTO_SUCCESS) {
980 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
981 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
982 ctx->cc_provider_private = NULL;
983 }
984
985 return (ret);
986 }
987
988 /* ARGSUSED */
989 static int
990 sha2_mac_update(crypto_ctx_t *ctx, crypto_data_t *data,
991 crypto_req_handle_t req)
992 {
993 int ret = CRYPTO_SUCCESS;
994
995 ASSERT(ctx->cc_provider_private != NULL);
996
997 /*
998 * Do a SHA2 update of the inner context using the specified
999 * data.
1000 */
1001 switch (data->cd_format) {
1002 case CRYPTO_DATA_RAW:
1003 SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_icontext,
1004 (uint8_t *)data->cd_raw.iov_base + data->cd_offset,
1005 data->cd_length);
1006 break;
1007 case CRYPTO_DATA_UIO:
1008 ret = sha2_digest_update_uio(
1009 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1010 break;
1011 case CRYPTO_DATA_MBLK:
1012 ret = sha2_digest_update_mblk(
1013 &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext, data);
1014 break;
1015 default:
1016 ret = CRYPTO_ARGUMENTS_BAD;
1017 }
1018
1019 return (ret);
1020 }
1021
1022 /* ARGSUSED */
1023 static int
1024 sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac, crypto_req_handle_t req)
1025 {
1026 int ret = CRYPTO_SUCCESS;
1027 uchar_t digest[SHA512_DIGEST_LENGTH];
1028 uint32_t digest_len, sha_digest_len;
1029
1030 ASSERT(ctx->cc_provider_private != NULL);
1031
1032 /* Set the digest lengths to values appropriate to the mechanism */
1033 switch (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type) {
1034 case SHA256_HMAC_MECH_INFO_TYPE:
1035 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1036 break;
1037 case SHA384_HMAC_MECH_INFO_TYPE:
1038 sha_digest_len = digest_len = SHA384_DIGEST_LENGTH;
1039 break;
1040 case SHA512_HMAC_MECH_INFO_TYPE:
1041 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1042 break;
1043 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1044 sha_digest_len = SHA256_DIGEST_LENGTH;
1045 digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1046 break;
1047 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1048 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1049 sha_digest_len = SHA512_DIGEST_LENGTH;
1050 digest_len = PROV_SHA2_HMAC_CTX(ctx)->hc_digest_len;
1051 break;
1052 }
1053
1054 /*
	 * If the output buffer is missing or too small, just return the
	 * length needed to store the output; do not destroy the context
	 * in these cases.
1057 */
1058 if ((mac->cd_length == 0) || (mac->cd_length < digest_len)) {
1059 mac->cd_length = digest_len;
1060 return (CRYPTO_BUFFER_TOO_SMALL);
1061 }
1062
1063 /*
1064 * Do a SHA2 final on the inner context.
1065 */
1066 SHA2Final(digest, &PROV_SHA2_HMAC_CTX(ctx)->hc_icontext);
1067
	/*
	 * Do a SHA2 update on the outer context, feeding the inner
	 * digest as data.  HMAC-SHA384 needs special handling, as the
	 * outer hash takes only the first 48 bytes of the inner hash value.
	 */
	if (PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type ==
	    SHA384_HMAC_MECH_INFO_TYPE ||
	    PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type ==
	    SHA384_HMAC_GEN_MECH_INFO_TYPE)
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
		    SHA384_DIGEST_LENGTH);
	else
		SHA2Update(&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, digest,
		    sha_digest_len);
1074
1075 /*
	 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1078 */
1079 switch (mac->cd_format) {
1080 case CRYPTO_DATA_RAW:
1081 if (digest_len != sha_digest_len) {
1082 /*
1083 * The caller requested a short digest. Digest
1084 * into a scratch buffer and return to
1085 * the user only what was requested.
1086 */
1087 SHA2Final(digest,
1088 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1089 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1090 mac->cd_offset, digest_len);
1091 } else {
1092 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1093 mac->cd_offset,
1094 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
1095 }
1096 break;
1097 case CRYPTO_DATA_UIO:
1098 ret = sha2_digest_final_uio(
1099 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1100 digest_len, digest);
1101 break;
1102 case CRYPTO_DATA_MBLK:
1103 ret = sha2_digest_final_mblk(
1104 &PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext, mac,
1105 digest_len, digest);
1106 break;
1107 default:
1108 ret = CRYPTO_ARGUMENTS_BAD;
1109 }
1110
1111 if (ret == CRYPTO_SUCCESS)
1112 mac->cd_length = digest_len;
1113 else
1114 mac->cd_length = 0;
1115
1116 bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1117 kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
1118 ctx->cc_provider_private = NULL;
1119
1120 return (ret);
1121 }
1122
1123 #define SHA2_MAC_UPDATE(data, ctx, ret) { \
1124 switch (data->cd_format) { \
1125 case CRYPTO_DATA_RAW: \
1126 SHA2Update(&(ctx).hc_icontext, \
1127 (uint8_t *)data->cd_raw.iov_base + \
1128 data->cd_offset, data->cd_length); \
1129 break; \
1130 case CRYPTO_DATA_UIO: \
1131 ret = sha2_digest_update_uio(&(ctx).hc_icontext, data); \
1132 break; \
1133 case CRYPTO_DATA_MBLK: \
1134 ret = sha2_digest_update_mblk(&(ctx).hc_icontext, \
1135 data); \
1136 break; \
1137 default: \
1138 ret = CRYPTO_ARGUMENTS_BAD; \
1139 } \
1140 }
1141
1142 /* ARGSUSED */
1143 static int
1144 sha2_mac_atomic(crypto_provider_handle_t provider,
1145 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1146 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1147 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1148 {
1149 int ret = CRYPTO_SUCCESS;
1150 uchar_t digest[SHA512_DIGEST_LENGTH];
1151 sha2_hmac_ctx_t sha2_hmac_ctx;
1152 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1153 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1154
1155 /*
1156 * Set the digest length and block size to values appropriate to the
1157 * mechanism
1158 */
1159 switch (mechanism->cm_type) {
1160 case SHA256_HMAC_MECH_INFO_TYPE:
1161 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1162 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1163 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1164 break;
1165 case SHA384_HMAC_MECH_INFO_TYPE:
1166 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1167 case SHA512_HMAC_MECH_INFO_TYPE:
1168 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1169 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1170 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1171 break;
1172 default:
1173 return (CRYPTO_MECHANISM_INVALID);
1174 }
1175
1176 /* Add support for key by attributes (RFE 4706552) */
1177 if (key->ck_format != CRYPTO_KEY_RAW)
1178 return (CRYPTO_ARGUMENTS_BAD);
1179
1180 if (ctx_template != NULL) {
1181 /* reuse context template */
1182 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1183 } else {
1184 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1185 /* no context template, initialize context */
1186 if (keylen_in_bytes > sha_hmac_block_size) {
1187 /*
1188 * Hash the passed-in key to get a smaller key.
1189 * The inner context is used since it hasn't been
1190 * initialized yet.
1191 */
1192 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1193 &sha2_hmac_ctx.hc_icontext,
1194 key->ck_data, keylen_in_bytes, digest);
1195 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1196 sha_digest_len);
1197 } else {
1198 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1199 keylen_in_bytes);
1200 }
1201 }
1202
1203 /* get the mechanism parameters, if applicable */
1204 if ((mechanism->cm_type % 3) == 2) {
1205 if (mechanism->cm_param == NULL ||
1206 mechanism->cm_param_len != sizeof (ulong_t)) {
1207 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1208 goto bail;
1209 }
1210 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1211 if (digest_len > sha_digest_len) {
1212 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1213 goto bail;
1214 }
1215 }
1216
1217 /* do a SHA2 update of the inner context using the specified data */
1218 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1219 if (ret != CRYPTO_SUCCESS)
1220 /* the update failed, free context and bail */
1221 goto bail;
1222
1223 /*
1224 * Do a SHA2 final on the inner context.
1225 */
1226 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1227
1228 /*
1229 * Do an SHA2 update on the outer context, feeding the inner
1230 * digest as data.
1231 *
1232 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1233 * bytes of the inner hash value.
1234 */
1235 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1236 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1237 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1238 SHA384_DIGEST_LENGTH);
1239 else
1240 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1241
1242 /*
1243 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1245 */
1246 switch (mac->cd_format) {
1247 case CRYPTO_DATA_RAW:
1248 if (digest_len != sha_digest_len) {
1249 /*
1250 * The caller requested a short digest. Digest
1251 * into a scratch buffer and return to
1252 * the user only what was requested.
1253 */
1254 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1255 bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
1256 mac->cd_offset, digest_len);
1257 } else {
1258 SHA2Final((unsigned char *)mac->cd_raw.iov_base +
1259 mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
1260 }
1261 break;
1262 case CRYPTO_DATA_UIO:
1263 ret = sha2_digest_final_uio(&sha2_hmac_ctx.hc_ocontext, mac,
1264 digest_len, digest);
1265 break;
1266 case CRYPTO_DATA_MBLK:
1267 ret = sha2_digest_final_mblk(&sha2_hmac_ctx.hc_ocontext, mac,
1268 digest_len, digest);
1269 break;
1270 default:
1271 ret = CRYPTO_ARGUMENTS_BAD;
1272 }
1273
1274 if (ret == CRYPTO_SUCCESS) {
1275 mac->cd_length = digest_len;
1276 return (CRYPTO_SUCCESS);
1277 }
1278 bail:
1279 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1280 mac->cd_length = 0;
1281 return (ret);
1282 }
1283
1284 /* ARGSUSED */
1285 static int
1286 sha2_mac_verify_atomic(crypto_provider_handle_t provider,
1287 crypto_session_id_t session_id, crypto_mechanism_t *mechanism,
1288 crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
1289 crypto_spi_ctx_template_t ctx_template, crypto_req_handle_t req)
1290 {
1291 int ret = CRYPTO_SUCCESS;
1292 uchar_t digest[SHA512_DIGEST_LENGTH];
1293 sha2_hmac_ctx_t sha2_hmac_ctx;
1294 uint32_t sha_digest_len, digest_len, sha_hmac_block_size;
1295 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1296
1297 /*
1298 * Set the digest length and block size to values appropriate to the
1299 * mechanism
1300 */
1301 switch (mechanism->cm_type) {
1302 case SHA256_HMAC_MECH_INFO_TYPE:
1303 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1304 sha_digest_len = digest_len = SHA256_DIGEST_LENGTH;
1305 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1306 break;
1307 case SHA384_HMAC_MECH_INFO_TYPE:
1308 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1309 case SHA512_HMAC_MECH_INFO_TYPE:
1310 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1311 sha_digest_len = digest_len = SHA512_DIGEST_LENGTH;
1312 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1313 break;
1314 default:
1315 return (CRYPTO_MECHANISM_INVALID);
1316 }
1317
1318 /* Add support for key by attributes (RFE 4706552) */
1319 if (key->ck_format != CRYPTO_KEY_RAW)
1320 return (CRYPTO_ARGUMENTS_BAD);
1321
1322 if (ctx_template != NULL) {
1323 /* reuse context template */
1324 bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1325 } else {
1326 sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
1327 /* no context template, initialize context */
1328 if (keylen_in_bytes > sha_hmac_block_size) {
1329 /*
1330 * Hash the passed-in key to get a smaller key.
1331 * The inner context is used since it hasn't been
1332 * initialized yet.
1333 */
1334 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1335 &sha2_hmac_ctx.hc_icontext,
1336 key->ck_data, keylen_in_bytes, digest);
1337 sha2_mac_init_ctx(&sha2_hmac_ctx, digest,
1338 sha_digest_len);
1339 } else {
1340 sha2_mac_init_ctx(&sha2_hmac_ctx, key->ck_data,
1341 keylen_in_bytes);
1342 }
1343 }
1344
1345 /* get the mechanism parameters, if applicable */
1346 if (mechanism->cm_type % 3 == 2) {
1347 if (mechanism->cm_param == NULL ||
1348 mechanism->cm_param_len != sizeof (ulong_t)) {
1349 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1350 goto bail;
1351 }
1352 PROV_SHA2_GET_DIGEST_LEN(mechanism, digest_len);
1353 if (digest_len > sha_digest_len) {
1354 ret = CRYPTO_MECHANISM_PARAM_INVALID;
1355 goto bail;
1356 }
1357 }
1358
1359 if (mac->cd_length != digest_len) {
1360 ret = CRYPTO_INVALID_MAC;
1361 goto bail;
1362 }
1363
1364 /* do a SHA2 update of the inner context using the specified data */
1365 SHA2_MAC_UPDATE(data, sha2_hmac_ctx, ret);
1366 if (ret != CRYPTO_SUCCESS)
1367 /* the update failed, free context and bail */
1368 goto bail;
1369
1370 /* do a SHA2 final on the inner context */
1371 SHA2Final(digest, &sha2_hmac_ctx.hc_icontext);
1372
1373 /*
1374 * Do an SHA2 update on the outer context, feeding the inner
1375 * digest as data.
1376 *
1377 * HMAC-SHA384 needs special handling as the outer hash needs only 48
1378 * bytes of the inner hash value.
1379 */
1380 if (mechanism->cm_type == SHA384_HMAC_MECH_INFO_TYPE ||
1381 mechanism->cm_type == SHA384_HMAC_GEN_MECH_INFO_TYPE)
1382 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest,
1383 SHA384_DIGEST_LENGTH);
1384 else
1385 SHA2Update(&sha2_hmac_ctx.hc_ocontext, digest, sha_digest_len);
1386
1387 /*
1388 * Do a SHA2 final on the outer context, storing the computed
	 * digest in the user's buffer.
1390 */
1391 SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
1392
1393 /*
1394 * Compare the computed digest against the expected digest passed
1395 * as argument.
1396 */
1397
1398 switch (mac->cd_format) {
1399
1400 case CRYPTO_DATA_RAW:
1401 if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
1402 mac->cd_offset, digest_len) != 0)
1403 ret = CRYPTO_INVALID_MAC;
1404 break;
1405
1406 case CRYPTO_DATA_UIO: {
1407 off_t offset = mac->cd_offset;
1408 uint_t vec_idx;
1409 off_t scratch_offset = 0;
1410 size_t length = digest_len;
1411 size_t cur_len;
1412
1413 /* we support only kernel buffer */
1414 if (mac->cd_uio->uio_segflg != UIO_SYSSPACE)
1415 return (CRYPTO_ARGUMENTS_BAD);
1416
1417 /* jump to the first iovec containing the expected digest */
1418 for (vec_idx = 0;
		    vec_idx < mac->cd_uio->uio_iovcnt &&
		    offset >= mac->cd_uio->uio_iov[vec_idx].iov_len;
1421 offset -= mac->cd_uio->uio_iov[vec_idx++].iov_len)
1422 ;
1423 if (vec_idx == mac->cd_uio->uio_iovcnt) {
1424 /*
1425 * The caller specified an offset that is
1426 * larger than the total size of the buffers
1427 * it provided.
1428 */
1429 ret = CRYPTO_DATA_LEN_RANGE;
1430 break;
1431 }
1432
1433 /* do the comparison of computed digest vs specified one */
1434 while (vec_idx < mac->cd_uio->uio_iovcnt && length > 0) {
1435 cur_len = MIN(mac->cd_uio->uio_iov[vec_idx].iov_len -
1436 offset, length);
1437
1438 if (bcmp(digest + scratch_offset,
1439 mac->cd_uio->uio_iov[vec_idx].iov_base + offset,
1440 cur_len) != 0) {
1441 ret = CRYPTO_INVALID_MAC;
1442 break;
1443 }
1444
1445 length -= cur_len;
1446 vec_idx++;
1447 scratch_offset += cur_len;
1448 offset = 0;
1449 }
1450 break;
1451 }
1452
1453 case CRYPTO_DATA_MBLK: {
1454 off_t offset = mac->cd_offset;
1455 mblk_t *mp;
1456 off_t scratch_offset = 0;
1457 size_t length = digest_len;
1458 size_t cur_len;
1459
1460 /* jump to the first mblk_t containing the expected digest */
1461 for (mp = mac->cd_mp; mp != NULL && offset >= MBLKL(mp);
1462 offset -= MBLKL(mp), mp = mp->b_cont)
1463 ;
1464 if (mp == NULL) {
1465 /*
1466 * The caller specified an offset that is larger than
1467 * the total size of the buffers it provided.
1468 */
1469 ret = CRYPTO_DATA_LEN_RANGE;
1470 break;
1471 }
1472
1473 while (mp != NULL && length > 0) {
1474 cur_len = MIN(MBLKL(mp) - offset, length);
1475 if (bcmp(digest + scratch_offset,
1476 mp->b_rptr + offset, cur_len) != 0) {
1477 ret = CRYPTO_INVALID_MAC;
1478 break;
1479 }
1480
1481 length -= cur_len;
1482 mp = mp->b_cont;
1483 scratch_offset += cur_len;
1484 offset = 0;
1485 }
1486 break;
1487 }
1488
1489 default:
1490 ret = CRYPTO_ARGUMENTS_BAD;
1491 }
1492
1493 return (ret);
1494 bail:
1495 bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
1496 mac->cd_length = 0;
1497 return (ret);
1498 }
1499
1500 /*
1501 * KCF software provider context management entry points.
1502 */
1503
1504 /* ARGSUSED */
1505 static int
1506 sha2_create_ctx_template(crypto_provider_handle_t provider,
1507 crypto_mechanism_t *mechanism, crypto_key_t *key,
1508 crypto_spi_ctx_template_t *ctx_template, size_t *ctx_template_size,
1509 crypto_req_handle_t req)
1510 {
1511 sha2_hmac_ctx_t *sha2_hmac_ctx_tmpl;
1512 uint_t keylen_in_bytes = CRYPTO_BITS2BYTES(key->ck_length);
1513 uint32_t sha_digest_len, sha_hmac_block_size;
1514
1515 /*
1516 * Set the digest length and block size to values appropriate to the
1517 * mechanism
1518 */
1519 switch (mechanism->cm_type) {
1520 case SHA256_HMAC_MECH_INFO_TYPE:
1521 case SHA256_HMAC_GEN_MECH_INFO_TYPE:
1522 sha_digest_len = SHA256_DIGEST_LENGTH;
1523 sha_hmac_block_size = SHA256_HMAC_BLOCK_SIZE;
1524 break;
1525 case SHA384_HMAC_MECH_INFO_TYPE:
1526 case SHA384_HMAC_GEN_MECH_INFO_TYPE:
1527 case SHA512_HMAC_MECH_INFO_TYPE:
1528 case SHA512_HMAC_GEN_MECH_INFO_TYPE:
1529 sha_digest_len = SHA512_DIGEST_LENGTH;
1530 sha_hmac_block_size = SHA512_HMAC_BLOCK_SIZE;
1531 break;
1532 default:
1533 return (CRYPTO_MECHANISM_INVALID);
1534 }
1535
1536 /* Add support for key by attributes (RFE 4706552) */
1537 if (key->ck_format != CRYPTO_KEY_RAW)
1538 return (CRYPTO_ARGUMENTS_BAD);
1539
1540 /*
1541 * Allocate and initialize SHA2 context.
1542 */
1543 sha2_hmac_ctx_tmpl = kmem_alloc(sizeof (sha2_hmac_ctx_t),
1544 crypto_kmflag(req));
1545 if (sha2_hmac_ctx_tmpl == NULL)
1546 return (CRYPTO_HOST_MEMORY);
1547
1548 sha2_hmac_ctx_tmpl->hc_mech_type = mechanism->cm_type;
1549
1550 if (keylen_in_bytes > sha_hmac_block_size) {
1551 uchar_t digested_key[SHA512_DIGEST_LENGTH];
1552
1553 /*
1554 * Hash the passed-in key to get a smaller key.
1555 * The inner context is used since it hasn't been
1556 * initialized yet.
1557 */
1558 PROV_SHA2_DIGEST_KEY(mechanism->cm_type / 3,
1559 &sha2_hmac_ctx_tmpl->hc_icontext,
1560 key->ck_data, keylen_in_bytes, digested_key);
1561 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, digested_key,
1562 sha_digest_len);
1563 } else {
1564 sha2_mac_init_ctx(sha2_hmac_ctx_tmpl, key->ck_data,
1565 keylen_in_bytes);
1566 }
1567
1568 *ctx_template = (crypto_spi_ctx_template_t)sha2_hmac_ctx_tmpl;
1569 *ctx_template_size = sizeof (sha2_hmac_ctx_t);
1570
1571 return (CRYPTO_SUCCESS);
1572 }
1573
1574 static int
1575 sha2_free_context(crypto_ctx_t *ctx)
1576 {
1577 uint_t ctx_len;
1578
1579 if (ctx->cc_provider_private == NULL)
1580 return (CRYPTO_SUCCESS);
1581
1582 /*
1583 * We have to free either SHA2 or SHA2-HMAC contexts, which
1584 * have different lengths.
1585 *
1586 * Note: Below is dependent on the mechanism ordering.
1587 */
1588
1589 if (PROV_SHA2_CTX(ctx)->sc_mech_type % 3 == 0)
1590 ctx_len = sizeof (sha2_ctx_t);
1591 else
1592 ctx_len = sizeof (sha2_hmac_ctx_t);
1593
1594 bzero(ctx->cc_provider_private, ctx_len);
1595 kmem_free(ctx->cc_provider_private, ctx_len);
1596 ctx->cc_provider_private = NULL;
1597
1598 return (CRYPTO_SUCCESS);
1599 }