/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2009 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */
/*
 * Copyright 2015 by Saso Kiselkov. All rights reserved.
 */

#ifndef _COMMON_CRYPTO_MODES_H
#define _COMMON_CRYPTO_MODES_H

#ifdef __cplusplus
extern "C" {
#endif

#include <sys/strsun.h>
#include <sys/systm.h>
#include <sys/sysmacros.h>
#include <sys/types.h>
#include <sys/errno.h>
#include <sys/rwlock.h>
#include <sys/kmem.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>

#define ECB_MODE 0x00000002
#define CBC_MODE 0x00000004
#define CTR_MODE 0x00000008
#define CCM_MODE 0x00000010
#define GCM_MODE 0x00000020
#define GMAC_MODE 0x00000040

/*
 * cc_keysched: Pointer to key schedule.
 *
 * cc_keysched_len: Length of the key schedule.
 *
 * cc_remainder: Residual data, i.e. data that could not yet be
 * processed because too few bytes were available; it is
 * held here until more data arrives.
 *
 * cc_remainder_len: Number of bytes in cc_remainder.
 *
 * cc_iv: Scratch buffer that sometimes contains the IV.
 *
 * cc_lastp: Pointer to the previous block of ciphertext.
 *
 * cc_copy_to: Pointer to where encrypted residual data needs
 * to be copied.
 *
 * cc_flags: PROVIDER_OWNS_KEY_SCHEDULE
 * When a context is freed, it is necessary to know
 * whether the key schedule was allocated by the caller
 * or internally, e.g. by an init routine. If it was
 * allocated internally, it must be freed here.
 *
 * ECB_MODE, CBC_MODE, CTR_MODE, CCM_MODE, GCM_MODE,
 * or GMAC_MODE
 */
struct common_ctx {
	void *cc_keysched;
	size_t cc_keysched_len;
	uint64_t cc_iv[2];
	uint64_t cc_remainder[2];
	size_t cc_remainder_len;
	uint8_t *cc_lastp;
	uint8_t *cc_copy_to;
	uint32_t cc_flags;
};

typedef struct common_ctx common_ctx_t;

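/*
 * Illustrative sketch (not a declaration in this header): a mode
 * implementation typically tests the mode bit in cc_flags and, at teardown,
 * frees the key schedule only when it was allocated internally. The real
 * cleanup lives in the cipher modules; this is only the general shape,
 * assuming a common_ctx_t *ctx:
 *
 *	if (ctx->cc_flags & CTR_MODE) {
 *		... counter-mode specific handling ...
 *	}
 *
 *	if (ctx->cc_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
 *		bzero(ctx->cc_keysched, ctx->cc_keysched_len);
 *		kmem_free(ctx->cc_keysched, ctx->cc_keysched_len);
 *	}
 */
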
typedef struct ecb_ctx {
	struct common_ctx ecb_common;
	uint64_t ecb_lastblock[2];
} ecb_ctx_t;

#define ecb_keysched ecb_common.cc_keysched
#define ecb_keysched_len ecb_common.cc_keysched_len
#define ecb_iv ecb_common.cc_iv
#define ecb_remainder ecb_common.cc_remainder
#define ecb_remainder_len ecb_common.cc_remainder_len
#define ecb_lastp ecb_common.cc_lastp
#define ecb_copy_to ecb_common.cc_copy_to
#define ecb_flags ecb_common.cc_flags

typedef struct cbc_ctx {
	struct common_ctx cbc_common;
	uint64_t cbc_lastblock[2];
} cbc_ctx_t;

#define cbc_keysched cbc_common.cc_keysched
#define cbc_keysched_len cbc_common.cc_keysched_len
#define cbc_iv cbc_common.cc_iv
#define cbc_remainder cbc_common.cc_remainder
#define cbc_remainder_len cbc_common.cc_remainder_len
#define cbc_lastp cbc_common.cc_lastp
#define cbc_copy_to cbc_common.cc_copy_to
#define cbc_flags cbc_common.cc_flags

/*
 * ctr_lower_mask Bit-mask for lower 8 bytes of counter block.
 * ctr_upper_mask Bit-mask for upper 8 bytes of counter block.
 */
typedef struct ctr_ctx {
	struct common_ctx ctr_common;
	uint64_t ctr_lower_mask;
	uint64_t ctr_upper_mask;
	uint32_t ctr_tmp[4];
} ctr_ctx_t;

/*
 * ctr_cb Counter block.
 */
#define ctr_keysched ctr_common.cc_keysched
#define ctr_keysched_len ctr_common.cc_keysched_len
#define ctr_cb ctr_common.cc_iv
#define ctr_remainder ctr_common.cc_remainder
#define ctr_remainder_len ctr_common.cc_remainder_len
#define ctr_lastp ctr_common.cc_lastp
#define ctr_copy_to ctr_common.cc_copy_to
#define ctr_flags ctr_common.cc_flags

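/*
 * Illustrative sketch (simplified): ctr_lower_mask/ctr_upper_mask bound the
 * counter field within the 128-bit counter block (ctr_cb), so only the
 * counter bits roll over between blocks and the nonce bits are preserved.
 * Conceptually, for a counter confined to the low 64 bits:
 *
 *	lower = (lower & ~lower_mask) | ((lower + 1) & lower_mask);
 *
 * where "lower" stands for the low half of ctr_cb. Byte-order handling and
 * the carry into the upper half (via ctr_upper_mask) are done by the CTR
 * mode implementation, not sketched here.
 */
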
/*
 *
 * ccm_mac_len: Stores length of the MAC in CCM mode.
 * ccm_mac_buf: Stores the intermediate value for MAC in CCM encrypt.
 * In CCM decrypt, stores the input MAC value.
 * ccm_data_len: Length of the plaintext for CCM mode encrypt, or
 * length of the ciphertext for CCM mode decrypt.
 * ccm_processed_data_len:
 * Length of processed plaintext in CCM mode encrypt,
 * or length of processed ciphertext for CCM mode decrypt.
 * ccm_processed_mac_len:
 * Length of MAC data accumulated in CCM mode decrypt.
 *
 * ccm_pt_buf: Only used in CCM mode decrypt. It stores the
 * decrypted plaintext to be returned when
 * MAC verification succeeds in decrypt_final.
 * Memory for this should be allocated in the AES module.
 *
 */
typedef struct ccm_ctx {
	struct common_ctx ccm_common;
	uint32_t ccm_tmp[4];
	size_t ccm_mac_len;
	uint64_t ccm_mac_buf[2];
	size_t ccm_data_len;
	size_t ccm_processed_data_len;
	size_t ccm_processed_mac_len;
	uint8_t *ccm_pt_buf;
	uint64_t ccm_mac_input_buf[2];
	uint64_t ccm_counter_mask;
} ccm_ctx_t;

#define ccm_keysched ccm_common.cc_keysched
#define ccm_keysched_len ccm_common.cc_keysched_len
#define ccm_cb ccm_common.cc_iv
#define ccm_remainder ccm_common.cc_remainder
#define ccm_remainder_len ccm_common.cc_remainder_len
#define ccm_lastp ccm_common.cc_lastp
#define ccm_copy_to ccm_common.cc_copy_to
#define ccm_flags ccm_common.cc_flags

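/*
 * Illustrative caller flow (a sketch only; encrypt_block, copy_block,
 * xor_block are the provider's own callbacks and blocklen is its block
 * size): CCM decryption is two-phase. Update calls accumulate decrypted
 * data into ccm_pt_buf and the trailing MAC bytes into ccm_mac_input_buf;
 * the plaintext is released by the final call only after the computed MAC
 * matches the received one:
 *
 *	rv = ccm_mode_decrypt_contiguous_blocks(&ctx, (char *)in, inlen,
 *	    out, blocklen, encrypt_block, copy_block, xor_block);
 *	...
 *	rv = ccm_decrypt_final(&ctx, out, blocklen, encrypt_block,
 *	    copy_block, xor_block);
 */
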
/*
 * gcm_tag_len: Length of authentication tag.
 *
 * gcm_ghash: Stores output from the GHASH function.
 *
 * gcm_processed_data_len:
 * Length of processed plaintext (encrypt) or
 * length of processed ciphertext (decrypt).
 *
 * gcm_H: Subkey.
 *
 * gcm_H_table: Pipelined Karatsuba multipliers.
 *
 * gcm_J0: Pre-counter block generated from the IV.
 *
 * gcm_tmp: Temp storage for ciphertext when padding is needed.
 *
 * gcm_len_a_len_c: 64-bit representations of the bit lengths of
 * AAD and ciphertext.
 *
 * gcm_kmflag: Current value of kmflag. Used only for allocating
 * the plaintext buffer during decryption.
 *
 * gcm_last_input: Buffer for the last (up to) two blocks of input.
 * It is used when the input is not block-aligned, and
 * during decryption to temporarily hold the tail of the
 * ciphertext stream, since that tail could be the GHASH
 * auth tag, which must be verified in the final() call
 * rather than decrypted.
 *
 * gcm_last_input_fill: Number of bytes actually stored in gcm_last_input.
 */
typedef struct gcm_ctx {
	struct common_ctx gcm_common;
	size_t gcm_tag_len;
	size_t gcm_processed_data_len;
	uint64_t gcm_ghash[2];
	uint64_t gcm_H[2];
#ifdef __amd64
	uint8_t gcm_H_table[256];
#endif
	uint64_t gcm_J0[2];
	uint64_t gcm_tmp[2];
	uint64_t gcm_len_a_len_c[2];
	int gcm_kmflag;
	uint8_t gcm_last_input[32];
	size_t gcm_last_input_fill;
} gcm_ctx_t;

#define gcm_keysched gcm_common.cc_keysched
#define gcm_keysched_len gcm_common.cc_keysched_len
#define gcm_cb gcm_common.cc_iv
#define gcm_remainder gcm_common.cc_remainder
#define gcm_remainder_len gcm_common.cc_remainder_len
#define gcm_lastp gcm_common.cc_lastp
#define gcm_copy_to gcm_common.cc_copy_to
#define gcm_flags gcm_common.cc_flags

#define AES_GMAC_IV_LEN 12
#define AES_GMAC_TAG_BITS 128

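/*
 * Illustrative sketch (a hypothetical caller, not part of this header):
 * AES GMAC uses a fixed 12-byte IV and a full 128-bit tag, so tag storage
 * is typically sized from AES_GMAC_TAG_BITS, e.g.
 *
 *	uint8_t iv[AES_GMAC_IV_LEN];
 *	uint8_t tag[AES_GMAC_TAG_BITS / 8];
 *
 * (Callers may instead use a bits-to-bytes helper, if one is available,
 * rather than dividing by 8 directly.)
 */
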
typedef struct aes_ctx {
	union {
		ecb_ctx_t acu_ecb;
		cbc_ctx_t acu_cbc;
		ctr_ctx_t acu_ctr;
#ifdef _KERNEL
		ccm_ctx_t acu_ccm;
		gcm_ctx_t acu_gcm;
#endif
	} acu;
} aes_ctx_t;

#define ac_flags acu.acu_ecb.ecb_common.cc_flags
#define ac_remainder_len acu.acu_ecb.ecb_common.cc_remainder_len
#define ac_keysched acu.acu_ecb.ecb_common.cc_keysched
#define ac_keysched_len acu.acu_ecb.ecb_common.cc_keysched_len
#define ac_iv acu.acu_ecb.ecb_common.cc_iv
#define ac_lastp acu.acu_ecb.ecb_common.cc_lastp
#define ac_pt_buf acu.acu_ccm.ccm_pt_buf
#define ac_mac_len acu.acu_ccm.ccm_mac_len
#define ac_data_len acu.acu_ccm.ccm_data_len
#define ac_processed_mac_len acu.acu_ccm.ccm_processed_mac_len
#define ac_processed_data_len acu.acu_ccm.ccm_processed_data_len
#define ac_tag_len acu.acu_gcm.gcm_tag_len

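/*
 * Illustrative sketch (simplified; the real dispatch lives in the AES
 * module): aes_ctx_t overlays all mode contexts in one union, and the mode
 * bit recorded in ac_flags selects which member is live, e.g.
 *
 *	if (aes_ctx->ac_flags & CTR_MODE)
 *		rv = ctr_mode_contiguous_blocks(&aes_ctx->acu.acu_ctr,
 *		    data, len, out, block_size, encrypt_block, xor_block,
 *		    cipher_ctr);
 *	else if (aes_ctx->ac_flags & CBC_MODE)
 *		rv = cbc_encrypt_contiguous_blocks(&aes_ctx->acu.acu_cbc,
 *		    data, len, out, block_size, encrypt_block, copy_block,
 *		    xor_block, encrypt_cbc);
 *
 * where encrypt_block, copy_block, xor_block and the *_ctr/_cbc fast-path
 * callbacks are the provider's own primitives (hypothetical names here).
 */
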
typedef struct blowfish_ctx {
	union {
		ecb_ctx_t bcu_ecb;
		cbc_ctx_t bcu_cbc;
	} bcu;
} blowfish_ctx_t;

#define bc_flags bcu.bcu_ecb.ecb_common.cc_flags
#define bc_remainder_len bcu.bcu_ecb.ecb_common.cc_remainder_len
#define bc_keysched bcu.bcu_ecb.ecb_common.cc_keysched
#define bc_keysched_len bcu.bcu_ecb.ecb_common.cc_keysched_len
#define bc_iv bcu.bcu_ecb.ecb_common.cc_iv
#define bc_lastp bcu.bcu_ecb.ecb_common.cc_lastp

typedef struct des_ctx {
	union {
		ecb_ctx_t dcu_ecb;
		cbc_ctx_t dcu_cbc;
	} dcu;
} des_ctx_t;

#define dc_flags dcu.dcu_ecb.ecb_common.cc_flags
#define dc_remainder_len dcu.dcu_ecb.ecb_common.cc_remainder_len
#define dc_keysched dcu.dcu_ecb.ecb_common.cc_keysched
#define dc_keysched_len dcu.dcu_ecb.ecb_common.cc_keysched_len
#define dc_iv dcu.dcu_ecb.ecb_common.cc_iv
#define dc_lastp dcu.dcu_ecb.ecb_common.cc_lastp

extern int ecb_cipher_contiguous_blocks(ecb_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*cipher)(const void *, const uint8_t *, uint8_t *),
    int (*cipher_ecb)(const void *, const uint8_t *, uint8_t *, uint64_t));

extern int cbc_encrypt_contiguous_blocks(cbc_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*encrypt)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *),
    int (*encrypt_cbc)(const void *, const uint8_t *, uint8_t *,
    const uint8_t *, uint64_t));

extern int cbc_decrypt_contiguous_blocks(cbc_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*decrypt)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *),
    int (*decrypt_ecb)(const void *, const uint8_t *, uint8_t *, uint64_t),
    void (*xor_block_range)(const uint8_t *, uint8_t *, uint64_t));

extern int ctr_mode_contiguous_blocks(ctr_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*cipher)(const void *, const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *),
    int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
    uint64_t *));

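/*
 * Illustrative sketch (hypothetical provider primitives; nothing here is
 * declared by this header): the contiguous-block entry points are
 * cipher-agnostic and are driven entirely by callbacks of these shapes:
 *
 *	static int
 *	my_encrypt_block(const void *keysched, const uint8_t *in,
 *	    uint8_t *out)
 *	{
 *		... encrypt one block ...
 *	}
 *
 *	static void
 *	my_xor_block(const uint8_t *src, uint8_t *dst)
 *	{
 *		... dst ^= src, one block ...
 *	}
 *
 * The uint64_t-taking *_ecb/_cbc/_ctr callbacks are multi-block fast paths
 * added alongside the single-block primitives; whether they may be omitted
 * (NULL) is up to the individual mode implementations.
 */
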
extern int ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern int ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern int gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *),
    int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
    uint64_t *));

extern int gcm_mode_decrypt_contiguous_blocks(gcm_ctx_t *, char *, size_t,
    crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *),
    int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
    uint64_t *));

int ccm_encrypt_final(ccm_ctx_t *, crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

int gcm_encrypt_final(gcm_ctx_t *, crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern int ccm_decrypt_final(ccm_ctx_t *, crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern int gcm_decrypt_final(gcm_ctx_t *, crypto_data_t *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *),
    int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
    uint64_t *));

extern int ctr_mode_final(ctr_ctx_t *, crypto_data_t *,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *));

extern int cbc_init_ctx(cbc_ctx_t *, char *, size_t, size_t,
    void (*copy_block)(const uint8_t *, uint64_t *));

extern int ctr_init_ctx(ctr_ctx_t *, ulong_t, uint8_t *,
    void (*copy_block)(const uint8_t *, uint8_t *));

extern int ccm_init_ctx(ccm_ctx_t *, char *, int, boolean_t, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern int gcm_init_ctx(gcm_ctx_t *, char *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern int gmac_init_ctx(gcm_ctx_t *, char *, size_t,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
    void (*copy_block)(const uint8_t *, uint8_t *),
    void (*xor_block)(const uint8_t *, uint8_t *));

extern void calculate_ccm_mac(ccm_ctx_t *, uint8_t *,
    int (*encrypt_block)(const void *, const uint8_t *, uint8_t *));

extern void crypto_init_ptrs(crypto_data_t *, void **, offset_t *);

extern void *ecb_alloc_ctx(int);
extern void *cbc_alloc_ctx(int);
extern void *ctr_alloc_ctx(int);
extern void *ccm_alloc_ctx(int);
extern void *gcm_alloc_ctx(int);
extern void *gmac_alloc_ctx(int);
extern void crypto_free_mode_ctx(void *);
extern void gcm_set_kmflag(gcm_ctx_t *, int);

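/*
 * Illustrative lifecycle sketch (kernel context assumed; KM_SLEEP is the
 * usual kmem flag): the *_alloc_ctx() routines return a zeroed context with
 * the corresponding mode flag already set, and crypto_free_mode_ctx()
 * releases it based on that flag, e.g.
 *
 *	gcm_ctx_t *gcm_ctx;
 *
 *	if ((gcm_ctx = gcm_alloc_ctx(KM_SLEEP)) == NULL)
 *		return (CRYPTO_HOST_MEMORY);
 *	...
 *	crypto_free_mode_ctx(gcm_ctx);
 */
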
#ifdef INLINE_CRYPTO_GET_PTRS
/*
 * Get pointers for where in the output to copy a block of encrypted or
 * decrypted data. The iov_or_mp argument stores a pointer to the current
 * iovec or mp, and offset stores an offset into the current iovec or mp.
 */
static inline void
crypto_get_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset,
    uint8_t **out_data_1, size_t *out_data_1_len, uint8_t **out_data_2,
    size_t amt)
{
	offset_t offset;

	switch (out->cd_format) {
	case CRYPTO_DATA_RAW: {
		iovec_t *iov;

		offset = *current_offset;
		iov = &out->cd_raw;
		if ((offset + amt) <= iov->iov_len) {
			/* one block fits */
			*out_data_1 = (uint8_t *)iov->iov_base + offset;
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		}
		break;
	}

	case CRYPTO_DATA_UIO: {
		uio_t *uio = out->cd_uio;
		iovec_t *iov;
		offset_t offset;
		uintptr_t vec_idx;
		uint8_t *p;

		offset = *current_offset;
		vec_idx = (uintptr_t)(*iov_or_mp);
		iov = &uio->uio_iov[vec_idx];
		p = (uint8_t *)iov->iov_base + offset;
		*out_data_1 = p;

		if (offset + amt <= iov->iov_len) {
			/* can fit one block into this iov */
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		} else {
			/* one block spans two iovecs */
			*out_data_1_len = iov->iov_len - offset;
			if (vec_idx == uio->uio_iovcnt)
				return;
			vec_idx++;
			iov = &uio->uio_iov[vec_idx];
			*out_data_2 = (uint8_t *)iov->iov_base;
			*current_offset = amt - *out_data_1_len;
		}
		*iov_or_mp = (void *)vec_idx;
		break;
	}

	case CRYPTO_DATA_MBLK: {
		mblk_t *mp;
		uint8_t *p;

		offset = *current_offset;
		mp = (mblk_t *)*iov_or_mp;
		p = mp->b_rptr + offset;
		*out_data_1 = p;
		if ((p + amt) <= mp->b_wptr) {
			/* can fit one block into this mblk */
			*out_data_1_len = amt;
			*out_data_2 = NULL;
			*current_offset = offset + amt;
		} else {
			/* one block spans two mblks */
			*out_data_1_len = _PTRDIFF(mp->b_wptr, p);
			if ((mp = mp->b_cont) == NULL)
				return;
			*out_data_2 = mp->b_rptr;
			*current_offset = (amt - *out_data_1_len);
		}
		*iov_or_mp = mp;
		break;
	}
	} /* end switch */
}
#endif /* INLINE_CRYPTO_GET_PTRS */

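/*
 * Illustrative usage sketch (a typical consumer; "block" and "block_size"
 * are the caller's own buffer and cipher block size): crypto_init_ptrs()
 * seeds the iov_or_mp/offset cursor, and each crypto_get_ptrs() call maps
 * the next block_size bytes of output, which may span two buffers:
 *
 *	void *iov_or_mp;
 *	offset_t offset;
 *	uint8_t *out_data_1, *out_data_2;
 *	size_t out_data_1_len;
 *
 *	crypto_init_ptrs(out, &iov_or_mp, &offset);
 *	crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
 *	    &out_data_1_len, &out_data_2, block_size);
 *	bcopy(block, out_data_1, out_data_1_len);
 *	if (out_data_2 != NULL)
 *		bcopy(block + out_data_1_len, out_data_2,
 *		    block_size - out_data_1_len);
 */
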
/*
 * Checks whether a crypto_data_t object is composed of a single contiguous
 * buffer. This is used in all fastpath detection code to avoid the
 * possibility of having to do partial block splicing.
 */
#define CRYPTO_DATA_IS_SINGLE_BLOCK(cd) \
	(cd != NULL && (cd->cd_format == CRYPTO_DATA_RAW || \
	(cd->cd_format == CRYPTO_DATA_UIO && cd->cd_uio->uio_iovcnt == 1) || \
	(cd->cd_format == CRYPTO_DATA_MBLK && cd->cd_mp->b_next == NULL)))

/*
 * Returns the first contiguous data buffer in a crypto_data_t object.
 */
#define CRYPTO_DATA_FIRST_BLOCK(cd) \
	(cd->cd_format == CRYPTO_DATA_RAW ? \
	(void *)(cd->cd_raw.iov_base + cd->cd_offset) : \
	(cd->cd_format == CRYPTO_DATA_UIO ? \
	(void *)(cd->cd_uio->uio_iov[0].iov_base + cd->cd_offset) : \
	(void *)(cd->cd_mp->b_rptr + cd->cd_offset)))

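/*
 * Illustrative sketch (hypothetical fastpath check; "fast_cipher" stands in
 * for a provider's single-buffer routine): the two macros above are meant
 * to be used together, first proving that the data is contiguous and only
 * then taking a raw pointer to it:
 *
 *	if (CRYPTO_DATA_IS_SINGLE_BLOCK(in)) {
 *		uint8_t *buf = CRYPTO_DATA_FIRST_BLOCK(in);
 *		rv = fast_cipher(ctx, buf, in->cd_length);
 *	} else {
 *		... fall back to the generic contiguous-blocks path ...
 *	}
 */
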
#ifdef __cplusplus
}
#endif

#endif /* _COMMON_CRYPTO_MODES_H */