Print this page
4896 Performance improvements for KCF AES modes
Split |
Close |
Expand all |
Collapse all |
--- old/usr/src/common/crypto/modes/modes.h
+++ new/usr/src/common/crypto/modes/modes.h
1 1 /*
2 2 * CDDL HEADER START
3 3 *
4 4 * The contents of this file are subject to the terms of the
5 5 * Common Development and Distribution License (the "License").
6 6 * You may not use this file except in compliance with the License.
7 7 *
8 8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 9 * or http://www.opensolaris.org/os/licensing.
10 10 * See the License for the specific language governing permissions
11 11 * and limitations under the License.
12 12 *
13 13 * When distributing Covered Code, include this CDDL HEADER in each
14 14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
↓ open down ↓ |
14 lines elided |
↑ open up ↑ |
15 15 * If applicable, add the following below this CDDL HEADER, with the
16 16 * fields enclosed by brackets "[]" replaced with your own identifying
17 17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 18 *
19 19 * CDDL HEADER END
20 20 */
21 21 /*
22 22 * Copyright 2009 Sun Microsystems, Inc. All rights reserved.
23 23 * Use is subject to license terms.
24 24 */
25 +/*
26 + * Copyright 2015 by Saso Kiselkov. All rights reserved.
27 + */
25 28
26 29 #ifndef _COMMON_CRYPTO_MODES_H
27 30 #define _COMMON_CRYPTO_MODES_H
28 31
29 32 #ifdef __cplusplus
30 33 extern "C" {
31 34 #endif
32 35
33 36 #include <sys/strsun.h>
34 37 #include <sys/systm.h>
35 38 #include <sys/sysmacros.h>
36 39 #include <sys/types.h>
37 40 #include <sys/errno.h>
38 41 #include <sys/rwlock.h>
39 42 #include <sys/kmem.h>
40 43 #include <sys/crypto/common.h>
41 44 #include <sys/crypto/impl.h>
42 45
43 46 #define ECB_MODE 0x00000002
44 47 #define CBC_MODE 0x00000004
45 48 #define CTR_MODE 0x00000008
46 49 #define CCM_MODE 0x00000010
47 50 #define GCM_MODE 0x00000020
48 51 #define GMAC_MODE 0x00000040
49 52
50 53 /*
51 54 * cc_keysched: Pointer to key schedule.
52 55 *
53 56 * cc_keysched_len: Length of the key schedule.
54 57 *
55 58 * cc_remainder: This is for residual data, i.e. data that can't
56 59 * be processed because there are too few bytes.
57 60 * Must wait until more data arrives.
58 61 *
59 62 * cc_remainder_len: Number of bytes in cc_remainder.
60 63 *
61 64 * cc_iv: Scratch buffer that sometimes contains the IV.
62 65 *
63 66 * cc_lastp: Pointer to previous block of ciphertext.
64 67 *
65 68 * cc_copy_to: Pointer to where encrypted residual data needs
66 69 * to be copied.
67 70 *
68 71 * cc_flags: PROVIDER_OWNS_KEY_SCHEDULE
69 72 * When a context is freed, it is necessary
70 73 * to know whether the key schedule was allocated
71 74 * by the caller, or internally, e.g. an init routine.
72 75 * If allocated by the latter, then it needs to be freed.
73 76 *
 74  77  * ECB_MODE, CBC_MODE, CTR_MODE, CCM_MODE, GCM_MODE, or GMAC_MODE
75 78 */
76 79 struct common_ctx {
77 80 void *cc_keysched;
78 81 size_t cc_keysched_len;
79 82 uint64_t cc_iv[2];
80 83 uint64_t cc_remainder[2];
81 84 size_t cc_remainder_len;
82 85 uint8_t *cc_lastp;
83 86 uint8_t *cc_copy_to;
84 87 uint32_t cc_flags;
85 88 };
86 89
87 90 typedef struct common_ctx common_ctx_t;
88 91
89 92 typedef struct ecb_ctx {
90 93 struct common_ctx ecb_common;
91 94 uint64_t ecb_lastblock[2];
92 95 } ecb_ctx_t;
93 96
94 97 #define ecb_keysched ecb_common.cc_keysched
95 98 #define ecb_keysched_len ecb_common.cc_keysched_len
96 99 #define ecb_iv ecb_common.cc_iv
97 100 #define ecb_remainder ecb_common.cc_remainder
98 101 #define ecb_remainder_len ecb_common.cc_remainder_len
99 102 #define ecb_lastp ecb_common.cc_lastp
100 103 #define ecb_copy_to ecb_common.cc_copy_to
101 104 #define ecb_flags ecb_common.cc_flags
102 105
103 106 typedef struct cbc_ctx {
104 107 struct common_ctx cbc_common;
105 108 uint64_t cbc_lastblock[2];
106 109 } cbc_ctx_t;
107 110
108 111 #define cbc_keysched cbc_common.cc_keysched
109 112 #define cbc_keysched_len cbc_common.cc_keysched_len
110 113 #define cbc_iv cbc_common.cc_iv
111 114 #define cbc_remainder cbc_common.cc_remainder
112 115 #define cbc_remainder_len cbc_common.cc_remainder_len
113 116 #define cbc_lastp cbc_common.cc_lastp
114 117 #define cbc_copy_to cbc_common.cc_copy_to
115 118 #define cbc_flags cbc_common.cc_flags
116 119
117 120 /*
118 121 * ctr_lower_mask Bit-mask for lower 8 bytes of counter block.
119 122 * ctr_upper_mask Bit-mask for upper 8 bytes of counter block.
120 123 */
121 124 typedef struct ctr_ctx {
122 125 struct common_ctx ctr_common;
123 126 uint64_t ctr_lower_mask;
124 127 uint64_t ctr_upper_mask;
125 128 uint32_t ctr_tmp[4];
126 129 } ctr_ctx_t;
127 130
128 131 /*
129 132 * ctr_cb Counter block.
130 133 */
131 134 #define ctr_keysched ctr_common.cc_keysched
132 135 #define ctr_keysched_len ctr_common.cc_keysched_len
133 136 #define ctr_cb ctr_common.cc_iv
134 137 #define ctr_remainder ctr_common.cc_remainder
135 138 #define ctr_remainder_len ctr_common.cc_remainder_len
136 139 #define ctr_lastp ctr_common.cc_lastp
137 140 #define ctr_copy_to ctr_common.cc_copy_to
138 141 #define ctr_flags ctr_common.cc_flags
139 142
140 143 /*
141 144 *
142 145 * ccm_mac_len: Stores length of the MAC in CCM mode.
143 146 * ccm_mac_buf: Stores the intermediate value for MAC in CCM encrypt.
144 147 * In CCM decrypt, stores the input MAC value.
145 148 * ccm_data_len: Length of the plaintext for CCM mode encrypt, or
146 149 * length of the ciphertext for CCM mode decrypt.
147 150 * ccm_processed_data_len:
148 151 * Length of processed plaintext in CCM mode encrypt,
149 152 * or length of processed ciphertext for CCM mode decrypt.
150 153 * ccm_processed_mac_len:
151 154 * Length of MAC data accumulated in CCM mode decrypt.
152 155 *
153 156 * ccm_pt_buf: Only used in CCM mode decrypt. It stores the
154 157 * decrypted plaintext to be returned when
155 158 * MAC verification succeeds in decrypt_final.
156 159 * Memory for this should be allocated in the AES module.
157 160 *
158 161 */
159 162 typedef struct ccm_ctx {
160 163 struct common_ctx ccm_common;
161 164 uint32_t ccm_tmp[4];
162 165 size_t ccm_mac_len;
163 166 uint64_t ccm_mac_buf[2];
164 167 size_t ccm_data_len;
165 168 size_t ccm_processed_data_len;
166 169 size_t ccm_processed_mac_len;
167 170 uint8_t *ccm_pt_buf;
168 171 uint64_t ccm_mac_input_buf[2];
169 172 uint64_t ccm_counter_mask;
170 173 } ccm_ctx_t;
171 174
172 175 #define ccm_keysched ccm_common.cc_keysched
173 176 #define ccm_keysched_len ccm_common.cc_keysched_len
174 177 #define ccm_cb ccm_common.cc_iv
175 178 #define ccm_remainder ccm_common.cc_remainder
176 179 #define ccm_remainder_len ccm_common.cc_remainder_len
177 180 #define ccm_lastp ccm_common.cc_lastp
178 181 #define ccm_copy_to ccm_common.cc_copy_to
179 182 #define ccm_flags ccm_common.cc_flags
↓ open down ↓ |
145 lines elided |
↑ open up ↑ |
180 183
181 184 /*
182 185 * gcm_tag_len: Length of authentication tag.
183 186 *
184 187 * gcm_ghash: Stores output from the GHASH function.
185 188 *
186 189 * gcm_processed_data_len:
187 190 * Length of processed plaintext (encrypt) or
188 191 * length of processed ciphertext (decrypt).
189 192 *
190 - * gcm_pt_buf: Stores the decrypted plaintext returned by
191 - * decrypt_final when the computed authentication
192 - * tag matches the user supplied tag.
193 - *
194 - * gcm_pt_buf_len: Length of the plaintext buffer.
195 - *
196 193 * gcm_H: Subkey.
197 194 *
195 + * gcm_H_table: Pipelined Karatsuba multipliers.
196 + *
198 197 * gcm_J0: Pre-counter block generated from the IV.
199 198 *
199 + * gcm_tmp: Temp storage for ciphertext when padding is needed.
200 + *
200 201 * gcm_len_a_len_c: 64-bit representations of the bit lengths of
201 202 * AAD and ciphertext.
202 203 *
 203  204  * gcm_kmflag: Current value of kmflag. Used for memory
 204  205  * allocations during decryption (gcm_pt_buf, which this
 204  205  * comment previously referenced, was removed by this change).
206 + *
207 + * gcm_last_input: Buffer of (up to) two last blocks. This is used when
208 + * input is not block-aligned and to temporarily hold
209 + * the end of the ciphertext stream during decryption,
210 + * since that could potentially be the GHASH auth tag
211 + * which we must check in the final() call instead of
212 + * decrypting it.
213 + *
214 + * gcm_last_input_fill: Number of bytes actually stored in gcm_last_input.
205 215 */
206 216 typedef struct gcm_ctx {
207 217 struct common_ctx gcm_common;
208 218 size_t gcm_tag_len;
209 219 size_t gcm_processed_data_len;
210 - size_t gcm_pt_buf_len;
211 - uint32_t gcm_tmp[4];
212 220 uint64_t gcm_ghash[2];
213 221 uint64_t gcm_H[2];
222 +#ifdef __amd64
223 + uint8_t gcm_H_table[256];
224 +#endif
214 225 uint64_t gcm_J0[2];
226 + uint64_t gcm_tmp[2];
215 227 uint64_t gcm_len_a_len_c[2];
216 - uint8_t *gcm_pt_buf;
217 228 int gcm_kmflag;
229 + uint8_t gcm_last_input[32];
230 + size_t gcm_last_input_fill;
218 231 } gcm_ctx_t;
219 232
220 233 #define gcm_keysched gcm_common.cc_keysched
221 234 #define gcm_keysched_len gcm_common.cc_keysched_len
222 235 #define gcm_cb gcm_common.cc_iv
223 236 #define gcm_remainder gcm_common.cc_remainder
224 237 #define gcm_remainder_len gcm_common.cc_remainder_len
225 238 #define gcm_lastp gcm_common.cc_lastp
226 239 #define gcm_copy_to gcm_common.cc_copy_to
227 240 #define gcm_flags gcm_common.cc_flags
228 241
229 242 #define AES_GMAC_IV_LEN 12
230 243 #define AES_GMAC_TAG_BITS 128
231 244
232 245 typedef struct aes_ctx {
233 246 union {
234 247 ecb_ctx_t acu_ecb;
235 248 cbc_ctx_t acu_cbc;
236 249 ctr_ctx_t acu_ctr;
237 250 #ifdef _KERNEL
238 251 ccm_ctx_t acu_ccm;
239 252 gcm_ctx_t acu_gcm;
240 253 #endif
241 254 } acu;
242 255 } aes_ctx_t;
243 256
244 257 #define ac_flags acu.acu_ecb.ecb_common.cc_flags
245 258 #define ac_remainder_len acu.acu_ecb.ecb_common.cc_remainder_len
246 259 #define ac_keysched acu.acu_ecb.ecb_common.cc_keysched
247 260 #define ac_keysched_len acu.acu_ecb.ecb_common.cc_keysched_len
248 261 #define ac_iv acu.acu_ecb.ecb_common.cc_iv
249 262 #define ac_lastp acu.acu_ecb.ecb_common.cc_lastp
250 263 #define ac_pt_buf acu.acu_ccm.ccm_pt_buf
251 264 #define ac_mac_len acu.acu_ccm.ccm_mac_len
252 265 #define ac_data_len acu.acu_ccm.ccm_data_len
253 266 #define ac_processed_mac_len acu.acu_ccm.ccm_processed_mac_len
254 267 #define ac_processed_data_len acu.acu_ccm.ccm_processed_data_len
255 268 #define ac_tag_len acu.acu_gcm.gcm_tag_len
256 269
257 270 typedef struct blowfish_ctx {
258 271 union {
259 272 ecb_ctx_t bcu_ecb;
260 273 cbc_ctx_t bcu_cbc;
261 274 } bcu;
262 275 } blowfish_ctx_t;
263 276
264 277 #define bc_flags bcu.bcu_ecb.ecb_common.cc_flags
265 278 #define bc_remainder_len bcu.bcu_ecb.ecb_common.cc_remainder_len
266 279 #define bc_keysched bcu.bcu_ecb.ecb_common.cc_keysched
267 280 #define bc_keysched_len bcu.bcu_ecb.ecb_common.cc_keysched_len
268 281 #define bc_iv bcu.bcu_ecb.ecb_common.cc_iv
269 282 #define bc_lastp bcu.bcu_ecb.ecb_common.cc_lastp
270 283
271 284 typedef struct des_ctx {
272 285 union {
273 286 ecb_ctx_t dcu_ecb;
274 287 cbc_ctx_t dcu_cbc;
275 288 } dcu;
↓ open down ↓ |
48 lines elided |
↑ open up ↑ |
276 289 } des_ctx_t;
277 290
278 291 #define dc_flags dcu.dcu_ecb.ecb_common.cc_flags
279 292 #define dc_remainder_len dcu.dcu_ecb.ecb_common.cc_remainder_len
280 293 #define dc_keysched dcu.dcu_ecb.ecb_common.cc_keysched
281 294 #define dc_keysched_len dcu.dcu_ecb.ecb_common.cc_keysched_len
282 295 #define dc_iv dcu.dcu_ecb.ecb_common.cc_iv
283 296 #define dc_lastp dcu.dcu_ecb.ecb_common.cc_lastp
284 297
285 298 extern int ecb_cipher_contiguous_blocks(ecb_ctx_t *, char *, size_t,
286 - crypto_data_t *, size_t, int (*cipher)(const void *, const uint8_t *,
287 - uint8_t *));
299 + crypto_data_t *, size_t,
300 + int (*cipher)(const void *, const uint8_t *, uint8_t *),
301 + int (*cipher_ecb)(const void *, const uint8_t *, uint8_t *, uint64_t));
288 302
289 303 extern int cbc_encrypt_contiguous_blocks(cbc_ctx_t *, char *, size_t,
290 304 crypto_data_t *, size_t,
291 305 int (*encrypt)(const void *, const uint8_t *, uint8_t *),
292 - void (*copy_block)(uint8_t *, uint8_t *),
293 - void (*xor_block)(uint8_t *, uint8_t *));
306 + void (*copy_block)(const uint8_t *, uint8_t *),
307 + void (*xor_block)(const uint8_t *, uint8_t *),
308 + int (*encrypt_cbc)(const void *, const uint8_t *, uint8_t *,
309 + const uint8_t *, uint64_t));
294 310
295 311 extern int cbc_decrypt_contiguous_blocks(cbc_ctx_t *, char *, size_t,
296 312 crypto_data_t *, size_t,
297 313 int (*decrypt)(const void *, const uint8_t *, uint8_t *),
298 - void (*copy_block)(uint8_t *, uint8_t *),
299 - void (*xor_block)(uint8_t *, uint8_t *));
314 + void (*copy_block)(const uint8_t *, uint8_t *),
315 + void (*xor_block)(const uint8_t *, uint8_t *),
316 + int (*decrypt_ecb)(const void *, const uint8_t *, uint8_t *, uint64_t),
317 + void (*xor_block_range)(const uint8_t *, uint8_t *, uint64_t));
300 318
301 319 extern int ctr_mode_contiguous_blocks(ctr_ctx_t *, char *, size_t,
302 320 crypto_data_t *, size_t,
303 321 int (*cipher)(const void *, const uint8_t *, uint8_t *),
304 - void (*xor_block)(uint8_t *, uint8_t *));
322 + void (*xor_block)(const uint8_t *, uint8_t *),
323 + int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
324 + uint64_t *));
305 325
306 326 extern int ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *, char *, size_t,
307 327 crypto_data_t *, size_t,
308 328 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
309 - void (*copy_block)(uint8_t *, uint8_t *),
310 - void (*xor_block)(uint8_t *, uint8_t *));
329 + void (*copy_block)(const uint8_t *, uint8_t *),
330 + void (*xor_block)(const uint8_t *, uint8_t *));
311 331
312 332 extern int ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *, char *, size_t,
313 333 crypto_data_t *, size_t,
314 334 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
315 - void (*copy_block)(uint8_t *, uint8_t *),
316 - void (*xor_block)(uint8_t *, uint8_t *));
335 + void (*copy_block)(const uint8_t *, uint8_t *),
336 + void (*xor_block)(const uint8_t *, uint8_t *));
317 337
318 338 extern int gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *, char *, size_t,
319 339 crypto_data_t *, size_t,
320 340 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
321 - void (*copy_block)(uint8_t *, uint8_t *),
322 - void (*xor_block)(uint8_t *, uint8_t *));
341 + void (*copy_block)(const uint8_t *, uint8_t *),
342 + void (*xor_block)(const uint8_t *, uint8_t *),
343 + int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
344 + uint64_t *));
323 345
324 346 extern int gcm_mode_decrypt_contiguous_blocks(gcm_ctx_t *, char *, size_t,
325 347 crypto_data_t *, size_t,
326 348 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
327 - void (*copy_block)(uint8_t *, uint8_t *),
328 - void (*xor_block)(uint8_t *, uint8_t *));
349 + void (*copy_block)(const uint8_t *, uint8_t *),
350 + void (*xor_block)(const uint8_t *, uint8_t *),
351 + int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
352 + uint64_t *));
329 353
330 354 int ccm_encrypt_final(ccm_ctx_t *, crypto_data_t *, size_t,
331 355 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
332 - void (*xor_block)(uint8_t *, uint8_t *));
356 + void (*xor_block)(const uint8_t *, uint8_t *));
333 357
334 358 int gcm_encrypt_final(gcm_ctx_t *, crypto_data_t *, size_t,
335 359 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
336 - void (*copy_block)(uint8_t *, uint8_t *),
337 - void (*xor_block)(uint8_t *, uint8_t *));
360 + void (*copy_block)(const uint8_t *, uint8_t *),
361 + void (*xor_block)(const uint8_t *, uint8_t *));
338 362
339 363 extern int ccm_decrypt_final(ccm_ctx_t *, crypto_data_t *, size_t,
340 364 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
341 - void (*copy_block)(uint8_t *, uint8_t *),
342 - void (*xor_block)(uint8_t *, uint8_t *));
365 + void (*copy_block)(const uint8_t *, uint8_t *),
366 + void (*xor_block)(const uint8_t *, uint8_t *));
343 367
344 368 extern int gcm_decrypt_final(gcm_ctx_t *, crypto_data_t *, size_t,
345 369 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
346 - void (*xor_block)(uint8_t *, uint8_t *));
370 + void (*copy_block)(const uint8_t *, uint8_t *),
371 + void (*xor_block)(const uint8_t *, uint8_t *),
372 + int (*cipher_ctr)(const void *, const uint8_t *, uint8_t *, uint64_t,
373 + uint64_t *));
347 374
348 375 extern int ctr_mode_final(ctr_ctx_t *, crypto_data_t *,
349 376 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *));
350 377
351 378 extern int cbc_init_ctx(cbc_ctx_t *, char *, size_t, size_t,
352 - void (*copy_block)(uint8_t *, uint64_t *));
379 + void (*copy_block)(const uint8_t *, uint64_t *));
353 380
354 381 extern int ctr_init_ctx(ctr_ctx_t *, ulong_t, uint8_t *,
355 - void (*copy_block)(uint8_t *, uint8_t *));
382 + void (*copy_block)(const uint8_t *, uint8_t *));
356 383
357 384 extern int ccm_init_ctx(ccm_ctx_t *, char *, int, boolean_t, size_t,
358 385 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
359 - void (*xor_block)(uint8_t *, uint8_t *));
386 + void (*xor_block)(const uint8_t *, uint8_t *));
360 387
361 388 extern int gcm_init_ctx(gcm_ctx_t *, char *, size_t,
362 389 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
363 - void (*copy_block)(uint8_t *, uint8_t *),
364 - void (*xor_block)(uint8_t *, uint8_t *));
390 + void (*copy_block)(const uint8_t *, uint8_t *),
391 + void (*xor_block)(const uint8_t *, uint8_t *));
365 392
366 393 extern int gmac_init_ctx(gcm_ctx_t *, char *, size_t,
367 394 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *),
368 - void (*copy_block)(uint8_t *, uint8_t *),
369 - void (*xor_block)(uint8_t *, uint8_t *));
395 + void (*copy_block)(const uint8_t *, uint8_t *),
396 + void (*xor_block)(const uint8_t *, uint8_t *));
370 397
371 398 extern void calculate_ccm_mac(ccm_ctx_t *, uint8_t *,
372 399 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *));
373 400
374 -extern void gcm_mul(uint64_t *, uint64_t *, uint64_t *);
375 -
376 401 extern void crypto_init_ptrs(crypto_data_t *, void **, offset_t *);
377 -extern void crypto_get_ptrs(crypto_data_t *, void **, offset_t *,
378 - uint8_t **, size_t *, uint8_t **, size_t);
379 402
380 403 extern void *ecb_alloc_ctx(int);
381 404 extern void *cbc_alloc_ctx(int);
382 405 extern void *ctr_alloc_ctx(int);
383 406 extern void *ccm_alloc_ctx(int);
384 407 extern void *gcm_alloc_ctx(int);
385 408 extern void *gmac_alloc_ctx(int);
386 409 extern void crypto_free_mode_ctx(void *);
387 410 extern void gcm_set_kmflag(gcm_ctx_t *, int);
388 411
412 +#ifdef INLINE_CRYPTO_GET_PTRS
413 +/*
414 + * Get pointers for where in the output to copy a block of encrypted or
415 + * decrypted data. The iov_or_mp argument stores a pointer to the current
416 + * iovec or mp, and offset stores an offset into the current iovec or mp.
417 + */
418 +static inline void
419 +crypto_get_ptrs(crypto_data_t *out, void **iov_or_mp, offset_t *current_offset,
420 + uint8_t **out_data_1, size_t *out_data_1_len, uint8_t **out_data_2,
421 + size_t amt)
422 +{
423 + offset_t offset;
424 +
425 + switch (out->cd_format) {
426 + case CRYPTO_DATA_RAW: {
427 + iovec_t *iov;
428 +
429 + offset = *current_offset;
430 + iov = &out->cd_raw;
431 + if ((offset + amt) <= iov->iov_len) {
432 + /* one block fits */
433 + *out_data_1 = (uint8_t *)iov->iov_base + offset;
434 + *out_data_1_len = amt;
435 + *out_data_2 = NULL;
436 + *current_offset = offset + amt;
437 + }
438 + break;
439 + }
440 +
441 + case CRYPTO_DATA_UIO: {
442 + uio_t *uio = out->cd_uio;
443 + iovec_t *iov;
444 + offset_t offset;
445 + uintptr_t vec_idx;
446 + uint8_t *p;
447 +
448 + offset = *current_offset;
449 + vec_idx = (uintptr_t)(*iov_or_mp);
450 + iov = &uio->uio_iov[vec_idx];
451 + p = (uint8_t *)iov->iov_base + offset;
452 + *out_data_1 = p;
453 +
454 + if (offset + amt <= iov->iov_len) {
455 + /* can fit one block into this iov */
456 + *out_data_1_len = amt;
457 + *out_data_2 = NULL;
458 + *current_offset = offset + amt;
459 + } else {
460 + /* one block spans two iovecs */
461 + *out_data_1_len = iov->iov_len - offset;
462 + if (vec_idx == uio->uio_iovcnt)
463 + return;
464 + vec_idx++;
465 + iov = &uio->uio_iov[vec_idx];
466 + *out_data_2 = (uint8_t *)iov->iov_base;
467 + *current_offset = amt - *out_data_1_len;
468 + }
469 + *iov_or_mp = (void *)vec_idx;
470 + break;
471 + }
472 +
473 + case CRYPTO_DATA_MBLK: {
474 + mblk_t *mp;
475 + uint8_t *p;
476 +
477 + offset = *current_offset;
478 + mp = (mblk_t *)*iov_or_mp;
479 + p = mp->b_rptr + offset;
480 + *out_data_1 = p;
481 + if ((p + amt) <= mp->b_wptr) {
482 + /* can fit one block into this mblk */
483 + *out_data_1_len = amt;
484 + *out_data_2 = NULL;
485 + *current_offset = offset + amt;
486 + } else {
487 + /* one block spans two mblks */
488 + *out_data_1_len = _PTRDIFF(mp->b_wptr, p);
489 + if ((mp = mp->b_cont) == NULL)
490 + return;
491 + *out_data_2 = mp->b_rptr;
492 + *current_offset = (amt - *out_data_1_len);
493 + }
494 + *iov_or_mp = mp;
495 + break;
496 + }
497 + } /* end switch */
498 +}
499 +#endif /* INLINE_CRYPTO_GET_PTRS */
500 +
501 +/*
502 + * Checks whether a crypto_data_t object is composed of a single contiguous
503 + * buffer. This is used in all fastpath detection code to avoid the
504 + * possibility of having to do partial block splicing.
505 + */
506 +#define CRYPTO_DATA_IS_SINGLE_BLOCK(cd) \
507 + (cd != NULL && (cd->cd_format == CRYPTO_DATA_RAW || \
508 + (cd->cd_format == CRYPTO_DATA_UIO && cd->cd_uio->uio_iovcnt == 1) || \
509 + (cd->cd_format == CRYPTO_DATA_MBLK && cd->cd_mp->b_next == NULL)))
510 +
511 +/*
512 + * Returns the first contiguous data buffer in a crypto_data_t object.
513 + */
514 +#define CRYPTO_DATA_FIRST_BLOCK(cd) \
515 + (cd->cd_format == CRYPTO_DATA_RAW ? \
516 + (void *)(cd->cd_raw.iov_base + cd->cd_offset) : \
517 + (cd->cd_format == CRYPTO_DATA_UIO ? \
518 + (void *)(cd->cd_uio->uio_iov[0].iov_base + cd->cd_offset) : \
519 + (void *)(cd->cd_mp->b_rptr + cd->cd_offset)))
520 +
389 521 #ifdef __cplusplus
390 522 }
391 523 #endif
392 524
393 525 #endif /* _COMMON_CRYPTO_MODES_H */
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX