1 /*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21 /*
22 * Copyright 2008 Sun Microsystems, Inc. All rights reserved.
23 * Use is subject to license terms.
24 */
25 /*
26 * Copyright 2015 by Saso Kiselkov. All rights reserved.
27 */
28
29 #ifndef _KERNEL
30 #include <strings.h>
31 #include <limits.h>
32 #include <assert.h>
33 #include <security/cryptoki.h>
34 #endif
35
36 #include <sys/types.h>
37 #define INLINE_CRYPTO_GET_PTRS
38 #include <modes/modes.h>
39 #include <sys/crypto/common.h>
40 #include <sys/crypto/impl.h>
41 #include <sys/byteorder.h>
42 #include <sys/cmn_err.h>
43
/*
 * Tunable: when B_TRUE, ctr_mode_contiguous_blocks() may hand block-aligned
 * requests to the algorithm-provided cipher_ctr accelerated routine instead
 * of the generic one-block-at-a-time loop.
 */
boolean_t ctr_fastpath_enabled = B_TRUE;
45
46 /*
47 * Encrypt and decrypt multiple blocks of data in counter mode.
48 */
49 int
50 ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
51 crypto_data_t *out, size_t block_size,
52 int (*cipher)(const void *ks, const uint8_t *pt, uint8_t *ct),
53 void (*xor_block)(const uint8_t *, uint8_t *),
54 int (*cipher_ctr)(const void *ks, const uint8_t *pt, uint8_t *ct,
55 uint64_t len, uint64_t counter[2]))
56 {
57 size_t remainder = length;
58 size_t need;
59 uint8_t *datap = (uint8_t *)data;
60 uint8_t *blockp;
61 uint8_t *lastp;
62 void *iov_or_mp;
63 offset_t offset;
64 uint8_t *out_data_1;
65 uint8_t *out_data_2;
66 size_t out_data_1_len;
67 uint64_t lower_counter, upper_counter;
68
69 /*
70 * CTR encryption/decryption fastpath requirements:
71 * - fastpath is enabled
72 * - algorithm-specific acceleration function is available
73 * - input is block-aligned
74 * - the counter value won't overflow the lower counter mask
75 * - output is a single contiguous region and doesn't alias input
76 */
77 if (ctr_fastpath_enabled && cipher_ctr != NULL &&
78 ctx->ctr_remainder_len == 0 && (length & (block_size - 1)) == 0 &&
79 ntohll(ctx->ctr_cb[1]) <= ctx->ctr_lower_mask -
80 length / block_size && CRYPTO_DATA_IS_SINGLE_BLOCK(out)) {
81 cipher_ctr(ctx->ctr_keysched, (uint8_t *)data,
82 CRYPTO_DATA_FIRST_BLOCK(out), length, ctx->ctr_cb);
83 out->cd_offset += length;
84 return (CRYPTO_SUCCESS);
85 }
86
87 if (length + ctx->ctr_remainder_len < block_size) {
88 /* accumulate bytes here and return */
89 bcopy(datap,
90 (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
91 length);
92 ctx->ctr_remainder_len += length;
93 ctx->ctr_copy_to = datap;
94 return (CRYPTO_SUCCESS);
95 }
96
97 lastp = (uint8_t *)ctx->ctr_cb;
98 if (out != NULL)
99 crypto_init_ptrs(out, &iov_or_mp, &offset);
100
101 do {
102 /* Unprocessed data from last call. */
103 if (ctx->ctr_remainder_len > 0) {
104 need = block_size - ctx->ctr_remainder_len;
105
106 if (need > remainder)
107 return (CRYPTO_DATA_LEN_RANGE);
108
109 bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
110 [ctx->ctr_remainder_len], need);
111
112 blockp = (uint8_t *)ctx->ctr_remainder;
113 } else {
114 blockp = datap;
115 }
116
117 /* ctr_cb is the counter block */
118 cipher(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
119 (uint8_t *)ctx->ctr_tmp);
120
121 lastp = (uint8_t *)ctx->ctr_tmp;
122
123 /*
124 * Increment Counter.
125 */
126 lower_counter = ntohll(ctx->ctr_cb[1] & ctx->ctr_lower_mask);
127 lower_counter = htonll(lower_counter + 1);
128 lower_counter &= ctx->ctr_lower_mask;
129 ctx->ctr_cb[1] = (ctx->ctr_cb[1] & ~(ctx->ctr_lower_mask)) |
130 lower_counter;
131
132 /* wrap around */
133 if (lower_counter == 0) {
134 upper_counter =
135 ntohll(ctx->ctr_cb[0] & ctx->ctr_upper_mask);
136 upper_counter = htonll(upper_counter + 1);
137 upper_counter &= ctx->ctr_upper_mask;
138 ctx->ctr_cb[0] =
139 (ctx->ctr_cb[0] & ~(ctx->ctr_upper_mask)) |
140 upper_counter;
141 }
142
143 /*
144 * XOR encrypted counter block with the current clear block.
145 */
146 xor_block(blockp, lastp);
147
148 if (out == NULL) {
149 if (ctx->ctr_remainder_len > 0) {
150 bcopy(lastp, ctx->ctr_copy_to,
151 ctx->ctr_remainder_len);
152 bcopy(lastp + ctx->ctr_remainder_len, datap,
153 need);
154 }
155 } else {
156 crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
157 &out_data_1_len, &out_data_2, block_size);
158
159 /* copy block to where it belongs */
160 bcopy(lastp, out_data_1, out_data_1_len);
161 if (out_data_2 != NULL) {
162 bcopy(lastp + out_data_1_len, out_data_2,
163 block_size - out_data_1_len);
164 }
165 /* update offset */
166 out->cd_offset += block_size;
167 }
168
169 /* Update pointer to next block of data to be processed. */
170 if (ctx->ctr_remainder_len != 0) {
171 datap += need;
172 ctx->ctr_remainder_len = 0;
173 } else {
174 datap += block_size;
175 }
176
177 remainder = (size_t)&data[length] - (size_t)datap;
178
179 /* Incomplete last block. */
180 if (remainder > 0 && remainder < block_size) {
181 bcopy(datap, ctx->ctr_remainder, remainder);
182 ctx->ctr_remainder_len = remainder;
183 ctx->ctr_copy_to = datap;
184 goto out;
185 }
186 ctx->ctr_copy_to = NULL;
187
188 } while (remainder > 0);
189
190 out:
191 return (CRYPTO_SUCCESS);
192 }
193
194 int
195 ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
196 int (*encrypt_block)(const void *, const uint8_t *, uint8_t *))
197 {
198 uint8_t *lastp;
199 void *iov_or_mp;
200 offset_t offset;
201 uint8_t *out_data_1;
202 uint8_t *out_data_2;
203 size_t out_data_1_len;
204 uint8_t *p;
205 int i;
206
207 if (out->cd_length < ctx->ctr_remainder_len)
208 return (CRYPTO_DATA_LEN_RANGE);
209
210 encrypt_block(ctx->ctr_keysched, (uint8_t *)ctx->ctr_cb,
211 (uint8_t *)ctx->ctr_tmp);
212
213 lastp = (uint8_t *)ctx->ctr_tmp;
214 p = (uint8_t *)ctx->ctr_remainder;
215 for (i = 0; i < ctx->ctr_remainder_len; i++) {
216 p[i] ^= lastp[i];
217 }
218
219 crypto_init_ptrs(out, &iov_or_mp, &offset);
220 crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
221 &out_data_1_len, &out_data_2, ctx->ctr_remainder_len);
222
223 bcopy(p, out_data_1, out_data_1_len);
224 if (out_data_2 != NULL) {
225 bcopy((uint8_t *)p + out_data_1_len,
226 out_data_2, ctx->ctr_remainder_len - out_data_1_len);
227 }
228 out->cd_offset += ctx->ctr_remainder_len;
229 ctx->ctr_remainder_len = 0;
230 return (CRYPTO_SUCCESS);
231 }
232
233 int
234 ctr_init_ctx(ctr_ctx_t *ctr_ctx, ulong_t count, uint8_t *cb,
235 void (*copy_block)(const uint8_t *, uint8_t *))
236 {
237 uint64_t upper_mask = 0;
238 uint64_t lower_mask = 0;
239
240 if (count == 0 || count > 128) {
241 return (CRYPTO_MECHANISM_PARAM_INVALID);
242 }
243 /* upper 64 bits of the mask */
244 if (count >= 64) {
245 count -= 64;
246 upper_mask = (count == 64) ? UINT64_MAX : (1ULL << count) - 1;
247 lower_mask = UINT64_MAX;
248 } else {
249 /* now the lower 63 bits */
250 lower_mask = (1ULL << count) - 1;
251 }
252 ctr_ctx->ctr_lower_mask = htonll(lower_mask);
253 ctr_ctx->ctr_upper_mask = htonll(upper_mask);
254
255 copy_block(cb, (uchar_t *)ctr_ctx->ctr_cb);
256 ctr_ctx->ctr_lastp = (uint8_t *)&ctr_ctx->ctr_cb[0];
257 ctr_ctx->ctr_flags |= CTR_MODE;
258 return (CRYPTO_SUCCESS);
259 }
260
261 /* ARGSUSED */
262 void *
263 ctr_alloc_ctx(int kmflag)
264 {
265 ctr_ctx_t *ctr_ctx;
266
267 #ifdef _KERNEL
268 if ((ctr_ctx = kmem_zalloc(sizeof (ctr_ctx_t), kmflag)) == NULL)
269 #else
270 if ((ctr_ctx = calloc(1, sizeof (ctr_ctx_t))) == NULL)
271 #endif
272 return (NULL);
273
274 ctr_ctx->ctr_flags = CTR_MODE;
275 return (ctr_ctx);
276 }