/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or https://opensource.org/licenses/CDDL-1.0.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
 */

/*
 * AES provider for the Kernel Cryptographic Framework (KCF)
 */

#include <sys/zfs_context.h>
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>
#include <sys/crypto/spi.h>
#include <sys/crypto/icp.h>
#include <modes/modes.h>
#define	_AES_IMPL
#include <aes/aes_impl.h>
#include <modes/gcm_impl.h>

/*
 * Mechanism info structure passed to KCF during registration.
 */
static const crypto_mech_info_t aes_mech_info_tab[] = {
	/* AES_ECB */
	{SUN_CKM_AES_ECB, AES_ECB_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_CBC */
	{SUN_CKM_AES_CBC, AES_CBC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_CTR */
	{SUN_CKM_AES_CTR, AES_CTR_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_CCM */
	{SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_GCM */
	{SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
	/* AES_GMAC */
	{SUN_CKM_AES_GMAC, AES_GMAC_MECH_INFO_TYPE,
	    CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
	    CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC |
	    CRYPTO_FG_MAC | CRYPTO_FG_MAC_ATOMIC},
};

static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t);
static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t);
static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
    crypto_key_t *, crypto_spi_ctx_template_t, boolean_t);
static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
    crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *);
static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *);

static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *);
static int aes_encrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
    crypto_data_t *);
static int aes_decrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_cipher_ops_t aes_cipher_ops = {
	.encrypt_init = aes_encrypt_init,
	.encrypt = aes_encrypt,
	.encrypt_update = aes_encrypt_update,
	.encrypt_final = aes_encrypt_final,
	.encrypt_atomic = aes_encrypt_atomic,
	.decrypt_init = aes_decrypt_init,
	.decrypt = aes_decrypt,
	.decrypt_update = aes_decrypt_update,
	.decrypt_final = aes_decrypt_final,
	.decrypt_atomic = aes_decrypt_atomic
};

static int aes_mac_atomic(crypto_mechanism_t *, crypto_key_t *, crypto_data_t *,
    crypto_data_t *, crypto_spi_ctx_template_t);
static int aes_mac_verify_atomic(crypto_mechanism_t *, crypto_key_t *,
    crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

static const crypto_mac_ops_t aes_mac_ops = {
	.mac_init = NULL,
	.mac = NULL,
	.mac_update = NULL,
	.mac_final = NULL,
	.mac_atomic = aes_mac_atomic,
	.mac_verify_atomic = aes_mac_verify_atomic
};

static int aes_create_ctx_template(crypto_mechanism_t *, crypto_key_t *,
    crypto_spi_ctx_template_t *, size_t *);
static int aes_free_context(crypto_ctx_t *);

static const crypto_ctx_ops_t aes_ctx_ops = {
	.create_ctx_template = aes_create_ctx_template,
	.free_context = aes_free_context
};

static const crypto_ops_t aes_crypto_ops = {
	NULL,
	&aes_cipher_ops,
	&aes_mac_ops,
	&aes_ctx_ops,
};

static const crypto_provider_info_t aes_prov_info = {
	"AES Software Provider",
	&aes_crypto_ops,
	sizeof (aes_mech_info_tab) / sizeof (crypto_mech_info_t),
	aes_mech_info_tab
};

static crypto_kcf_provider_handle_t aes_prov_handle = 0;
static crypto_data_t null_crypto_data = { CRYPTO_DATA_RAW };

int
aes_mod_init(void)
{
	/* Determine the fastest available implementation. */
	aes_impl_init();
	gcm_impl_init();

	/* Register with KCF. If the registration fails, remove the module. */
	if (crypto_register_provider(&aes_prov_info, &aes_prov_handle))
		return (EACCES);

	return (0);
}

int
aes_mod_fini(void)
{
	/* Unregister from KCF if module is registered */
	if (aes_prov_handle != 0) {
		if (crypto_unregister_provider(aes_prov_handle))
			return (EBUSY);

		aes_prov_handle = 0;
	}

	return (0);
}

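/*
 * Validate the mechanism parameter length and, when a context pointer is
 * supplied, allocate the matching mode-specific context (ECB, CBC, CTR,
 * CCM, GCM, or GMAC) for the caller.
 */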
static int
aes_check_mech_param(crypto_mechanism_t *mechanism, aes_ctx_t **ctx)
{
	void *p = NULL;
	boolean_t param_required = B_TRUE;
	size_t param_len;
	void *(*alloc_fun)(int);
	int rv = CRYPTO_SUCCESS;

	switch (mechanism->cm_type) {
	case AES_ECB_MECH_INFO_TYPE:
		param_required = B_FALSE;
		alloc_fun = ecb_alloc_ctx;
		break;
	case AES_CBC_MECH_INFO_TYPE:
		param_len = AES_BLOCK_LEN;
		alloc_fun = cbc_alloc_ctx;
		break;
	case AES_CTR_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CTR_PARAMS);
		alloc_fun = ctr_alloc_ctx;
		break;
	case AES_CCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_CCM_PARAMS);
		alloc_fun = ccm_alloc_ctx;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GCM_PARAMS);
		alloc_fun = gcm_alloc_ctx;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		param_len = sizeof (CK_AES_GMAC_PARAMS);
		alloc_fun = gmac_alloc_ctx;
		break;
	default:
		rv = CRYPTO_MECHANISM_INVALID;
		return (rv);
	}
	if (param_required && mechanism->cm_param != NULL &&
	    mechanism->cm_param_len != param_len) {
		rv = CRYPTO_MECHANISM_PARAM_INVALID;
	}
	if (ctx != NULL) {
		p = (alloc_fun)(KM_SLEEP);
		*ctx = p;
	}
	return (rv);
}

/*
 * Initialize key schedules for AES
 */
static int
init_keysched(crypto_key_t *key, void *newbie)
{
	if (key->ck_length < AES_MINBITS ||
	    key->ck_length > AES_MAXBITS) {
		return (CRYPTO_KEY_SIZE_RANGE);
	}

	/* key length must be either 128, 192, or 256 bits */
	if ((key->ck_length & 63) != 0)
		return (CRYPTO_KEY_SIZE_RANGE);

	aes_init_keysched(key->ck_data, key->ck_length, newbie);
	return (CRYPTO_SUCCESS);
}

static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template)
{
	return (aes_common_init(ctx, mechanism, key, template, B_TRUE));
}

static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template)
{
	return (aes_common_init(ctx, mechanism, key, template, B_FALSE));
}

/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_spi_ctx_template_t template,
    boolean_t is_encrypt_init)
{
	aes_ctx_t *aes_ctx;
	int rv;

	if ((rv = aes_check_mech_param(mechanism, &aes_ctx))
	    != CRYPTO_SUCCESS)
		return (rv);

	rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, KM_SLEEP,
	    is_encrypt_init);
	if (rv != CRYPTO_SUCCESS) {
		crypto_free_mode_ctx(aes_ctx);
		return (rv);
	}

	ctx->cc_provider_private = aes_ctx;

	return (CRYPTO_SUCCESS);
}

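/*
 * Copy a 16-byte AES block into two 64-bit words, using direct 64-bit
 * loads when the input is suitably aligned and a byte-wise copy otherwise.
 */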
static void
aes_copy_block64(uint8_t *in, uint64_t *out)
{
	if (IS_P2ALIGNED(in, sizeof (uint64_t))) {
		/* LINTED: pointer alignment */
		out[0] = *(uint64_t *)&in[0];
		/* LINTED: pointer alignment */
		out[1] = *(uint64_t *)&in[8];
	} else {
		uint8_t *iv8 = (uint8_t *)&out[0];

		AES_COPY_BLOCK(in, iv8);
	}
}

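/*
 * Single-part encryption: encrypt the whole plaintext in one call and,
 * for CCM/GCM/GMAC, append the authentication tag. If the output buffer
 * is too small, the required length is returned in ciphertext->cd_length
 * and the context is left intact; on success the context is freed.
 */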
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	size_t saved_length, saved_offset, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, plaintext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
		return (CRYPTO_DATA_LEN_RANGE);

	ASSERT(ciphertext != NULL);

	/*
	 * Return just the length needed to store the output.
	 * The context must not be destroyed in this case.
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
		break;
	case GCM_MODE:
		length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = aes_ctx->ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_length = ciphertext->cd_length;
	saved_offset = ciphertext->cd_offset;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_encrypt_update(ctx, plaintext, ciphertext);
	if (ret != CRYPTO_SUCCESS) {
		return (ret);
	}

	/*
	 * For CCM mode, ccm_encrypt_final() will take care of any
	 * left-over unprocessed data, and compute the MAC.
	 */
	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * ccm_encrypt_final() will compute the MAC and append it
		 * to the existing ciphertext, so the left-over length
		 * value must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * gcm_encrypt_final() will compute the MAC and append it
		 * to the existing ciphertext, so the left-over length
		 * value must be adjusted accordingly.
		 */

		/* order of following 2 lines MUST not be reversed */
		ciphertext->cd_offset = ciphertext->cd_length;
		ciphertext->cd_length = saved_length - ciphertext->cd_length;
		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
		ciphertext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);
	(void) aes_free_context(ctx);

	return (ret);
}

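/*
 * Single-part decryption. For CCM and GCM the buffered ciphertext is
 * decrypted and the authentication tag verified by the mode-specific
 * final routine. If the output buffer is too small, the required length
 * is returned and the context is preserved; otherwise the context is
 * freed before returning.
 */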
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext)
{
	int ret = CRYPTO_FAILED;

	aes_ctx_t *aes_ctx;
	off_t saved_offset;
	size_t saved_length, length_needed;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	/*
	 * For block ciphers, ciphertext must be a multiple of AES block size.
	 * This test is only valid for ciphers whose blocksize is a power of 2.
	 */
	if (((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE))
	    == 0) && (ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0) {
		return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	ASSERT(plaintext != NULL);

	/*
	 * Return length needed to store the output.
	 * Do not destroy context when plaintext buffer is too small.
	 *
	 * CCM:  plaintext is MAC len smaller than cipher text
	 * GCM:  plaintext is TAG len smaller than cipher text
	 * GMAC: plaintext length must be zero
	 */
	switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) {
	case CCM_MODE:
		length_needed = aes_ctx->ac_processed_data_len;
		break;
	case GCM_MODE:
		length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
		break;
	case GMAC_MODE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);

		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	ret = aes_decrypt_update(ctx, ciphertext, plaintext);
	if (ret != CRYPTO_SUCCESS) {
		goto cleanup;
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);

		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/* order of following 2 lines MUST not be reversed */
		plaintext->cd_offset = plaintext->cd_length;
		plaintext->cd_length = saved_length - plaintext->cd_length;

		ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			if (plaintext != ciphertext) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			}
		} else {
			plaintext->cd_length = saved_length;
		}

		plaintext->cd_offset = saved_offset;
	}

	ASSERT(aes_ctx->ac_remainder_len == 0);

cleanup:
	(void) aes_free_context(ctx);

	return (ret);
}

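/*
 * Multi-part encryption: encrypt as much of the input as possible,
 * carrying any partial block over in the context. CTR mode flushes the
 * remainder immediately since it is a stream cipher.
 */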
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
    crypto_data_t *ciphertext)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	ASSERT(ciphertext != NULL);

	/* compute number of bytes that will hold the ciphertext */
	out_len = aes_ctx->ac_remainder_len;
	out_len += plaintext->cd_length;
	out_len &= ~(AES_BLOCK_LEN - 1);

	/* return length needed to store the output */
	if (ciphertext->cd_length < out_len) {
		ciphertext->cd_length = out_len;
		return (CRYPTO_BUFFER_TOO_SMALL);
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    plaintext, ciphertext, aes_encrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx,
		    ciphertext, aes_encrypt_block);
	}

	if (ret == CRYPTO_SUCCESS) {
		if (plaintext != ciphertext)
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

	return (ret);
}

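/*
 * Multi-part decryption. CCM, GCM, and GMAC accumulate ciphertext in the
 * context and return no plaintext until the final call, when the tag can
 * be verified; the other modes emit plaintext incrementally.
 */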
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
    crypto_data_t *plaintext)
{
	off_t saved_offset;
	size_t saved_length, out_len;
	int ret = CRYPTO_SUCCESS;
	aes_ctx_t *aes_ctx;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	ASSERT(plaintext != NULL);

	/*
	 * Compute number of bytes that will hold the plaintext.
	 * This is not necessary for CCM, GCM, and GMAC since these
	 * mechanisms never return plaintext for update operations.
	 */
	if ((aes_ctx->ac_flags & (CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		out_len = aes_ctx->ac_remainder_len;
		out_len += ciphertext->cd_length;
		out_len &= ~(AES_BLOCK_LEN - 1);

		/* return length needed to store the output */
		if (plaintext->cd_length < out_len) {
			plaintext->cd_length = out_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do the AES update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(ctx->cc_provider_private,
		    ciphertext, plaintext, aes_decrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	/*
	 * Since AES counter mode is a stream cipher, we call
	 * ctr_mode_final() to pick up any remaining bytes.
	 * It is an internal function that does not destroy
	 * the context like *normal* final routines.
	 */
	if ((aes_ctx->ac_flags & CTR_MODE) && (aes_ctx->ac_remainder_len > 0)) {
		ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, plaintext,
		    aes_encrypt_block);
		if (ret == CRYPTO_DATA_LEN_RANGE)
			ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (ciphertext != plaintext)
			plaintext->cd_length =
			    plaintext->cd_offset - saved_offset;
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

	return (ret);
}

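/*
 * Finish a multi-part encryption: flush any CTR remainder, or emit the
 * CCM/GCM/GMAC authentication tag. Block modes (ECB, CBC) must have no
 * unprocessed data left. The context is freed on success.
 */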
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data)
{
	aes_ctx_t *aes_ctx;
	int ret;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	if (aes_ctx->ac_flags & CTR_MODE) {
		if (aes_ctx->ac_remainder_len > 0) {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	} else if (aes_ctx->ac_flags & CCM_MODE) {
		ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		size_t saved_offset = data->cd_offset;

		ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
		data->cd_length = data->cd_offset - saved_offset;
		data->cd_offset = saved_offset;
	} else {
		/*
		 * There must be no unprocessed plaintext.
		 * A remainder is left if the length of the supplied data
		 * is not a multiple of the AES block length.
		 */
		if (aes_ctx->ac_remainder_len > 0) {
			return (CRYPTO_DATA_LEN_RANGE);
		}
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

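/*
 * Finish a multi-part decryption: for CCM and GCM, decrypt the buffered
 * ciphertext and verify the authentication tag before releasing any
 * plaintext. The context is freed on success.
 */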
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data)
{
	aes_ctx_t *aes_ctx;
	int ret;
	off_t saved_offset;
	size_t saved_length;

	ASSERT(ctx->cc_provider_private != NULL);
	aes_ctx = ctx->cc_provider_private;

	if (data->cd_format != CRYPTO_DATA_RAW &&
	    data->cd_format != CRYPTO_DATA_UIO) {
		return (CRYPTO_ARGUMENTS_BAD);
	}

	/*
	 * There must be no unprocessed ciphertext.
	 * A remainder is left if the length of the last ciphertext is
	 * not a multiple of the AES block length.
	 */
	if (aes_ctx->ac_remainder_len > 0) {
		if ((aes_ctx->ac_flags & CTR_MODE) == 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
		else {
			ret = ctr_mode_final((ctr_ctx_t *)aes_ctx, data,
			    aes_encrypt_block);
			if (ret == CRYPTO_DATA_LEN_RANGE)
				ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
			if (ret != CRYPTO_SUCCESS)
				return (ret);
		}
	}

	if (aes_ctx->ac_flags & CCM_MODE) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough.
		 */
		size_t pt_len = aes_ctx->ac_data_len;
		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		ASSERT(aes_ctx->ac_processed_data_len == pt_len);
		ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	} else if (aes_ctx->ac_flags & (GCM_MODE|GMAC_MODE)) {
		/*
		 * This is where all the plaintext is returned; make sure
		 * the plaintext buffer is big enough. (The local gcm_ctx
		 * avoids shadowing the ctx parameter used below.)
		 */
		gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)aes_ctx;
		size_t pt_len = gcm_ctx->gcm_processed_data_len -
		    gcm_ctx->gcm_tag_len;

		if (data->cd_length < pt_len) {
			data->cd_length = pt_len;
			return (CRYPTO_BUFFER_TOO_SMALL);
		}

		saved_offset = data->cd_offset;
		saved_length = data->cd_length;
		ret = gcm_decrypt_final(gcm_ctx, data,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
		if (ret == CRYPTO_SUCCESS) {
			data->cd_length = data->cd_offset - saved_offset;
		} else {
			data->cd_length = saved_length;
		}

		data->cd_offset = saved_offset;
		if (ret != CRYPTO_SUCCESS) {
			return (ret);
		}
	}

	if ((aes_ctx->ac_flags & (CTR_MODE|CCM_MODE|GCM_MODE|GMAC_MODE)) == 0) {
		data->cd_length = 0;
	}

	(void) aes_free_context(ctx);

	return (CRYPTO_SUCCESS);
}

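/*
 * Atomic (single-call) encryption entry point. A temporary context is
 * built on the stack, used for a full encrypt pass, and torn down before
 * returning: the key schedule and any GCM Htable are zeroed and freed.
 */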
static int
aes_encrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx = {{{{0}}}};
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	ASSERT(ciphertext != NULL);

	/*
	 * CTR, CCM, GCM, and GMAC modes do not require that plaintext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((plaintext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_TRUE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_mac_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);
		zfs_fallthrough;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = plaintext->cd_length + aes_ctx.ac_tag_len;
		break;
	default:
		length_needed = plaintext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (ciphertext->cd_length < length_needed) {
		ciphertext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = ciphertext->cd_offset;
	saved_length = ciphertext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (plaintext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, plaintext, ciphertext,
		    aes_encrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ret = ccm_encrypt_final((ccm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_encrypt_final((gcm_ctx_t *)&aes_ctx,
			    ciphertext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			if (ret != CRYPTO_SUCCESS)
				goto out;
			ASSERT(aes_ctx.ac_remainder_len == 0);
		} else if (mechanism->cm_type == AES_CTR_MECH_INFO_TYPE) {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    ciphertext, aes_encrypt_block);
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
		} else {
			ASSERT(aes_ctx.ac_remainder_len == 0);
		}

		if (plaintext != ciphertext) {
			ciphertext->cd_length =
			    ciphertext->cd_offset - saved_offset;
		}
	} else {
		ciphertext->cd_length = saved_length;
	}
	ciphertext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}
#ifdef CAN_USE_GCM_ASM
	if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE) &&
	    ((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {

		gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;

		memset(ctx->gcm_Htable, 0, ctx->gcm_htab_len);
		kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
	}
#endif

	return (ret);
}

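/*
 * Atomic (single-call) decryption entry point. Works like
 * aes_encrypt_atomic() but additionally releases the plaintext buffers
 * that CCM and GCM accumulate while the tag is being verified.
 */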
static int
aes_decrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
    crypto_spi_ctx_template_t template)
{
	aes_ctx_t aes_ctx = {{{{0}}}};
	off_t saved_offset;
	size_t saved_length;
	size_t length_needed;
	int ret;

	ASSERT(plaintext != NULL);

	/*
	 * CCM, GCM, CTR, and GMAC modes do not require that ciphertext
	 * be a multiple of AES block size.
	 */
	switch (mechanism->cm_type) {
	case AES_CTR_MECH_INFO_TYPE:
	case AES_CCM_MECH_INFO_TYPE:
	case AES_GCM_MECH_INFO_TYPE:
	case AES_GMAC_MECH_INFO_TYPE:
		break;
	default:
		if ((ciphertext->cd_length & (AES_BLOCK_LEN - 1)) != 0)
			return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
	}

	if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
		return (ret);

	ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
	    KM_SLEEP, B_FALSE);
	if (ret != CRYPTO_SUCCESS)
		return (ret);

	switch (mechanism->cm_type) {
	case AES_CCM_MECH_INFO_TYPE:
		length_needed = aes_ctx.ac_data_len;
		break;
	case AES_GCM_MECH_INFO_TYPE:
		length_needed = ciphertext->cd_length - aes_ctx.ac_tag_len;
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (plaintext->cd_length != 0)
			return (CRYPTO_ARGUMENTS_BAD);
		length_needed = 0;
		break;
	default:
		length_needed = ciphertext->cd_length;
	}

	/* return size of buffer needed to store output */
	if (plaintext->cd_length < length_needed) {
		plaintext->cd_length = length_needed;
		ret = CRYPTO_BUFFER_TOO_SMALL;
		goto out;
	}

	saved_offset = plaintext->cd_offset;
	saved_length = plaintext->cd_length;

	/*
	 * Do an update on the specified input data.
	 */
	switch (ciphertext->cd_format) {
	case CRYPTO_DATA_RAW:
		ret = crypto_update_iov(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	case CRYPTO_DATA_UIO:
		ret = crypto_update_uio(&aes_ctx, ciphertext, plaintext,
		    aes_decrypt_contiguous_blocks);
		break;
	default:
		ret = CRYPTO_ARGUMENTS_BAD;
	}

	if (ret == CRYPTO_SUCCESS) {
		if (mechanism->cm_type == AES_CCM_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_processed_data_len
			    == aes_ctx.ac_data_len);
			ASSERT(aes_ctx.ac_processed_mac_len
			    == aes_ctx.ac_mac_len);
			ret = ccm_decrypt_final((ccm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_copy_block, aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type == AES_GCM_MECH_INFO_TYPE ||
		    mechanism->cm_type == AES_GMAC_MECH_INFO_TYPE) {
			ret = gcm_decrypt_final((gcm_ctx_t *)&aes_ctx,
			    plaintext, AES_BLOCK_LEN, aes_encrypt_block,
			    aes_xor_block);
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if ((ret == CRYPTO_SUCCESS) &&
			    (ciphertext != plaintext)) {
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
			} else {
				plaintext->cd_length = saved_length;
			}
		} else if (mechanism->cm_type != AES_CTR_MECH_INFO_TYPE) {
			ASSERT(aes_ctx.ac_remainder_len == 0);
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		} else {
			if (aes_ctx.ac_remainder_len > 0) {
				ret = ctr_mode_final((ctr_ctx_t *)&aes_ctx,
				    plaintext, aes_encrypt_block);
				if (ret == CRYPTO_DATA_LEN_RANGE)
					ret = CRYPTO_ENCRYPTED_DATA_LEN_RANGE;
				if (ret != CRYPTO_SUCCESS)
					goto out;
			}
			if (ciphertext != plaintext)
				plaintext->cd_length =
				    plaintext->cd_offset - saved_offset;
		}
	} else {
		plaintext->cd_length = saved_length;
	}
	plaintext->cd_offset = saved_offset;

out:
	if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
		memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
		kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
	}

	if (aes_ctx.ac_flags & CCM_MODE) {
		if (aes_ctx.ac_pt_buf != NULL) {
			vmem_free(aes_ctx.ac_pt_buf, aes_ctx.ac_data_len);
		}
	} else if (aes_ctx.ac_flags & (GCM_MODE|GMAC_MODE)) {
		if (((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf != NULL) {
			vmem_free(((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf,
			    ((gcm_ctx_t *)&aes_ctx)->gcm_pt_buf_len);
		}
#ifdef CAN_USE_GCM_ASM
		if (((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {
			gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;

			memset(ctx->gcm_Htable, 0, ctx->gcm_htab_len);
			kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
		}
#endif
	}

	return (ret);
}

/*
 * KCF software provider context template entry points.
 */
static int
aes_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_spi_ctx_template_t *tmpl, size_t *tmpl_size)
{
	void *keysched;
	size_t size;
	int rv;

	if (mechanism->cm_type != AES_ECB_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CBC_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CTR_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_CCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GCM_MECH_INFO_TYPE &&
	    mechanism->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if ((keysched = aes_alloc_keysched(&size, KM_SLEEP)) == NULL) {
		return (CRYPTO_HOST_MEMORY);
	}

	/*
	 * Initialize key schedule. Key length information is stored
	 * in the key.
	 */
	if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
		memset(keysched, 0, size);
		kmem_free(keysched, size);
		return (rv);
	}

	*tmpl = keysched;
	*tmpl_size = size;

	return (CRYPTO_SUCCESS);
}

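/*
 * Release a provider context: zero and free the key schedule if this
 * provider allocated it, then free the mode-specific context.
 */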
static int
aes_free_context(crypto_ctx_t *ctx)
{
	aes_ctx_t *aes_ctx = ctx->cc_provider_private;

	if (aes_ctx != NULL) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			ASSERT(aes_ctx->ac_keysched_len != 0);
			memset(aes_ctx->ac_keysched, 0,
			    aes_ctx->ac_keysched_len);
			kmem_free(aes_ctx->ac_keysched,
			    aes_ctx->ac_keysched_len);
		}
		crypto_free_mode_ctx(aes_ctx);
		ctx->cc_provider_private = NULL;
	}

	return (CRYPTO_SUCCESS);
}

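/*
 * Shared context setup: install the key schedule (from the caller's
 * template if one was provided, otherwise freshly allocated) and perform
 * the mode-specific parameter and IV initialization.
 */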
static int
aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
    crypto_mechanism_t *mechanism, crypto_key_t *key, int kmflag,
    boolean_t is_encrypt_init)
{
	int rv = CRYPTO_SUCCESS;
	void *keysched;
	size_t size = 0;

	if (template == NULL) {
		if ((keysched = aes_alloc_keysched(&size, kmflag)) == NULL)
			return (CRYPTO_HOST_MEMORY);
		/*
		 * Initialize key schedule.
		 * Key length is stored in the key.
		 */
		if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
			kmem_free(keysched, size);
			return (rv);
		}

		aes_ctx->ac_flags |= PROVIDER_OWNS_KEY_SCHEDULE;
		aes_ctx->ac_keysched_len = size;
	} else {
		keysched = template;
	}
	aes_ctx->ac_keysched = keysched;

	switch (mechanism->cm_type) {
	case AES_CBC_MECH_INFO_TYPE:
		rv = cbc_init_ctx((cbc_ctx_t *)aes_ctx, mechanism->cm_param,
		    mechanism->cm_param_len, AES_BLOCK_LEN, aes_copy_block64);
		break;
	case AES_CTR_MECH_INFO_TYPE: {
		CK_AES_CTR_PARAMS *pp;

		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CTR_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		pp = (CK_AES_CTR_PARAMS *)(void *)mechanism->cm_param;
		rv = ctr_init_ctx((ctr_ctx_t *)aes_ctx, pp->ulCounterBits,
		    pp->cb, aes_copy_block);
		break;
	}
	case AES_CCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_CCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = ccm_init_ctx((ccm_ctx_t *)aes_ctx, mechanism->cm_param,
		    kmflag, is_encrypt_init, AES_BLOCK_LEN, aes_encrypt_block,
		    aes_xor_block);
		break;
	case AES_GCM_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GCM_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gcm_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_GMAC_MECH_INFO_TYPE:
		if (mechanism->cm_param == NULL ||
		    mechanism->cm_param_len != sizeof (CK_AES_GMAC_PARAMS)) {
			return (CRYPTO_MECHANISM_PARAM_INVALID);
		}
		rv = gmac_init_ctx((gcm_ctx_t *)aes_ctx, mechanism->cm_param,
		    AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
		    aes_xor_block);
		break;
	case AES_ECB_MECH_INFO_TYPE:
		aes_ctx->ac_flags |= ECB_MODE;
	}

	if (rv != CRYPTO_SUCCESS) {
		if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
			memset(keysched, 0, size);
			kmem_free(keysched, size);
		}
	}

	return (rv);
}

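/*
 * Translate a CK_AES_GMAC_PARAMS mechanism parameter into the equivalent
 * CK_AES_GCM_PARAMS, supplying the data to be authenticated as AAD.
 */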
static int
process_gmac_mech(crypto_mechanism_t *mech, crypto_data_t *data,
    CK_AES_GCM_PARAMS *gcm_params)
{
	/* LINTED: pointer alignment */
	CK_AES_GMAC_PARAMS *params = (CK_AES_GMAC_PARAMS *)mech->cm_param;

	if (mech->cm_type != AES_GMAC_MECH_INFO_TYPE)
		return (CRYPTO_MECHANISM_INVALID);

	if (mech->cm_param_len != sizeof (CK_AES_GMAC_PARAMS))
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	if (params->pIv == NULL)
		return (CRYPTO_MECHANISM_PARAM_INVALID);

	gcm_params->pIv = params->pIv;
	gcm_params->ulIvLen = AES_GMAC_IV_LEN;
	gcm_params->ulTagBits = AES_GMAC_TAG_BITS;

	if (data == NULL)
		return (CRYPTO_SUCCESS);

	if (data->cd_format != CRYPTO_DATA_RAW)
		return (CRYPTO_ARGUMENTS_BAD);

	gcm_params->pAAD = (uchar_t *)data->cd_raw.iov_base;
	gcm_params->ulAADLen = data->cd_length;
	return (CRYPTO_SUCCESS);
}

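/*
 * GMAC is GCM applied to an empty plaintext: the message is passed as
 * AAD and the resulting authentication tag is the MAC.
 */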
static int
aes_mac_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *data, crypto_data_t *mac,
    crypto_spi_ctx_template_t template)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_encrypt_atomic(&gcm_mech,
	    key, &null_crypto_data, mac, template));
}

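/*
 * Verify a GMAC by running the GCM decryption path with the tag as the
 * only ciphertext; gcm_decrypt_final() fails if the tag does not match.
 */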
static int
aes_mac_verify_atomic(crypto_mechanism_t *mechanism, crypto_key_t *key,
    crypto_data_t *data, crypto_data_t *mac, crypto_spi_ctx_template_t template)
{
	CK_AES_GCM_PARAMS gcm_params;
	crypto_mechanism_t gcm_mech;
	int rv;

	if ((rv = process_gmac_mech(mechanism, data, &gcm_params))
	    != CRYPTO_SUCCESS)
		return (rv);

	gcm_mech.cm_type = AES_GCM_MECH_INFO_TYPE;
	gcm_mech.cm_param_len = sizeof (CK_AES_GCM_PARAMS);
	gcm_mech.cm_param = (char *)&gcm_params;

	return (aes_decrypt_atomic(&gcm_mech,
	    key, mac, &null_crypto_data, template));
}