1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
2
3 /*-
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
6 *
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
10 *
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
14 *
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
18 *
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
23 *
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
28 * PURPOSE.
29 */
30
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD$");
33
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
37 #include <sys/mbuf.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
43 #include <sys/uio.h>
44 #include <sys/lock.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
49
50 #include <crypto/blowfish/blowfish.h>
51 #include <crypto/sha1.h>
52 #include <opencrypto/rmd160.h>
53 #include <opencrypto/cast.h>
54 #include <opencrypto/skipjack.h>
55 #include <sys/md5.h>
56
57 #include <opencrypto/cryptodev.h>
58 #include <opencrypto/cryptosoft.h>
59 #include <opencrypto/xform.h>
60
61 #include <sys/kobj.h>
62 #include <sys/bus.h>
63 #include "cryptodev_if.h"
64
/* swcr_authenc() uses one IV length for both CCM and GCM; keep them equal. */
_Static_assert(AES_CCM_IV_LEN == AES_GCM_IV_LEN,
    "AES_GCM_IV_LEN must currently be the same as AES_CCM_IV_LEN");

/* Driver id assigned to the software crypto driver at registration time. */
static int32_t swcr_id;

/*
 * Pad buffers used to finish the HMAC inner/outer block when the key is
 * shorter than the hash block size (see swcr_authprepare()).
 * NOTE(review): their initialization (filling with HMAC_IPAD_VAL /
 * HMAC_OPAD_VAL) is not visible in this chunk -- confirm it happens at
 * module load before any session is created.
 */
u_int8_t hmac_ipad_buffer[HMAC_MAX_BLOCK_LEN];
u_int8_t hmac_opad_buffer[HMAC_MAX_BLOCK_LEN];

/* Forward declarations for the per-descriptor processing helpers. */
static int swcr_encdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static int swcr_authcompute(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static int swcr_authenc(struct cryptop *crp);
static int swcr_compdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static void swcr_freesession(device_t dev, crypto_session_t cses);
78
/*
 * Apply a symmetric encryption/decryption algorithm to the data described
 * by @crd, operating in place on @buf (an mbuf chain, a struct uio, or a
 * flat buffer, selected by @flags).
 *
 * For plain block ciphers this implements CBC chaining by hand (the
 * XOR-with-previous-block logic below); xforms that provide a reinit
 * method (e.g. counter modes) do all IV handling themselves.
 *
 * Returns 0 on success or an errno value.
 */
static int
swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
    int flags)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	struct enc_xform *exf;
	int i, j, k, blks, ind, count, ivlen;
	struct uio *uio, uiolcl;
	struct iovec iovlcl[4];
	struct iovec *iov;
	int iovcnt, iovalloc;
	int error;

	error = 0;

	exf = sw->sw_exf;
	blks = exf->blocksize;
	ivlen = exf->ivsize;

	/* Check for non-padded data */
	if (crd->crd_len % blks)
		return EINVAL;

	/* AES-ICM has no way to recover a missing counter block. */
	if (crd->crd_alg == CRYPTO_AES_ICM &&
	    (crd->crd_flags & CRD_F_IV_EXPLICIT) == 0)
		return (EINVAL);

	/* Initialize the IV */
	if (crd->crd_flags & CRD_F_ENCRYPT) {
		/* IV explicitly provided ? */
		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crd->crd_iv, iv, ivlen);
		else
			arc4rand(iv, ivlen, 0);

		/* Do we need to write the IV */
		if (!(crd->crd_flags & CRD_F_IV_PRESENT))
			crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);

	} else {	/* Decryption */
		/* IV explicitly provided ? */
		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crd->crd_iv, iv, ivlen);
		else {
			/* Get IV off buf */
			crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
		}
	}

	/* Per-request key: replace the session key schedule for this call. */
	if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
		int error;	/* intentionally shadows the outer 'error' */

		if (sw->sw_kschedule)
			exf->zerokey(&(sw->sw_kschedule));

		error = exf->setkey(&sw->sw_kschedule,
		    crd->crd_key, crd->crd_klen / 8);
		if (error)
			return (error);
	}

	/*
	 * Normalize every buffer type to a struct uio so the main loop
	 * only has to deal with iovecs.
	 */
	iov = iovlcl;
	iovcnt = nitems(iovlcl);
	iovalloc = 0;
	uio = &uiolcl;
	if ((flags & CRYPTO_F_IMBUF) != 0) {
		/* May allocate a larger iovec array; freed at 'out'. */
		error = crypto_mbuftoiov((struct mbuf *)buf, &iov, &iovcnt,
		    &iovalloc);
		if (error)
			return (error);
		uio->uio_iov = iov;
		uio->uio_iovcnt = iovcnt;
	} else if ((flags & CRYPTO_F_IOV) != 0)
		uio = (struct uio *)buf;
	else {
		/* Flat buffer: wrap it in a single iovec. */
		iov[0].iov_base = buf;
		iov[0].iov_len = crd->crd_skip + crd->crd_len;
		uio->uio_iov = iov;
		uio->uio_iovcnt = 1;
	}

	ivp = iv;

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	/* Locate the iovec (ind) and offset within it (k) of crd_skip. */
	count = crd->crd_skip;
	ind = cuio_getptr(uio, count, &k);
	if (ind == -1) {
		error = EINVAL;
		goto out;
	}

	i = crd->crd_len;	/* bytes remaining to process */

	while (i > 0) {
		/*
		 * If there's insufficient data at the end of
		 * an iovec, we have to do some copying.
		 */
		if (uio->uio_iov[ind].iov_len < k + blks &&
		    uio->uio_iov[ind].iov_len != k) {
			/* Gather one block that straddles iovecs. */
			cuio_copydata(uio, count, blks, blk);

			/* Actual encryption/decryption */
			if (exf->reinit) {
				if (crd->crd_flags & CRD_F_ENCRYPT) {
					exf->encrypt(sw->sw_kschedule,
					    blk);
				} else {
					exf->decrypt(sw->sw_kschedule,
					    blk);
				}
			} else if (crd->crd_flags & CRD_F_ENCRYPT) {
				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, blk);

				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				bcopy(blk, iv, blks);
				ivp = iv;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(blk, nivp, blks);

				exf->decrypt(sw->sw_kschedule, blk);

				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				ivp = nivp;
			}

			/* Copy back decrypted block */
			cuio_copyback(uio, count, blks, blk);

			count += blks;

			/* Advance pointer */
			ind = cuio_getptr(uio, count, &k);
			if (ind == -1) {
				error = EINVAL;
				goto out;
			}

			i -= blks;

			/* Could be done... */
			if (i == 0)
				break;
		}

		/*
		 * Fast path: process blocks in place while the current
		 * iovec holds at least one whole cipher block.
		 */
		while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
			uint8_t *idat;
			size_t nb, rem;

			nb = blks;
			/* Contiguous bytes available in this iovec. */
			rem = MIN((size_t)i,
			    uio->uio_iov[ind].iov_len - (size_t)k);
			idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;

			if (exf->reinit) {
				if ((crd->crd_flags & CRD_F_ENCRYPT) != 0 &&
				    exf->encrypt_multi == NULL)
					exf->encrypt(sw->sw_kschedule,
					    idat);
				else if ((crd->crd_flags & CRD_F_ENCRYPT) != 0) {
					/* Multi-block: whole blocks only. */
					nb = rounddown(rem, blks);
					exf->encrypt_multi(sw->sw_kschedule,
					    idat, nb);
				} else if (exf->decrypt_multi == NULL)
					exf->decrypt(sw->sw_kschedule,
					    idat);
				else {
					nb = rounddown(rem, blks);
					exf->decrypt_multi(sw->sw_kschedule,
					    idat, nb);
				}
			} else if (crd->crd_flags & CRD_F_ENCRYPT) {
				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, idat);
				ivp = idat;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block to be used
				 * in next block's processing.
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(idat, nivp, blks);

				exf->decrypt(sw->sw_kschedule, idat);

				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				ivp = nivp;
			}

			count += nb;
			k += nb;
			i -= nb;
		}

		/*
		 * Advance to the next iov if the end of the current iov
		 * is aligned with the end of a cipher block.
		 * Note that the code is equivalent to calling:
		 * ind = cuio_getptr(uio, count, &k);
		 */
		if (i > 0 && k == uio->uio_iov[ind].iov_len) {
			k = 0;
			ind++;
			if (ind >= uio->uio_iovcnt) {
				error = EINVAL;
				goto out;
			}
		}
	}

out:
	if (iovalloc)
		free(iov, M_CRYPTO_DATA);

	return (error);
}
327
/*
 * Prepare the authentication context(s) in @sw for the given @key of
 * @klen bits, according to the algorithm type of @axf.
 *
 * For HMAC algorithms this precomputes the ipad/opad-keyed inner and
 * outer contexts (sw_ictx/sw_octx); note the key bytes are XORed in
 * place and restored before returning.  For KPDK the key itself is
 * stashed in sw_octx so swcr_authcompute() can append it.
 *
 * NOTE(review): the HMAC path assumes klen <= axf->blocksize (the
 * "axf->blocksize - klen" pad updates would misbehave otherwise) --
 * callers are expected to have validated this.
 *
 * Returns 0 on success or EINVAL for a bad key/algorithm combination.
 */
static int __result_use_check
swcr_authprepare(struct auth_hash *axf, struct swcr_data *sw, u_char *key,
    int klen)
{
	int k;

	klen /= 8;	/* bits -> bytes */

	switch (axf->type) {
	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		/* Inner context: H(key ^ ipad || ...). */
		for (k = 0; k < klen; k++)
			key[k] ^= HMAC_IPAD_VAL;

		axf->Init(sw->sw_ictx);
		axf->Update(sw->sw_ictx, key, klen);
		axf->Update(sw->sw_ictx, hmac_ipad_buffer, axf->blocksize - klen);

		/* Flip the key bytes from ipad to opad in one pass. */
		for (k = 0; k < klen; k++)
			key[k] ^= (HMAC_IPAD_VAL ^ HMAC_OPAD_VAL);

		/* Outer context: H(key ^ opad || ...). */
		axf->Init(sw->sw_octx);
		axf->Update(sw->sw_octx, key, klen);
		axf->Update(sw->sw_octx, hmac_opad_buffer, axf->blocksize - klen);

		/* Restore the caller's key bytes. */
		for (k = 0; k < klen; k++)
			key[k] ^= HMAC_OPAD_VAL;
		break;
	case CRYPTO_MD5_KPDK:
	case CRYPTO_SHA1_KPDK:
	{
		/*
		 * We need a buffer that can hold an md5 and a sha1 result
		 * just to throw it away.
		 * What we do here is the initial part of:
		 *   ALGO( key, keyfill, .. )
		 * adding the key to sw_ictx and abusing Final() to get the
		 * "keyfill" padding.
		 * In addition we abuse the sw_octx to save the key to have
		 * it to be able to append it at the end in swcr_authcompute().
		 */
		u_char buf[SHA1_RESULTLEN];

		sw->sw_klen = klen;
		bcopy(key, sw->sw_octx, klen);
		axf->Init(sw->sw_ictx);
		axf->Update(sw->sw_ictx, key, klen);
		axf->Final(buf, sw->sw_ictx);
		break;
	}
	case CRYPTO_POLY1305:
		if (klen != POLY1305_KEY_LEN) {
			CRYPTDEB("bad poly1305 key size %d", klen);
			return EINVAL;
		}
		/* FALLTHROUGH */
	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
		/* Keyed hashes: the xform keeps the key in its context. */
		axf->Setkey(sw->sw_ictx, key, klen);
		axf->Init(sw->sw_ictx);
		break;
	default:
		printf("%s: CRD_F_KEY_EXPLICIT flag given, but algorithm %d "
		    "doesn't use keys.\n", __func__, axf->type);
		return EINVAL;
	}
	return 0;
}
402
/*
 * Compute keyed-hash authenticator.
 *
 * Hashes crd_len bytes starting at crd_skip of @buf and writes the
 * resulting MAC (truncated to sw_mlen if set) at crd_inject.
 * Returns 0 on success or an errno value.
 */
static int
swcr_authcompute(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
    int flags)
{
	unsigned char aalg[HASH_MAX_LEN];
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	if (sw->sw_ictx == 0)
		return EINVAL;

	axf = sw->sw_axf;

	/* Per-request key overrides the session key material. */
	if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
		err = swcr_authprepare(axf, sw, crd->crd_key, crd->crd_klen);
		if (err != 0)
			return err;
	}

	/* Work on a copy so the precomputed session context stays intact. */
	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	err = crypto_apply(flags, buf, crd->crd_skip, crd->crd_len,
	    (int (*)(void *, void *, unsigned int))axf->Update, (caddr_t)&ctx);
	if (err)
		return err;

	switch (sw->sw_alg) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		/* Plain (unkeyed) digest. */
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)
			return EINVAL;

		/* HMAC outer pass: H(opad-ctx || inner digest). */
		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_KPDK:
	case CRYPTO_SHA1_KPDK:
		/* If we have no key saved, return error. */
		if (sw->sw_octx == NULL)
			return EINVAL;

		/*
		 * Add the trailing copy of the key (see comment in
		 * swcr_authprepare()) after the data:
		 *   ALGO( .., key, algofill )
		 * and let Final() do the proper, natural "algofill"
		 * padding.
		 */
		axf->Update(&ctx, sw->sw_octx, sw->sw_klen);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_POLY1305:
		/* Key (if any) was folded into the context at setup time. */
		axf->Final(aalg, &ctx);
		break;
	}

	/* Inject the authentication data */
	crypto_copyback(flags, buf, crd->crd_inject,
	    sw->sw_mlen == 0 ? axf->hashsize : sw->sw_mlen, aalg);
	return 0;
}
488
/* Compile-time bounds: crd_len is an int, so it can never exceed the
 * GCM limits on plaintext and AAD length. */
CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */

/*
 * Apply a combined encryption-authentication transformation (AEAD:
 * AES-GCM, AES-CCM, or GMAC-only).
 *
 * Walks the request's descriptors to find the cipher (crde) and auth
 * (crda) halves, validates that they form a legal pair, MACs the AAD,
 * then runs the encrypt+MAC (or MAC+verify-then-decrypt) passes.
 * For decryption the tag is verified BEFORE any plaintext is written
 * back; on mismatch EBADMSG is returned and the buffer is untouched.
 */
static int
swcr_authenc(struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char uaalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	union authctx ctx;
	struct swcr_session *ses;
	struct cryptodesc *crd, *crda = NULL, *crde = NULL;
	struct swcr_data *sw, *swa, *swe = NULL;
	struct auth_hash *axf = NULL;
	struct enc_xform *exf = NULL;
	caddr_t buf = (caddr_t)crp->crp_buf;
	uint32_t *blkp;
	int aadlen, blksz, i, ivlen, len, iskip, oskip, r;
	int isccm = 0;

	/* iskip/oskip remain 0 in this version; kept for the AAD loop shape. */
	ivlen = blksz = iskip = oskip = 0;

	ses = crypto_get_driver_session(crp->crp_session);

	/* Sort the descriptors into the cipher and the auth halves. */
	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
		for (i = 0; i < nitems(ses->swcr_algorithms) &&
		    ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
			;
		if (i == nitems(ses->swcr_algorithms))
			return (EINVAL);

		sw = &ses->swcr_algorithms[i];
		switch (sw->sw_alg) {
		case CRYPTO_AES_CCM_16:
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_NIST_GMAC:
			swe = sw;
			crde = crd;
			exf = swe->sw_exf;
			/* AES_CCM_IV_LEN and AES_GCM_IV_LEN are both 12 */
			ivlen = AES_CCM_IV_LEN;
			break;
		case CRYPTO_AES_CCM_CBC_MAC:
			isccm = 1;
			/* FALLTHROUGH */
		case CRYPTO_AES_128_NIST_GMAC:
		case CRYPTO_AES_192_NIST_GMAC:
		case CRYPTO_AES_256_NIST_GMAC:
			swa = sw;
			crda = crd;
			axf = swa->sw_axf;
			if (swa->sw_ictx == 0)
				return (EINVAL);
			/* Start from the precomputed (keyed) MAC context. */
			bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
			blksz = axf->blocksize;
			break;
		default:
			return (EINVAL);
		}
	}
	if (crde == NULL || crda == NULL)
		return (EINVAL);
	/*
	 * We need to make sure that the auth algorithm matches the
	 * encr algorithm.  Specifically, for AES-GCM must go with
	 * AES NIST GMAC, and AES-CCM must go with CBC-MAC.
	 */
	if (crde->crd_alg == CRYPTO_AES_NIST_GCM_16) {
		switch (crda->crd_alg) {
		case CRYPTO_AES_128_NIST_GMAC:
		case CRYPTO_AES_192_NIST_GMAC:
		case CRYPTO_AES_256_NIST_GMAC:
			break;	/* Good! */
		default:
			return (EINVAL);	/* Not good! */
		}
	} else if (crde->crd_alg == CRYPTO_AES_CCM_16 &&
	    crda->crd_alg != CRYPTO_AES_CCM_CBC_MAC)
		return (EINVAL);

	/* GCM/CCM cannot reconstruct a missing nonce. */
	if ((crde->crd_alg == CRYPTO_AES_NIST_GCM_16 ||
	    crde->crd_alg == CRYPTO_AES_CCM_16) &&
	    (crde->crd_flags & CRD_F_IV_EXPLICIT) == 0)
		return (EINVAL);

	/* Both halves must be keyed identically. */
	if (crde->crd_klen != crda->crd_klen)
		return (EINVAL);

	/* Initialize the IV */
	if (crde->crd_flags & CRD_F_ENCRYPT) {
		/* IV explicitly provided ? */
		if (crde->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crde->crd_iv, iv, ivlen);
		else
			arc4rand(iv, ivlen, 0);

		/* Do we need to write the IV */
		if (!(crde->crd_flags & CRD_F_IV_PRESENT))
			crypto_copyback(crp->crp_flags, buf, crde->crd_inject,
			    ivlen, iv);

	} else {	/* Decryption */
		/* IV explicitly provided ? */
		if (crde->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crde->crd_iv, iv, ivlen);
		else {
			/* Get IV off buf */
			crypto_copydata(crp->crp_flags, buf, crde->crd_inject,
			    ivlen, iv);
		}
	}

	if (swa->sw_alg == CRYPTO_AES_CCM_CBC_MAC) {
		/*
		 * AES CCM-CBC needs to know the length of
		 * both the auth data, and payload data, before
		 * doing the auth computation.
		 */
		ctx.aes_cbc_mac_ctx.authDataLength = crda->crd_len;
		ctx.aes_cbc_mac_ctx.cryptDataLength = crde->crd_len;
	}
	/* Supply MAC with IV */
	if (axf->Reinit)
		axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	aadlen = crda->crd_len;

	/* Feed the AAD to the MAC one zero-padded block at a time. */
	for (i = iskip; i < crda->crd_len; i += blksz) {
		len = MIN(crda->crd_len - i, blksz - oskip);
		crypto_copydata(crp->crp_flags, buf, crda->crd_skip + i, len,
		    blk + oskip);
		bzero(blk + len + oskip, blksz - len - oskip);
		axf->Update(&ctx, blk, blksz);
		oskip = 0; /* reset initial output offset */
	}

	if (exf->reinit)
		exf->reinit(swe->sw_kschedule, iv);

	/* Do encryption/decryption with MAC */
	for (i = 0; i < crde->crd_len; i += len) {
		if (exf->encrypt_multi != NULL) {
			len = rounddown(crde->crd_len - i, blksz);
			if (len == 0)
				len = blksz;
			else
				len = MIN(len, sizeof(blkbuf));
		} else
			len = blksz;
		len = MIN(crde->crd_len - i, len);
		if (len < blksz)
			bzero(blk, blksz);	/* zero-pad the final partial block */
		crypto_copydata(crp->crp_flags, buf, crde->crd_skip + i, len,
		    blk);
		/*
		 * One of the problems with CCM+CBC is that the authentication
		 * is done on the unecncrypted data.  As a result, we have
		 * to do the authentication update at different times,
		 * depending on whether it's CCM or not.
		 */
		if (crde->crd_flags & CRD_F_ENCRYPT) {
			if (isccm)
				axf->Update(&ctx, blk, len);	/* MAC plaintext */
			if (exf->encrypt_multi != NULL)
				exf->encrypt_multi(swe->sw_kschedule, blk,
				    len);
			else
				exf->encrypt(swe->sw_kschedule, blk);
			if (!isccm)
				axf->Update(&ctx, blk, len);	/* GCM MACs ciphertext */
			crypto_copyback(crp->crp_flags, buf,
			    crde->crd_skip + i, len, blk);
		} else {
			/*
			 * Decrypt pass only computes the MAC; the plaintext
			 * is NOT written back here -- that happens after the
			 * tag has been verified below.
			 */
			if (isccm) {
				KASSERT(exf->encrypt_multi == NULL,
				    ("assume CCM is single-block only"));
				exf->decrypt(swe->sw_kschedule, blk);
			}
			axf->Update(&ctx, blk, len);
		}
	}

	/* Do any required special finalization */
	switch (crda->crd_alg) {
	case CRYPTO_AES_128_NIST_GMAC:
	case CRYPTO_AES_192_NIST_GMAC:
	case CRYPTO_AES_256_NIST_GMAC:
		/* length block */
		bzero(blk, blksz);
		blkp = (uint32_t *)blk + 1;
		*blkp = htobe32(aadlen * 8);
		blkp = (uint32_t *)blk + 3;
		*blkp = htobe32(crde->crd_len * 8);
		axf->Update(&ctx, blk, blksz);
		break;
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	/* Validate tag */
	if (!(crde->crd_flags & CRD_F_ENCRYPT)) {
		crypto_copydata(crp->crp_flags, buf, crda->crd_inject,
		    axf->hashsize, uaalg);

		/* Constant-time compare to avoid a timing oracle. */
		r = timingsafe_bcmp(aalg, uaalg, axf->hashsize);
		if (r == 0) {
			/* tag matches, decrypt data */
			if (isccm) {
				/* Restart CCM: blocks were consumed above. */
				KASSERT(exf->reinit != NULL,
				    ("AES-CCM reinit function must be set"));
				exf->reinit(swe->sw_kschedule, iv);
			}
			for (i = 0; i < crde->crd_len; i += blksz) {
				len = MIN(crde->crd_len - i, blksz);
				if (len < blksz)
					bzero(blk, blksz);
				crypto_copydata(crp->crp_flags, buf,
				    crde->crd_skip + i, len, blk);
				exf->decrypt(swe->sw_kschedule, blk);
				crypto_copyback(crp->crp_flags, buf,
				    crde->crd_skip + i, len, blk);
			}
		} else
			return (EBADMSG);
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp->crp_flags, buf, crda->crd_inject,
		    axf->hashsize, aalg);
	}

	return (0);
}
728
729 /*
730 * Apply a compression/decompression algorithm
731 */
732 static int
733 swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
734 caddr_t buf, int flags)
735 {
736 u_int8_t *data, *out;
737 struct comp_algo *cxf;
738 int adj;
739 u_int32_t result;
740
741 cxf = sw->sw_cxf;
742
743 /* We must handle the whole buffer of data in one time
744 * then if there is not all the data in the mbuf, we must
745 * copy in a buffer.
746 */
747
748 data = malloc(crd->crd_len, M_CRYPTO_DATA, M_NOWAIT);
749 if (data == NULL)
750 return (EINVAL);
751 crypto_copydata(flags, buf, crd->crd_skip, crd->crd_len, data);
752
753 if (crd->crd_flags & CRD_F_COMP)
754 result = cxf->compress(data, crd->crd_len, &out);
755 else
756 result = cxf->decompress(data, crd->crd_len, &out);
757
758 free(data, M_CRYPTO_DATA);
759 if (result == 0)
760 return EINVAL;
761
762 /* Copy back the (de)compressed data. m_copyback is
763 * extending the mbuf as necessary.
764 */
765 sw->sw_size = result;
766 /* Check the compressed size when doing compression */
767 if (crd->crd_flags & CRD_F_COMP) {
768 if (result >= crd->crd_len) {
769 /* Compression was useless, we lost time */
770 free(out, M_CRYPTO_DATA);
771 return 0;
772 }
773 }
774
775 crypto_copyback(flags, buf, crd->crd_skip, result, out);
776 if (result < crd->crd_len) {
777 adj = result - crd->crd_len;
778 if (flags & CRYPTO_F_IMBUF) {
779 adj = result - crd->crd_len;
780 m_adj((struct mbuf *)buf, adj);
781 } else if (flags & CRYPTO_F_IOV) {
782 struct uio *uio = (struct uio *)buf;
783 int ind;
784
785 adj = crd->crd_len - result;
786 ind = uio->uio_iovcnt - 1;
787
788 while (adj > 0 && ind >= 0) {
789 if (adj < uio->uio_iov[ind].iov_len) {
790 uio->uio_iov[ind].iov_len -= adj;
791 break;
792 }
793
794 adj -= uio->uio_iov[ind].iov_len;
795 uio->uio_iov[ind].iov_len = 0;
796 ind--;
797 uio->uio_iovcnt--;
798 }
799 }
800 }
801 free(out, M_CRYPTO_DATA);
802 return 0;
803 }
804
/*
 * Generate a new software session.
 *
 * Walks the cryptoini chain (at most nitems(ses->swcr_algorithms)
 * entries) and initializes one swcr_data slot per requested algorithm:
 * key schedules for ciphers, ictx/octx contexts for MACs, the comp
 * xform pointer for compression.  On any failure the partially built
 * session is torn down via swcr_freesession() before returning.
 *
 * Returns 0 on success or an errno value.
 */
static int
swcr_newsession(device_t dev, crypto_session_t cses, struct cryptoini *cri)
{
	struct swcr_session *ses;
	struct swcr_data *swd;
	struct auth_hash *axf;
	struct enc_xform *txf;
	struct comp_algo *cxf;
	size_t i;
	int len;
	int error;

	if (cses == NULL || cri == NULL)
		return EINVAL;

	ses = crypto_get_driver_session(cses);
	mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);

	for (i = 0; cri != NULL && i < nitems(ses->swcr_algorithms); i++) {
		swd = &ses->swcr_algorithms[i];

		switch (cri->cri_alg) {
		case CRYPTO_DES_CBC:
			txf = &enc_xform_des;
			goto enccommon;
		case CRYPTO_3DES_CBC:
			txf = &enc_xform_3des;
			goto enccommon;
		case CRYPTO_BLF_CBC:
			txf = &enc_xform_blf;
			goto enccommon;
		case CRYPTO_CAST_CBC:
			txf = &enc_xform_cast5;
			goto enccommon;
		case CRYPTO_SKIPJACK_CBC:
			txf = &enc_xform_skipjack;
			goto enccommon;
		case CRYPTO_RIJNDAEL128_CBC:
			txf = &enc_xform_rijndael128;
			goto enccommon;
		case CRYPTO_AES_XTS:
			txf = &enc_xform_aes_xts;
			goto enccommon;
		case CRYPTO_AES_ICM:
			txf = &enc_xform_aes_icm;
			goto enccommon;
		case CRYPTO_AES_NIST_GCM_16:
			txf = &enc_xform_aes_nist_gcm;
			goto enccommon;
		case CRYPTO_AES_CCM_16:
			txf = &enc_xform_ccm;
			goto enccommon;
		case CRYPTO_AES_NIST_GMAC:
			/*
			 * GMAC-only: no cipher key schedule is set here.
			 * NOTE(review): keying appears to be handled by the
			 * paired *_NIST_GMAC auth slot -- confirm.
			 */
			txf = &enc_xform_aes_nist_gmac;
			swd->sw_exf = txf;
			break;
		case CRYPTO_CAMELLIA_CBC:
			txf = &enc_xform_camellia;
			goto enccommon;
		case CRYPTO_NULL_CBC:
			txf = &enc_xform_null;
			goto enccommon;
		case CRYPTO_CHACHA20:
			txf = &enc_xform_chacha20;
			goto enccommon;
		enccommon:
			/* Key may also arrive later via CRD_F_KEY_EXPLICIT. */
			if (cri->cri_key != NULL) {
				error = txf->setkey(&swd->sw_kschedule,
				    cri->cri_key, cri->cri_klen / 8);
				if (error) {
					swcr_freesession(dev, cses);
					return error;
				}
			}
			swd->sw_exf = txf;
			break;

		case CRYPTO_MD5_HMAC:
			axf = &auth_hash_hmac_md5;
			goto authcommon;
		case CRYPTO_SHA1_HMAC:
			axf = &auth_hash_hmac_sha1;
			goto authcommon;
		case CRYPTO_SHA2_224_HMAC:
			axf = &auth_hash_hmac_sha2_224;
			goto authcommon;
		case CRYPTO_SHA2_256_HMAC:
			axf = &auth_hash_hmac_sha2_256;
			goto authcommon;
		case CRYPTO_SHA2_384_HMAC:
			axf = &auth_hash_hmac_sha2_384;
			goto authcommon;
		case CRYPTO_SHA2_512_HMAC:
			axf = &auth_hash_hmac_sha2_512;
			goto authcommon;
		case CRYPTO_NULL_HMAC:
			axf = &auth_hash_null;
			goto authcommon;
		case CRYPTO_RIPEMD160_HMAC:
			axf = &auth_hash_hmac_ripemd_160;
		authcommon:
			/* HMAC needs both an inner and an outer context. */
			swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			    M_NOWAIT);
			if (swd->sw_ictx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}

			swd->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			    M_NOWAIT);
			if (swd->sw_octx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}

			if (cri->cri_key != NULL) {
				error = swcr_authprepare(axf, swd,
				    cri->cri_key, cri->cri_klen);
				if (error != 0) {
					swcr_freesession(dev, cses);
					return error;
				}
			}

			swd->sw_mlen = cri->cri_mlen;	/* tag truncation, 0 = full */
			swd->sw_axf = axf;
			break;

		case CRYPTO_MD5_KPDK:
			axf = &auth_hash_key_md5;
			goto auth2common;

		case CRYPTO_SHA1_KPDK:
			axf = &auth_hash_key_sha1;
		auth2common:
			swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			    M_NOWAIT);
			if (swd->sw_ictx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}

			/* KPDK: sw_octx stores the raw key, not a hash ctx. */
			swd->sw_octx = malloc(cri->cri_klen / 8,
			    M_CRYPTO_DATA, M_NOWAIT);
			if (swd->sw_octx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}

			/* Store the key so we can "append" it to the payload */
			if (cri->cri_key != NULL) {
				error = swcr_authprepare(axf, swd,
				    cri->cri_key, cri->cri_klen);
				if (error != 0) {
					swcr_freesession(dev, cses);
					return error;
				}
			}

			swd->sw_mlen = cri->cri_mlen;
			swd->sw_axf = axf;
			break;
#ifdef notdef
		case CRYPTO_MD5:
			axf = &auth_hash_md5;
			goto auth3common;
#endif

		case CRYPTO_SHA1:
			axf = &auth_hash_sha1;
			goto auth3common;
		case CRYPTO_SHA2_224:
			axf = &auth_hash_sha2_224;
			goto auth3common;
		case CRYPTO_SHA2_256:
			axf = &auth_hash_sha2_256;
			goto auth3common;
		case CRYPTO_SHA2_384:
			axf = &auth_hash_sha2_384;
			goto auth3common;
		case CRYPTO_SHA2_512:
			axf = &auth_hash_sha2_512;

		auth3common:
			/* Unkeyed digests: only an inner context. */
			swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			    M_NOWAIT);
			if (swd->sw_ictx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}

			axf->Init(swd->sw_ictx);
			swd->sw_mlen = cri->cri_mlen;
			swd->sw_axf = axf;
			break;

		case CRYPTO_AES_CCM_CBC_MAC:
			/* CBC-MAC variant is selected by the key width. */
			switch (cri->cri_klen) {
			case 128:
				axf = &auth_hash_ccm_cbc_mac_128;
				break;
			case 192:
				axf = &auth_hash_ccm_cbc_mac_192;
				break;
			case 256:
				axf = &auth_hash_ccm_cbc_mac_256;
				break;
			default:
				swcr_freesession(dev, cses);
				return EINVAL;
			}
			goto auth4common;
		case CRYPTO_AES_128_NIST_GMAC:
			axf = &auth_hash_nist_gmac_aes_128;
			goto auth4common;

		case CRYPTO_AES_192_NIST_GMAC:
			axf = &auth_hash_nist_gmac_aes_192;
			goto auth4common;

		case CRYPTO_AES_256_NIST_GMAC:
			axf = &auth_hash_nist_gmac_aes_256;
		auth4common:
			len = cri->cri_klen / 8;
			if (len != 16 && len != 24 && len != 32) {
				swcr_freesession(dev, cses);
				return EINVAL;
			}

			swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			    M_NOWAIT);
			if (swd->sw_ictx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}
			axf->Init(swd->sw_ictx);
			/* NOTE(review): cri_key is not NULL-checked here,
			 * unlike enccommon/authcommon -- confirm callers
			 * always supply a key for GMAC/CBC-MAC. */
			axf->Setkey(swd->sw_ictx, cri->cri_key, len);
			swd->sw_axf = axf;
			break;

		case CRYPTO_BLAKE2B:
			axf = &auth_hash_blake2b;
			goto auth5common;
		case CRYPTO_BLAKE2S:
			axf = &auth_hash_blake2s;
			goto auth5common;
		case CRYPTO_POLY1305:
			axf = &auth_hash_poly1305;
		auth5common:
			swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
			    M_NOWAIT);
			if (swd->sw_ictx == NULL) {
				swcr_freesession(dev, cses);
				return ENOBUFS;
			}
			/* NOTE(review): same unchecked cri_key as above. */
			axf->Setkey(swd->sw_ictx, cri->cri_key,
			    cri->cri_klen / 8);
			axf->Init(swd->sw_ictx);
			swd->sw_axf = axf;
			break;

		case CRYPTO_DEFLATE_COMP:
			cxf = &comp_algo_deflate;
			swd->sw_cxf = cxf;
			break;
		default:
			swcr_freesession(dev, cses);
			return EINVAL;
		}

		swd->sw_alg = cri->cri_alg;
		cri = cri->cri_next;
		ses->swcr_nalgs++;
	}

	/* More algorithms requested than the session can hold. */
	if (cri != NULL) {
		CRYPTDEB("Bogus session request for three or more algorithms");
		return EINVAL;
	}
	return 0;
}
1089
/*
 * Tear down a software session: zero and free per-algorithm key
 * schedules and hash contexts, and destroy the session lock.
 *
 * Also used as the rollback path from swcr_newsession(), so every
 * branch must tolerate partially initialized slots (NULL contexts,
 * sw_alg == 0 falling through the switch).
 */
static void
swcr_freesession(device_t dev, crypto_session_t cses)
{
	struct swcr_session *ses;
	struct swcr_data *swd;
	struct enc_xform *txf;
	struct auth_hash *axf;
	size_t i;

	ses = crypto_get_driver_session(cses);

	mtx_destroy(&ses->swcr_lock);
	for (i = 0; i < nitems(ses->swcr_algorithms); i++) {
		swd = &ses->swcr_algorithms[i];

		switch (swd->sw_alg) {
		case CRYPTO_DES_CBC:
		case CRYPTO_3DES_CBC:
		case CRYPTO_BLF_CBC:
		case CRYPTO_CAST_CBC:
		case CRYPTO_SKIPJACK_CBC:
		case CRYPTO_RIJNDAEL128_CBC:
		case CRYPTO_AES_XTS:
		case CRYPTO_AES_ICM:
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_CAMELLIA_CBC:
		case CRYPTO_NULL_CBC:
		case CRYPTO_CHACHA20:
		case CRYPTO_AES_CCM_16:
			/* Ciphers: scrub and release the key schedule. */
			txf = swd->sw_exf;

			if (swd->sw_kschedule)
				txf->zerokey(&(swd->sw_kschedule));
			break;

		case CRYPTO_MD5_HMAC:
		case CRYPTO_SHA1_HMAC:
		case CRYPTO_SHA2_224_HMAC:
		case CRYPTO_SHA2_256_HMAC:
		case CRYPTO_SHA2_384_HMAC:
		case CRYPTO_SHA2_512_HMAC:
		case CRYPTO_RIPEMD160_HMAC:
		case CRYPTO_NULL_HMAC:
		case CRYPTO_AES_CCM_CBC_MAC:
			/* HMAC: both contexts hold key-derived state. */
			axf = swd->sw_axf;

			if (swd->sw_ictx) {
				bzero(swd->sw_ictx, axf->ctxsize);
				free(swd->sw_ictx, M_CRYPTO_DATA);
			}
			if (swd->sw_octx) {
				bzero(swd->sw_octx, axf->ctxsize);
				free(swd->sw_octx, M_CRYPTO_DATA);
			}
			break;

		case CRYPTO_MD5_KPDK:
		case CRYPTO_SHA1_KPDK:
			/* KPDK: sw_octx is the raw key (sw_klen bytes). */
			axf = swd->sw_axf;

			if (swd->sw_ictx) {
				bzero(swd->sw_ictx, axf->ctxsize);
				free(swd->sw_ictx, M_CRYPTO_DATA);
			}
			if (swd->sw_octx) {
				bzero(swd->sw_octx, swd->sw_klen);
				free(swd->sw_octx, M_CRYPTO_DATA);
			}
			break;

		case CRYPTO_BLAKE2B:
		case CRYPTO_BLAKE2S:
		case CRYPTO_MD5:
		case CRYPTO_POLY1305:
		case CRYPTO_SHA1:
		case CRYPTO_SHA2_224:
		case CRYPTO_SHA2_256:
		case CRYPTO_SHA2_384:
		case CRYPTO_SHA2_512:
		case CRYPTO_AES_128_NIST_GMAC:
		case CRYPTO_AES_192_NIST_GMAC:
		case CRYPTO_AES_256_NIST_GMAC:
			/* Single-context hashes; some are keyed, so use
			 * explicit_bzero to defeat dead-store elimination. */
			axf = swd->sw_axf;

			if (swd->sw_ictx) {
				explicit_bzero(swd->sw_ictx, axf->ctxsize);
				free(swd->sw_ictx, M_CRYPTO_DATA);
			}
			break;

		case CRYPTO_DEFLATE_COMP:
			/* Nothing to do */
			break;
		}
	}
}
1187
1188 /*
1189 * Process a software request.
1190 */
1191 static int
1192 swcr_process(device_t dev, struct cryptop *crp, int hint)
1193 {
1194 struct swcr_session *ses = NULL;
1195 struct cryptodesc *crd;
1196 struct swcr_data *sw;
1197 size_t i;
1198
1199 /* Sanity check */
1200 if (crp == NULL)
1201 return EINVAL;
1202
1203 if (crp->crp_desc == NULL || crp->crp_buf == NULL) {
1204 crp->crp_etype = EINVAL;
1205 goto done;
1206 }
1207
1208 ses = crypto_get_driver_session(crp->crp_session);
1209 mtx_lock(&ses->swcr_lock);
1210
1211 /* Go through crypto descriptors, processing as we go */
1212 for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
1213 /*
1214 * Find the crypto context.
1215 *
1216 * XXX Note that the logic here prevents us from having
1217 * XXX the same algorithm multiple times in a session
1218 * XXX (or rather, we can but it won't give us the right
1219 * XXX results). To do that, we'd need some way of differentiating
1220 * XXX between the various instances of an algorithm (so we can
1221 * XXX locate the correct crypto context).
1222 */
1223 for (i = 0; i < nitems(ses->swcr_algorithms) &&
1224 ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
1225 ;
1226
1227 /* No such context ? */
1228 if (i == nitems(ses->swcr_algorithms)) {
1229 crp->crp_etype = EINVAL;
1230 goto done;
1231 }
1232 sw = &ses->swcr_algorithms[i];
1233 switch (sw->sw_alg) {
1234 case CRYPTO_DES_CBC:
1235 case CRYPTO_3DES_CBC:
1236 case CRYPTO_BLF_CBC:
1237 case CRYPTO_CAST_CBC:
1238 case CRYPTO_SKIPJACK_CBC:
1239 case CRYPTO_RIJNDAEL128_CBC:
1240 case CRYPTO_AES_XTS:
1241 case CRYPTO_AES_ICM:
1242 case CRYPTO_CAMELLIA_CBC:
1243 case CRYPTO_CHACHA20:
1244 if ((crp->crp_etype = swcr_encdec(crd, sw,
1245 crp->crp_buf, crp->crp_flags)) != 0)
1246 goto done;
1247 break;
1248 case CRYPTO_NULL_CBC:
1249 crp->crp_etype = 0;
1250 break;
1251 case CRYPTO_MD5_HMAC:
1252 case CRYPTO_SHA1_HMAC:
1253 case CRYPTO_SHA2_224_HMAC:
1254 case CRYPTO_SHA2_256_HMAC:
1255 case CRYPTO_SHA2_384_HMAC:
1256 case CRYPTO_SHA2_512_HMAC:
1257 case CRYPTO_RIPEMD160_HMAC:
1258 case CRYPTO_NULL_HMAC:
1259 case CRYPTO_MD5_KPDK:
1260 case CRYPTO_SHA1_KPDK:
1261 case CRYPTO_MD5:
1262 case CRYPTO_SHA1:
1263 case CRYPTO_SHA2_224:
1264 case CRYPTO_SHA2_256:
1265 case CRYPTO_SHA2_384:
1266 case CRYPTO_SHA2_512:
1267 case CRYPTO_BLAKE2B:
1268 case CRYPTO_BLAKE2S:
1269 case CRYPTO_POLY1305:
1270 if ((crp->crp_etype = swcr_authcompute(crd, sw,
1271 crp->crp_buf, crp->crp_flags)) != 0)
1272 goto done;
1273 break;
1274
1275 case CRYPTO_AES_NIST_GCM_16:
1276 case CRYPTO_AES_NIST_GMAC:
1277 case CRYPTO_AES_128_NIST_GMAC:
1278 case CRYPTO_AES_192_NIST_GMAC:
1279 case CRYPTO_AES_256_NIST_GMAC:
1280 case CRYPTO_AES_CCM_16:
1281 case CRYPTO_AES_CCM_CBC_MAC:
1282 crp->crp_etype = swcr_authenc(crp);
1283 goto done;
1284
1285 case CRYPTO_DEFLATE_COMP:
1286 if ((crp->crp_etype = swcr_compdec(crd, sw,
1287 crp->crp_buf, crp->crp_flags)) != 0)
1288 goto done;
1289 else
1290 crp->crp_olen = (int)sw->sw_size;
1291 break;
1292
1293 default:
1294 /* Unknown/unsupported algorithm */
1295 crp->crp_etype = EINVAL;
1296 goto done;
1297 }
1298 }
1299
1300 done:
1301 if (ses)
1302 mtx_unlock(&ses->swcr_lock);
1303 crypto_done(crp);
1304 return 0;
1305 }
1306
1307 static void
1308 swcr_identify(driver_t *drv, device_t parent)
1309 {
1310 /* NB: order 10 is so we get attached after h/w devices */
1311 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1312 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1313 panic("cryptosoft: could not attach");
1314 }
1315
/*
 * Device probe: set a human-readable description and match only when
 * explicitly added by swcr_identify (BUS_PROBE_NOWILDCARD prevents
 * wildcard matches).
 */
static int
swcr_probe(device_t dev)
{
	device_set_desc(dev, "software crypto");
	return (BUS_PROBE_NOWILDCARD);
}
1322
1323 static int
1324 swcr_attach(device_t dev)
1325 {
1326 memset(hmac_ipad_buffer, HMAC_IPAD_VAL, HMAC_MAX_BLOCK_LEN);
1327 memset(hmac_opad_buffer, HMAC_OPAD_VAL, HMAC_MAX_BLOCK_LEN);
1328
1329 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1330 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1331 if (swcr_id < 0) {
1332 device_printf(dev, "cannot initialize!");
1333 return ENOMEM;
1334 }
1335 #define REGISTER(alg) \
1336 crypto_register(swcr_id, alg, 0,0)
1337 REGISTER(CRYPTO_DES_CBC);
1338 REGISTER(CRYPTO_3DES_CBC);
1339 REGISTER(CRYPTO_BLF_CBC);
1340 REGISTER(CRYPTO_CAST_CBC);
1341 REGISTER(CRYPTO_SKIPJACK_CBC);
1342 REGISTER(CRYPTO_NULL_CBC);
1343 REGISTER(CRYPTO_MD5_HMAC);
1344 REGISTER(CRYPTO_SHA1_HMAC);
1345 REGISTER(CRYPTO_SHA2_224_HMAC);
1346 REGISTER(CRYPTO_SHA2_256_HMAC);
1347 REGISTER(CRYPTO_SHA2_384_HMAC);
1348 REGISTER(CRYPTO_SHA2_512_HMAC);
1349 REGISTER(CRYPTO_RIPEMD160_HMAC);
1350 REGISTER(CRYPTO_NULL_HMAC);
1351 REGISTER(CRYPTO_MD5_KPDK);
1352 REGISTER(CRYPTO_SHA1_KPDK);
1353 REGISTER(CRYPTO_MD5);
1354 REGISTER(CRYPTO_SHA1);
1355 REGISTER(CRYPTO_SHA2_224);
1356 REGISTER(CRYPTO_SHA2_256);
1357 REGISTER(CRYPTO_SHA2_384);
1358 REGISTER(CRYPTO_SHA2_512);
1359 REGISTER(CRYPTO_RIJNDAEL128_CBC);
1360 REGISTER(CRYPTO_AES_XTS);
1361 REGISTER(CRYPTO_AES_ICM);
1362 REGISTER(CRYPTO_AES_NIST_GCM_16);
1363 REGISTER(CRYPTO_AES_NIST_GMAC);
1364 REGISTER(CRYPTO_AES_128_NIST_GMAC);
1365 REGISTER(CRYPTO_AES_192_NIST_GMAC);
1366 REGISTER(CRYPTO_AES_256_NIST_GMAC);
1367 REGISTER(CRYPTO_CAMELLIA_CBC);
1368 REGISTER(CRYPTO_DEFLATE_COMP);
1369 REGISTER(CRYPTO_BLAKE2B);
1370 REGISTER(CRYPTO_BLAKE2S);
1371 REGISTER(CRYPTO_CHACHA20);
1372 REGISTER(CRYPTO_AES_CCM_16);
1373 REGISTER(CRYPTO_AES_CCM_CBC_MAC);
1374 REGISTER(CRYPTO_POLY1305);
1375 #undef REGISTER
1376
1377 return 0;
1378 }
1379
/*
 * Device detach: withdraw all of this driver's algorithm registrations
 * from the crypto framework.
 */
static int
swcr_detach(device_t dev)
{
	crypto_unregister_all(swcr_id);
	return 0;
}
1386
1387 static device_method_t swcr_methods[] = {
1388 DEVMETHOD(device_identify, swcr_identify),
1389 DEVMETHOD(device_probe, swcr_probe),
1390 DEVMETHOD(device_attach, swcr_attach),
1391 DEVMETHOD(device_detach, swcr_detach),
1392
1393 DEVMETHOD(cryptodev_newsession, swcr_newsession),
1394 DEVMETHOD(cryptodev_freesession,swcr_freesession),
1395 DEVMETHOD(cryptodev_process, swcr_process),
1396
1397 {0, 0},
1398 };
1399
/* Newbus driver description for the software crypto pseudo-device. */
static driver_t swcr_driver = {
	"cryptosoft",
	swcr_methods,
	0,		/* NB: no softc */
};
/* Devclass handle used by DRIVER_MODULE below. */
static devclass_t swcr_devclass;
1406
/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module. This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);
/* XXX where to attach */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
MODULE_VERSION(cryptosoft, 1);
/* Explicit dependency on the crypto framework; see note above. */
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);