1 /* $OpenBSD: cryptosoft.c,v 1.35 2002/04/26 08:43:50 deraadt Exp $ */
2
3 /*-
4 * The author of this code is Angelos D. Keromytis (angelos@cis.upenn.edu)
5 * Copyright (c) 2002-2006 Sam Leffler, Errno Consulting
6 *
7 * This code was written by Angelos D. Keromytis in Athens, Greece, in
8 * February 2000. Network Security Technologies Inc. (NSTI) kindly
9 * supported the development of this code.
10 *
11 * Copyright (c) 2000, 2001 Angelos D. Keromytis
12 * Copyright (c) 2014 The FreeBSD Foundation
13 * All rights reserved.
14 *
15 * Portions of this software were developed by John-Mark Gurney
16 * under sponsorship of the FreeBSD Foundation and
17 * Rubicon Communications, LLC (Netgate).
18 *
19 * Permission to use, copy, and modify this software with or without fee
20 * is hereby granted, provided that this entire notice is included in
21 * all source code copies of any software which is or includes a copy or
22 * modification of this software.
23 *
24 * THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR
25 * IMPLIED WARRANTY. IN PARTICULAR, NONE OF THE AUTHORS MAKES ANY
26 * REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE
27 * MERCHANTABILITY OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR
28 * PURPOSE.
29 */
30
31 #include <sys/cdefs.h>
32 __FBSDID("$FreeBSD: releng/12.0/sys/opencrypto/cryptosoft.c 338953 2018-09-26 20:23:12Z sef $");
33
34 #include <sys/param.h>
35 #include <sys/systm.h>
36 #include <sys/malloc.h>
37 #include <sys/mbuf.h>
38 #include <sys/module.h>
39 #include <sys/sysctl.h>
40 #include <sys/errno.h>
41 #include <sys/random.h>
42 #include <sys/kernel.h>
43 #include <sys/uio.h>
44 #include <sys/lock.h>
45 #include <sys/rwlock.h>
46 #include <sys/endian.h>
47 #include <sys/limits.h>
48 #include <sys/mutex.h>
49
50 #include <crypto/blowfish/blowfish.h>
51 #include <crypto/sha1.h>
52 #include <opencrypto/rmd160.h>
53 #include <opencrypto/cast.h>
54 #include <opencrypto/skipjack.h>
55 #include <sys/md5.h>
56
57 #include <opencrypto/cryptodev.h>
58 #include <opencrypto/cryptosoft.h>
59 #include <opencrypto/xform.h>
60
61 #include <sys/kobj.h>
62 #include <sys/bus.h>
63 #include "cryptodev_if.h"
64
static int32_t swcr_id;		/* driver id from crypto_get_driverid(), set in swcr_attach() */

/*
 * Pre-filled HMAC inner/outer pad bytes (0x36/0x5c patterns), initialized
 * once in swcr_attach() and used by swcr_authprepare() to pad short keys.
 */
u_int8_t hmac_ipad_buffer[HMAC_MAX_BLOCK_LEN];
u_int8_t hmac_opad_buffer[HMAC_MAX_BLOCK_LEN];

/* Forward declarations; these back the cryptodev method table at file end. */
static	int swcr_encdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static	int swcr_authcompute(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static	int swcr_authenc(struct cryptop *crp);
static	int swcr_compdec(struct cryptodesc *, struct swcr_data *, caddr_t, int);
static	void swcr_freesession(device_t dev, crypto_session_t cses);
75
/*
 * Apply a symmetric encryption/decryption algorithm.
 *
 * The payload (crd->crd_len bytes starting at crd->crd_skip) is processed
 * in place.  'buf' may be an mbuf chain (CRYPTO_F_IMBUF), a struct uio
 * (CRYPTO_F_IOV) or contiguous memory; all three are normalized to a uio
 * below.  CBC chaining is done here for transforms without a reinit hook;
 * transforms that provide reinit (e.g. counter modes) handle IVs themselves.
 * Returns 0 or an errno value.
 */
static int
swcr_encdec(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
    int flags)
{
	unsigned char iv[EALG_MAX_BLOCK_LEN], blk[EALG_MAX_BLOCK_LEN];
	unsigned char *ivp, *nivp, iv2[EALG_MAX_BLOCK_LEN];
	struct enc_xform *exf;
	int i, j, k, blks, ind, count, ivlen;
	struct uio *uio, uiolcl;
	struct iovec iovlcl[4];
	struct iovec *iov;
	int iovcnt, iovalloc;
	int error;

	error = 0;

	exf = sw->sw_exf;
	blks = exf->blocksize;
	ivlen = exf->ivsize;

	/* Check for non-padded data */
	if (crd->crd_len % blks)
		return EINVAL;

	/* ICM cannot recover its IV from the buffer; the caller must supply it. */
	if (crd->crd_alg == CRYPTO_AES_ICM &&
	    (crd->crd_flags & CRD_F_IV_EXPLICIT) == 0)
		return (EINVAL);

	/* Initialize the IV */
	if (crd->crd_flags & CRD_F_ENCRYPT) {
		/* IV explicitly provided ? */
		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crd->crd_iv, iv, ivlen);
		else
			arc4rand(iv, ivlen, 0);

		/* Do we need to write the IV */
		if (!(crd->crd_flags & CRD_F_IV_PRESENT))
			crypto_copyback(flags, buf, crd->crd_inject, ivlen, iv);

	} else {	/* Decryption */
		/* IV explicitly provided ? */
		if (crd->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crd->crd_iv, iv, ivlen);
		else {
			/* Get IV off buf */
			crypto_copydata(flags, buf, crd->crd_inject, ivlen, iv);
		}
	}

	/* A per-request key replaces the session key schedule. */
	if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
		int error;	/* NOTE(review): shadows the outer 'error' */

		if (sw->sw_kschedule)
			exf->zerokey(&(sw->sw_kschedule));

		error = exf->setkey(&sw->sw_kschedule,
		    crd->crd_key, crd->crd_klen / 8);
		if (error)
			return (error);
	}

	/*
	 * Normalize the three possible buffer types to a single uio so the
	 * loop below only ever walks iovecs.  crypto_mbuftoiov() may
	 * allocate a larger iovec array (iovalloc != 0 -> free at 'out').
	 */
	iov = iovlcl;
	iovcnt = nitems(iovlcl);
	iovalloc = 0;
	uio = &uiolcl;
	if ((flags & CRYPTO_F_IMBUF) != 0) {
		error = crypto_mbuftoiov((struct mbuf *)buf, &iov, &iovcnt,
		    &iovalloc);
		if (error)
			return (error);
		uio->uio_iov = iov;
		uio->uio_iovcnt = iovcnt;
	} else if ((flags & CRYPTO_F_IOV) != 0)
		uio = (struct uio *)buf;
	else {
		iov[0].iov_base = buf;
		iov[0].iov_len = crd->crd_skip + crd->crd_len;
		uio->uio_iov = iov;
		uio->uio_iovcnt = 1;
	}

	ivp = iv;

	if (exf->reinit) {
		/*
		 * xforms that provide a reinit method perform all IV
		 * handling themselves.
		 */
		exf->reinit(sw->sw_kschedule, iv);
	}

	/* Locate the iovec index/offset of the first payload byte. */
	count = crd->crd_skip;
	ind = cuio_getptr(uio, count, &k);
	if (ind == -1) {
		error = EINVAL;
		goto out;
	}

	i = crd->crd_len;	/* bytes remaining to process */

	while (i > 0) {
		/*
		 * If there's insufficient data at the end of
		 * an iovec, we have to do some copying.
		 * (One block is bounced through 'blk'.)
		 */
		if (uio->uio_iov[ind].iov_len < k + blks &&
		    uio->uio_iov[ind].iov_len != k) {
			cuio_copydata(uio, count, blks, blk);

			/* Actual encryption/decryption */
			if (exf->reinit) {
				/* Transform does its own chaining. */
				if (crd->crd_flags & CRD_F_ENCRYPT) {
					exf->encrypt(sw->sw_kschedule,
					    blk);
				} else {
					exf->decrypt(sw->sw_kschedule,
					    blk);
				}
			} else if (crd->crd_flags & CRD_F_ENCRYPT) {
				/* XOR with previous block (CBC encrypt) */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, blk);

				/*
				 * Keep encrypted block for XOR'ing
				 * with next block
				 */
				bcopy(blk, iv, blks);
				ivp = iv;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block for XOR'ing
				 * with next block; alternate between
				 * iv and iv2 so the previous ciphertext
				 * block stays intact.
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(blk, nivp, blks);

				exf->decrypt(sw->sw_kschedule, blk);

				/* XOR with previous block */
				for (j = 0; j < blks; j++)
					blk[j] ^= ivp[j];

				ivp = nivp;
			}

			/* Copy back decrypted block */
			cuio_copyback(uio, count, blks, blk);

			count += blks;

			/* Advance pointer */
			ind = cuio_getptr(uio, count, &k);
			if (ind == -1) {
				error = EINVAL;
				goto out;
			}

			i -= blks;

			/* Could be done... */
			if (i == 0)
				break;
		}

		/*
		 * Fast path: whole cipher blocks that are contiguous within
		 * the current iovec are transformed in place; multi-block
		 * hooks (encrypt_multi/decrypt_multi) consume as many whole
		 * blocks as fit in one call.
		 */
		while (uio->uio_iov[ind].iov_len >= k + blks && i > 0) {
			uint8_t *idat;
			size_t nb, rem;

			nb = blks;
			rem = MIN((size_t)i,
			    uio->uio_iov[ind].iov_len - (size_t)k);
			idat = (uint8_t *)uio->uio_iov[ind].iov_base + k;

			if (exf->reinit) {
				if ((crd->crd_flags & CRD_F_ENCRYPT) != 0 &&
				    exf->encrypt_multi == NULL)
					exf->encrypt(sw->sw_kschedule,
					    idat);
				else if ((crd->crd_flags & CRD_F_ENCRYPT) != 0) {
					nb = rounddown(rem, blks);
					exf->encrypt_multi(sw->sw_kschedule,
					    idat, nb);
				} else if (exf->decrypt_multi == NULL)
					exf->decrypt(sw->sw_kschedule,
					    idat);
				else {
					nb = rounddown(rem, blks);
					exf->decrypt_multi(sw->sw_kschedule,
					    idat, nb);
				}
			} else if (crd->crd_flags & CRD_F_ENCRYPT) {
				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				exf->encrypt(sw->sw_kschedule, idat);
				ivp = idat;
			} else {	/* decrypt */
				/*
				 * Keep encrypted block to be used
				 * in next block's processing.
				 */
				nivp = (ivp == iv) ? iv2 : iv;
				bcopy(idat, nivp, blks);

				exf->decrypt(sw->sw_kschedule, idat);

				/* XOR with previous block/IV */
				for (j = 0; j < blks; j++)
					idat[j] ^= ivp[j];

				ivp = nivp;
			}

			count += nb;
			k += nb;
			i -= nb;
		}

		/*
		 * Advance to the next iov if the end of the current iov
		 * is aligned with the end of a cipher block.
		 * Note that the code is equivalent to calling:
		 * ind = cuio_getptr(uio, count, &k);
		 */
		if (i > 0 && k == uio->uio_iov[ind].iov_len) {
			k = 0;
			ind++;
			if (ind >= uio->uio_iovcnt) {
				error = EINVAL;
				goto out;
			}
		}
	}

out:
	if (iovalloc)
		free(iov, M_CRYPTO_DATA);

	return (error);
}
324
325 static int __result_use_check
326 swcr_authprepare(struct auth_hash *axf, struct swcr_data *sw, u_char *key,
327 int klen)
328 {
329 int k;
330
331 klen /= 8;
332
333 switch (axf->type) {
334 case CRYPTO_MD5_HMAC:
335 case CRYPTO_SHA1_HMAC:
336 case CRYPTO_SHA2_224_HMAC:
337 case CRYPTO_SHA2_256_HMAC:
338 case CRYPTO_SHA2_384_HMAC:
339 case CRYPTO_SHA2_512_HMAC:
340 case CRYPTO_NULL_HMAC:
341 case CRYPTO_RIPEMD160_HMAC:
342 for (k = 0; k < klen; k++)
343 key[k] ^= HMAC_IPAD_VAL;
344
345 axf->Init(sw->sw_ictx);
346 axf->Update(sw->sw_ictx, key, klen);
347 axf->Update(sw->sw_ictx, hmac_ipad_buffer, axf->blocksize - klen);
348
349 for (k = 0; k < klen; k++)
350 key[k] ^= (HMAC_IPAD_VAL ^ HMAC_OPAD_VAL);
351
352 axf->Init(sw->sw_octx);
353 axf->Update(sw->sw_octx, key, klen);
354 axf->Update(sw->sw_octx, hmac_opad_buffer, axf->blocksize - klen);
355
356 for (k = 0; k < klen; k++)
357 key[k] ^= HMAC_OPAD_VAL;
358 break;
359 case CRYPTO_MD5_KPDK:
360 case CRYPTO_SHA1_KPDK:
361 {
362 /*
363 * We need a buffer that can hold an md5 and a sha1 result
364 * just to throw it away.
365 * What we do here is the initial part of:
366 * ALGO( key, keyfill, .. )
367 * adding the key to sw_ictx and abusing Final() to get the
368 * "keyfill" padding.
369 * In addition we abuse the sw_octx to save the key to have
370 * it to be able to append it at the end in swcr_authcompute().
371 */
372 u_char buf[SHA1_RESULTLEN];
373
374 sw->sw_klen = klen;
375 bcopy(key, sw->sw_octx, klen);
376 axf->Init(sw->sw_ictx);
377 axf->Update(sw->sw_ictx, key, klen);
378 axf->Final(buf, sw->sw_ictx);
379 break;
380 }
381 case CRYPTO_POLY1305:
382 if (klen != POLY1305_KEY_LEN) {
383 CRYPTDEB("bad poly1305 key size %d", klen);
384 return EINVAL;
385 }
386 /* FALLTHROUGH */
387 case CRYPTO_BLAKE2B:
388 case CRYPTO_BLAKE2S:
389 axf->Setkey(sw->sw_ictx, key, klen);
390 axf->Init(sw->sw_ictx);
391 break;
392 default:
393 printf("%s: CRD_F_KEY_EXPLICIT flag given, but algorithm %d "
394 "doesn't use keys.\n", __func__, axf->type);
395 return EINVAL;
396 }
397 return 0;
398 }
399
/*
 * Compute keyed-hash authenticator.
 *
 * Hashes crd->crd_len bytes starting at crd->crd_skip of 'buf' using the
 * (possibly rekeyed) context in 'sw', then writes the first sw_mlen bytes
 * of the digest (or the full digest if sw_mlen is 0) at crd->crd_inject.
 * Returns 0 or an errno value.
 */
static int
swcr_authcompute(struct cryptodesc *crd, struct swcr_data *sw, caddr_t buf,
    int flags)
{
	unsigned char aalg[HASH_MAX_LEN];
	struct auth_hash *axf;
	union authctx ctx;
	int err;

	if (sw->sw_ictx == 0)
		return EINVAL;

	axf = sw->sw_axf;

	/* Per-request key: recompute the precomputed contexts first. */
	if (crd->crd_flags & CRD_F_KEY_EXPLICIT) {
		err = swcr_authprepare(axf, sw, crd->crd_key, crd->crd_klen);
		if (err != 0)
			return err;
	}

	/* Work on a copy so the session's precomputed state is preserved. */
	bcopy(sw->sw_ictx, &ctx, axf->ctxsize);

	/*
	 * Feed the payload through Update(); the cast adapts the xform's
	 * Update signature to crypto_apply()'s callback type.
	 */
	err = crypto_apply(flags, buf, crd->crd_skip, crd->crd_len,
	    (int (*)(void *, void *, unsigned int))axf->Update, (caddr_t)&ctx);
	if (err)
		return err;

	switch (sw->sw_alg) {
	case CRYPTO_SHA1:
	case CRYPTO_SHA2_224:
	case CRYPTO_SHA2_256:
	case CRYPTO_SHA2_384:
	case CRYPTO_SHA2_512:
		/* Plain digest: no outer pass. */
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_HMAC:
	case CRYPTO_SHA1_HMAC:
	case CRYPTO_SHA2_224_HMAC:
	case CRYPTO_SHA2_256_HMAC:
	case CRYPTO_SHA2_384_HMAC:
	case CRYPTO_SHA2_512_HMAC:
	case CRYPTO_RIPEMD160_HMAC:
		if (sw->sw_octx == NULL)
			return EINVAL;

		/* HMAC outer pass: H(opad-ctx || inner digest). */
		axf->Final(aalg, &ctx);
		bcopy(sw->sw_octx, &ctx, axf->ctxsize);
		axf->Update(&ctx, aalg, axf->hashsize);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_MD5_KPDK:
	case CRYPTO_SHA1_KPDK:
		/* If we have no key saved, return error. */
		if (sw->sw_octx == NULL)
			return EINVAL;

		/*
		 * Add the trailing copy of the key (see comment in
		 * swcr_authprepare()) after the data:
		 *   ALGO( .., key, algofill )
		 * and let Final() do the proper, natural "algofill"
		 * padding.
		 */
		axf->Update(&ctx, sw->sw_octx, sw->sw_klen);
		axf->Final(aalg, &ctx);
		break;

	case CRYPTO_BLAKE2B:
	case CRYPTO_BLAKE2S:
	case CRYPTO_NULL_HMAC:
	case CRYPTO_POLY1305:
		/* Keyed state was set up at Init time; just finalize. */
		axf->Final(aalg, &ctx);
		break;
	}

	/* Inject the authentication data */
	crypto_copyback(flags, buf, crd->crd_inject,
	    sw->sw_mlen == 0 ? axf->hashsize : sw->sw_mlen, aalg);
	return 0;
}
485
/*
 * swcr_authenc() carries lengths in int; these compile-time checks prove
 * an int length can never exceed the GCM plaintext/AAD limits.
 */
CTASSERT(INT_MAX <= (1ll<<39) - 256);	/* GCM: plain text < 2^39-256 */
CTASSERT(INT_MAX <= (uint64_t)-1);	/* GCM: associated data <= 2^64-1 */
488
/*
 * Apply a combined encryption-authentication transformation (AES-GCM /
 * AES-GMAC).
 *
 * The request must carry exactly one cipher descriptor (GCM_16 or GMAC)
 * and one matching MAC descriptor (AES_*_NIST_GMAC).  On encrypt the
 * payload is encrypted, MAC'ed, and the tag written at crda->crd_inject;
 * on decrypt the tag is verified first (constant time) and the payload is
 * only decrypted after a successful match.  Returns 0, EINVAL, or EBADMSG
 * on tag mismatch.
 */
static int
swcr_authenc(struct cryptop *crp)
{
	uint32_t blkbuf[howmany(EALG_MAX_BLOCK_LEN, sizeof(uint32_t))];
	u_char *blk = (u_char *)blkbuf;
	u_char aalg[AALG_MAX_RESULT_LEN];
	u_char uaalg[AALG_MAX_RESULT_LEN];
	u_char iv[EALG_MAX_BLOCK_LEN];
	union authctx ctx;
	struct swcr_session *ses;
	struct cryptodesc *crd, *crda = NULL, *crde = NULL;
	struct swcr_data *sw, *swa, *swe = NULL;
	struct auth_hash *axf = NULL;
	struct enc_xform *exf = NULL;
	caddr_t buf = (caddr_t)crp->crp_buf;
	uint32_t *blkp;
	int aadlen, blksz, i, ivlen, len, iskip, oskip, r;

	/* iskip/oskip are never set below; they stay 0 for this AEAD path. */
	ivlen = blksz = iskip = oskip = 0;

	ses = crypto_get_driver_session(crp->crp_session);

	/* Pair up the cipher (crde/swe) and MAC (crda/swa) descriptors. */
	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
		for (i = 0; i < nitems(ses->swcr_algorithms) &&
		    ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
			;
		if (i == nitems(ses->swcr_algorithms))
			return (EINVAL);

		sw = &ses->swcr_algorithms[i];
		switch (sw->sw_alg) {
		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_NIST_GMAC:
			swe = sw;
			crde = crd;
			exf = swe->sw_exf;
			ivlen = 12;	/* 96-bit GCM nonce */
			break;
		case CRYPTO_AES_128_NIST_GMAC:
		case CRYPTO_AES_192_NIST_GMAC:
		case CRYPTO_AES_256_NIST_GMAC:
			swa = sw;
			crda = crd;
			axf = swa->sw_axf;
			if (swa->sw_ictx == 0)
				return (EINVAL);
			bcopy(swa->sw_ictx, &ctx, axf->ctxsize);
			blksz = axf->blocksize;
			break;
		default:
			return (EINVAL);
		}
	}
	if (crde == NULL || crda == NULL)
		return (EINVAL);

	/* GCM cannot recover its nonce from the buffer; it must be given. */
	if (crde->crd_alg == CRYPTO_AES_NIST_GCM_16 &&
	    (crde->crd_flags & CRD_F_IV_EXPLICIT) == 0)
		return (EINVAL);

	if (crde->crd_klen != crda->crd_klen)
		return (EINVAL);

	/* Initialize the IV */
	if (crde->crd_flags & CRD_F_ENCRYPT) {
		/* IV explicitly provided ? */
		if (crde->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crde->crd_iv, iv, ivlen);
		else
			arc4rand(iv, ivlen, 0);

		/* Do we need to write the IV */
		if (!(crde->crd_flags & CRD_F_IV_PRESENT))
			crypto_copyback(crp->crp_flags, buf, crde->crd_inject,
			    ivlen, iv);

	} else {	/* Decryption */
		/* IV explicitly provided ? */
		if (crde->crd_flags & CRD_F_IV_EXPLICIT)
			bcopy(crde->crd_iv, iv, ivlen);
		else {
			/* Get IV off buf */
			crypto_copydata(crp->crp_flags, buf, crde->crd_inject,
			    ivlen, iv);
		}
	}

	/* Supply MAC with IV */
	if (axf->Reinit)
		axf->Reinit(&ctx, iv, ivlen);

	/* Supply MAC with AAD */
	aadlen = crda->crd_len;		/* saved for the GCM length block */

	/* Feed AAD in MAC-blocksize chunks, zero-padding the final one. */
	for (i = iskip; i < crda->crd_len; i += blksz) {
		len = MIN(crda->crd_len - i, blksz - oskip);
		crypto_copydata(crp->crp_flags, buf, crda->crd_skip + i, len,
		    blk + oskip);
		bzero(blk + len + oskip, blksz - len - oskip);
		axf->Update(&ctx, blk, blksz);
		oskip = 0; /* reset initial output offset */
	}

	if (exf->reinit)
		exf->reinit(swe->sw_kschedule, iv);

	/*
	 * Do encryption/decryption with MAC.  On encrypt this is
	 * encrypt-then-MAC; on decrypt only the MAC is computed here --
	 * the actual decryption is deferred until the tag verifies below.
	 */
	for (i = 0; i < crde->crd_len; i += len) {
		if (exf->encrypt_multi != NULL) {
			len = rounddown(crde->crd_len - i, blksz);
			if (len == 0)
				len = blksz;
			else
				len = MIN(len, sizeof(blkbuf));
		} else
			len = blksz;
		len = MIN(crde->crd_len - i, len);
		if (len < blksz)
			bzero(blk, blksz);	/* zero-pad a short tail */
		crypto_copydata(crp->crp_flags, buf, crde->crd_skip + i, len,
		    blk);
		if (crde->crd_flags & CRD_F_ENCRYPT) {
			if (exf->encrypt_multi != NULL)
				exf->encrypt_multi(swe->sw_kschedule, blk,
				    len);
			else
				exf->encrypt(swe->sw_kschedule, blk);
			axf->Update(&ctx, blk, len);
			crypto_copyback(crp->crp_flags, buf,
			    crde->crd_skip + i, len, blk);
		} else {
			axf->Update(&ctx, blk, len);
		}
	}

	/* Do any required special finalization */
	switch (crda->crd_alg) {
	case CRYPTO_AES_128_NIST_GMAC:
	case CRYPTO_AES_192_NIST_GMAC:
	case CRYPTO_AES_256_NIST_GMAC:
		/*
		 * GCM length block: bit length of the AAD in the second
		 * 32-bit word, bit length of the ciphertext in the fourth.
		 * The upper halves of both 64-bit lengths stay zero; the
		 * CTASSERTs above this function guarantee that suffices.
		 */
		bzero(blk, blksz);
		blkp = (uint32_t *)blk + 1;
		*blkp = htobe32(aadlen * 8);
		blkp = (uint32_t *)blk + 3;
		*blkp = htobe32(crde->crd_len * 8);
		axf->Update(&ctx, blk, blksz);
		break;
	}

	/* Finalize MAC */
	axf->Final(aalg, &ctx);

	/* Validate tag */
	if (!(crde->crd_flags & CRD_F_ENCRYPT)) {
		crypto_copydata(crp->crp_flags, buf, crda->crd_inject,
		    axf->hashsize, uaalg);

		/* Constant-time compare to avoid a timing oracle. */
		r = timingsafe_bcmp(aalg, uaalg, axf->hashsize);
		if (r == 0) {
			/* tag matches, decrypt data */
			for (i = 0; i < crde->crd_len; i += blksz) {
				len = MIN(crde->crd_len - i, blksz);
				if (len < blksz)
					bzero(blk, blksz);
				crypto_copydata(crp->crp_flags, buf,
				    crde->crd_skip + i, len, blk);
				exf->decrypt(swe->sw_kschedule, blk);
				crypto_copyback(crp->crp_flags, buf,
				    crde->crd_skip + i, len, blk);
			}
		} else
			return (EBADMSG);
	} else {
		/* Inject the authentication data */
		crypto_copyback(crp->crp_flags, buf, crda->crd_inject,
		    axf->hashsize, aalg);
	}

	return (0);
}
673
674 /*
675 * Apply a compression/decompression algorithm
676 */
677 static int
678 swcr_compdec(struct cryptodesc *crd, struct swcr_data *sw,
679 caddr_t buf, int flags)
680 {
681 u_int8_t *data, *out;
682 struct comp_algo *cxf;
683 int adj;
684 u_int32_t result;
685
686 cxf = sw->sw_cxf;
687
688 /* We must handle the whole buffer of data in one time
689 * then if there is not all the data in the mbuf, we must
690 * copy in a buffer.
691 */
692
693 data = malloc(crd->crd_len, M_CRYPTO_DATA, M_NOWAIT);
694 if (data == NULL)
695 return (EINVAL);
696 crypto_copydata(flags, buf, crd->crd_skip, crd->crd_len, data);
697
698 if (crd->crd_flags & CRD_F_COMP)
699 result = cxf->compress(data, crd->crd_len, &out);
700 else
701 result = cxf->decompress(data, crd->crd_len, &out);
702
703 free(data, M_CRYPTO_DATA);
704 if (result == 0)
705 return EINVAL;
706
707 /* Copy back the (de)compressed data. m_copyback is
708 * extending the mbuf as necessary.
709 */
710 sw->sw_size = result;
711 /* Check the compressed size when doing compression */
712 if (crd->crd_flags & CRD_F_COMP) {
713 if (result >= crd->crd_len) {
714 /* Compression was useless, we lost time */
715 free(out, M_CRYPTO_DATA);
716 return 0;
717 }
718 }
719
720 crypto_copyback(flags, buf, crd->crd_skip, result, out);
721 if (result < crd->crd_len) {
722 adj = result - crd->crd_len;
723 if (flags & CRYPTO_F_IMBUF) {
724 adj = result - crd->crd_len;
725 m_adj((struct mbuf *)buf, adj);
726 } else if (flags & CRYPTO_F_IOV) {
727 struct uio *uio = (struct uio *)buf;
728 int ind;
729
730 adj = crd->crd_len - result;
731 ind = uio->uio_iovcnt - 1;
732
733 while (adj > 0 && ind >= 0) {
734 if (adj < uio->uio_iov[ind].iov_len) {
735 uio->uio_iov[ind].iov_len -= adj;
736 break;
737 }
738
739 adj -= uio->uio_iov[ind].iov_len;
740 uio->uio_iov[ind].iov_len = 0;
741 ind--;
742 uio->uio_iovcnt--;
743 }
744 }
745 }
746 free(out, M_CRYPTO_DATA);
747 return 0;
748 }
749
750 /*
751 * Generate a new software session.
752 */
753 static int
754 swcr_newsession(device_t dev, crypto_session_t cses, struct cryptoini *cri)
755 {
756 struct swcr_session *ses;
757 struct swcr_data *swd;
758 struct auth_hash *axf;
759 struct enc_xform *txf;
760 struct comp_algo *cxf;
761 size_t i;
762 int len;
763 int error;
764
765 if (cses == NULL || cri == NULL)
766 return EINVAL;
767
768 ses = crypto_get_driver_session(cses);
769 mtx_init(&ses->swcr_lock, "swcr session lock", NULL, MTX_DEF);
770
771 for (i = 0; cri != NULL && i < nitems(ses->swcr_algorithms); i++) {
772 swd = &ses->swcr_algorithms[i];
773
774 switch (cri->cri_alg) {
775 case CRYPTO_DES_CBC:
776 txf = &enc_xform_des;
777 goto enccommon;
778 case CRYPTO_3DES_CBC:
779 txf = &enc_xform_3des;
780 goto enccommon;
781 case CRYPTO_BLF_CBC:
782 txf = &enc_xform_blf;
783 goto enccommon;
784 case CRYPTO_CAST_CBC:
785 txf = &enc_xform_cast5;
786 goto enccommon;
787 case CRYPTO_SKIPJACK_CBC:
788 txf = &enc_xform_skipjack;
789 goto enccommon;
790 case CRYPTO_RIJNDAEL128_CBC:
791 txf = &enc_xform_rijndael128;
792 goto enccommon;
793 case CRYPTO_AES_XTS:
794 txf = &enc_xform_aes_xts;
795 goto enccommon;
796 case CRYPTO_AES_ICM:
797 txf = &enc_xform_aes_icm;
798 goto enccommon;
799 case CRYPTO_AES_NIST_GCM_16:
800 txf = &enc_xform_aes_nist_gcm;
801 goto enccommon;
802 case CRYPTO_AES_NIST_GMAC:
803 txf = &enc_xform_aes_nist_gmac;
804 swd->sw_exf = txf;
805 break;
806 case CRYPTO_CAMELLIA_CBC:
807 txf = &enc_xform_camellia;
808 goto enccommon;
809 case CRYPTO_NULL_CBC:
810 txf = &enc_xform_null;
811 goto enccommon;
812 case CRYPTO_CHACHA20:
813 txf = &enc_xform_chacha20;
814 goto enccommon;
815 enccommon:
816 if (cri->cri_key != NULL) {
817 error = txf->setkey(&swd->sw_kschedule,
818 cri->cri_key, cri->cri_klen / 8);
819 if (error) {
820 swcr_freesession(dev, cses);
821 return error;
822 }
823 }
824 swd->sw_exf = txf;
825 break;
826
827 case CRYPTO_MD5_HMAC:
828 axf = &auth_hash_hmac_md5;
829 goto authcommon;
830 case CRYPTO_SHA1_HMAC:
831 axf = &auth_hash_hmac_sha1;
832 goto authcommon;
833 case CRYPTO_SHA2_224_HMAC:
834 axf = &auth_hash_hmac_sha2_224;
835 goto authcommon;
836 case CRYPTO_SHA2_256_HMAC:
837 axf = &auth_hash_hmac_sha2_256;
838 goto authcommon;
839 case CRYPTO_SHA2_384_HMAC:
840 axf = &auth_hash_hmac_sha2_384;
841 goto authcommon;
842 case CRYPTO_SHA2_512_HMAC:
843 axf = &auth_hash_hmac_sha2_512;
844 goto authcommon;
845 case CRYPTO_NULL_HMAC:
846 axf = &auth_hash_null;
847 goto authcommon;
848 case CRYPTO_RIPEMD160_HMAC:
849 axf = &auth_hash_hmac_ripemd_160;
850 authcommon:
851 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
852 M_NOWAIT);
853 if (swd->sw_ictx == NULL) {
854 swcr_freesession(dev, cses);
855 return ENOBUFS;
856 }
857
858 swd->sw_octx = malloc(axf->ctxsize, M_CRYPTO_DATA,
859 M_NOWAIT);
860 if (swd->sw_octx == NULL) {
861 swcr_freesession(dev, cses);
862 return ENOBUFS;
863 }
864
865 if (cri->cri_key != NULL) {
866 error = swcr_authprepare(axf, swd,
867 cri->cri_key, cri->cri_klen);
868 if (error != 0) {
869 swcr_freesession(dev, cses);
870 return error;
871 }
872 }
873
874 swd->sw_mlen = cri->cri_mlen;
875 swd->sw_axf = axf;
876 break;
877
878 case CRYPTO_MD5_KPDK:
879 axf = &auth_hash_key_md5;
880 goto auth2common;
881
882 case CRYPTO_SHA1_KPDK:
883 axf = &auth_hash_key_sha1;
884 auth2common:
885 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
886 M_NOWAIT);
887 if (swd->sw_ictx == NULL) {
888 swcr_freesession(dev, cses);
889 return ENOBUFS;
890 }
891
892 swd->sw_octx = malloc(cri->cri_klen / 8,
893 M_CRYPTO_DATA, M_NOWAIT);
894 if (swd->sw_octx == NULL) {
895 swcr_freesession(dev, cses);
896 return ENOBUFS;
897 }
898
899 /* Store the key so we can "append" it to the payload */
900 if (cri->cri_key != NULL) {
901 error = swcr_authprepare(axf, swd,
902 cri->cri_key, cri->cri_klen);
903 if (error != 0) {
904 swcr_freesession(dev, cses);
905 return error;
906 }
907 }
908
909 swd->sw_mlen = cri->cri_mlen;
910 swd->sw_axf = axf;
911 break;
912 #ifdef notdef
913 case CRYPTO_MD5:
914 axf = &auth_hash_md5;
915 goto auth3common;
916 #endif
917
918 case CRYPTO_SHA1:
919 axf = &auth_hash_sha1;
920 goto auth3common;
921 case CRYPTO_SHA2_224:
922 axf = &auth_hash_sha2_224;
923 goto auth3common;
924 case CRYPTO_SHA2_256:
925 axf = &auth_hash_sha2_256;
926 goto auth3common;
927 case CRYPTO_SHA2_384:
928 axf = &auth_hash_sha2_384;
929 goto auth3common;
930 case CRYPTO_SHA2_512:
931 axf = &auth_hash_sha2_512;
932
933 auth3common:
934 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
935 M_NOWAIT);
936 if (swd->sw_ictx == NULL) {
937 swcr_freesession(dev, cses);
938 return ENOBUFS;
939 }
940
941 axf->Init(swd->sw_ictx);
942 swd->sw_mlen = cri->cri_mlen;
943 swd->sw_axf = axf;
944 break;
945
946 case CRYPTO_AES_128_NIST_GMAC:
947 axf = &auth_hash_nist_gmac_aes_128;
948 goto auth4common;
949
950 case CRYPTO_AES_192_NIST_GMAC:
951 axf = &auth_hash_nist_gmac_aes_192;
952 goto auth4common;
953
954 case CRYPTO_AES_256_NIST_GMAC:
955 axf = &auth_hash_nist_gmac_aes_256;
956 auth4common:
957 len = cri->cri_klen / 8;
958 if (len != 16 && len != 24 && len != 32) {
959 swcr_freesession(dev, cses);
960 return EINVAL;
961 }
962
963 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
964 M_NOWAIT);
965 if (swd->sw_ictx == NULL) {
966 swcr_freesession(dev, cses);
967 return ENOBUFS;
968 }
969 axf->Init(swd->sw_ictx);
970 axf->Setkey(swd->sw_ictx, cri->cri_key, len);
971 swd->sw_axf = axf;
972 break;
973
974 case CRYPTO_BLAKE2B:
975 axf = &auth_hash_blake2b;
976 goto auth5common;
977 case CRYPTO_BLAKE2S:
978 axf = &auth_hash_blake2s;
979 goto auth5common;
980 case CRYPTO_POLY1305:
981 axf = &auth_hash_poly1305;
982 auth5common:
983 swd->sw_ictx = malloc(axf->ctxsize, M_CRYPTO_DATA,
984 M_NOWAIT);
985 if (swd->sw_ictx == NULL) {
986 swcr_freesession(dev, cses);
987 return ENOBUFS;
988 }
989 axf->Setkey(swd->sw_ictx, cri->cri_key,
990 cri->cri_klen / 8);
991 axf->Init(swd->sw_ictx);
992 swd->sw_axf = axf;
993 break;
994
995 case CRYPTO_DEFLATE_COMP:
996 cxf = &comp_algo_deflate;
997 swd->sw_cxf = cxf;
998 break;
999 default:
1000 swcr_freesession(dev, cses);
1001 return EINVAL;
1002 }
1003
1004 swd->sw_alg = cri->cri_alg;
1005 cri = cri->cri_next;
1006 ses->swcr_nalgs++;
1007 }
1008
1009 if (cri != NULL) {
1010 CRYPTDEB("Bogus session request for three or more algorithms");
1011 return EINVAL;
1012 }
1013 return 0;
1014 }
1015
1016 static void
1017 swcr_freesession(device_t dev, crypto_session_t cses)
1018 {
1019 struct swcr_session *ses;
1020 struct swcr_data *swd;
1021 struct enc_xform *txf;
1022 struct auth_hash *axf;
1023 size_t i;
1024
1025 ses = crypto_get_driver_session(cses);
1026
1027 mtx_destroy(&ses->swcr_lock);
1028 for (i = 0; i < nitems(ses->swcr_algorithms); i++) {
1029 swd = &ses->swcr_algorithms[i];
1030
1031 switch (swd->sw_alg) {
1032 case CRYPTO_DES_CBC:
1033 case CRYPTO_3DES_CBC:
1034 case CRYPTO_BLF_CBC:
1035 case CRYPTO_CAST_CBC:
1036 case CRYPTO_SKIPJACK_CBC:
1037 case CRYPTO_RIJNDAEL128_CBC:
1038 case CRYPTO_AES_XTS:
1039 case CRYPTO_AES_ICM:
1040 case CRYPTO_AES_NIST_GCM_16:
1041 case CRYPTO_AES_NIST_GMAC:
1042 case CRYPTO_CAMELLIA_CBC:
1043 case CRYPTO_NULL_CBC:
1044 case CRYPTO_CHACHA20:
1045 txf = swd->sw_exf;
1046
1047 if (swd->sw_kschedule)
1048 txf->zerokey(&(swd->sw_kschedule));
1049 break;
1050
1051 case CRYPTO_MD5_HMAC:
1052 case CRYPTO_SHA1_HMAC:
1053 case CRYPTO_SHA2_224_HMAC:
1054 case CRYPTO_SHA2_256_HMAC:
1055 case CRYPTO_SHA2_384_HMAC:
1056 case CRYPTO_SHA2_512_HMAC:
1057 case CRYPTO_RIPEMD160_HMAC:
1058 case CRYPTO_NULL_HMAC:
1059 axf = swd->sw_axf;
1060
1061 if (swd->sw_ictx) {
1062 bzero(swd->sw_ictx, axf->ctxsize);
1063 free(swd->sw_ictx, M_CRYPTO_DATA);
1064 }
1065 if (swd->sw_octx) {
1066 bzero(swd->sw_octx, axf->ctxsize);
1067 free(swd->sw_octx, M_CRYPTO_DATA);
1068 }
1069 break;
1070
1071 case CRYPTO_MD5_KPDK:
1072 case CRYPTO_SHA1_KPDK:
1073 axf = swd->sw_axf;
1074
1075 if (swd->sw_ictx) {
1076 bzero(swd->sw_ictx, axf->ctxsize);
1077 free(swd->sw_ictx, M_CRYPTO_DATA);
1078 }
1079 if (swd->sw_octx) {
1080 bzero(swd->sw_octx, swd->sw_klen);
1081 free(swd->sw_octx, M_CRYPTO_DATA);
1082 }
1083 break;
1084
1085 case CRYPTO_BLAKE2B:
1086 case CRYPTO_BLAKE2S:
1087 case CRYPTO_MD5:
1088 case CRYPTO_POLY1305:
1089 case CRYPTO_SHA1:
1090 case CRYPTO_SHA2_224:
1091 case CRYPTO_SHA2_256:
1092 case CRYPTO_SHA2_384:
1093 case CRYPTO_SHA2_512:
1094 axf = swd->sw_axf;
1095
1096 if (swd->sw_ictx) {
1097 explicit_bzero(swd->sw_ictx, axf->ctxsize);
1098 free(swd->sw_ictx, M_CRYPTO_DATA);
1099 }
1100 break;
1101
1102 case CRYPTO_DEFLATE_COMP:
1103 /* Nothing to do */
1104 break;
1105 }
1106 }
1107 }
1108
/*
 * Process a software request.
 *
 * Dispatches each descriptor in the request to the matching handler
 * (cipher, MAC, AEAD, compression) under the per-session lock, stores the
 * result in crp->crp_etype, and always completes the request via
 * crypto_done().  Returns 0 to the framework; per-request errors travel
 * in crp_etype.
 */
static int
swcr_process(device_t dev, struct cryptop *crp, int hint)
{
	struct swcr_session *ses = NULL;
	struct cryptodesc *crd;
	struct swcr_data *sw;
	size_t i;

	/* Sanity check */
	if (crp == NULL)
		return EINVAL;

	if (crp->crp_desc == NULL || crp->crp_buf == NULL) {
		crp->crp_etype = EINVAL;
		goto done;
	}

	ses = crypto_get_driver_session(crp->crp_session);
	/* Serialize: per-session key schedules and contexts are shared. */
	mtx_lock(&ses->swcr_lock);

	/* Go through crypto descriptors, processing as we go */
	for (crd = crp->crp_desc; crd; crd = crd->crd_next) {
		/*
		 * Find the crypto context.
		 *
		 * XXX Note that the logic here prevents us from having
		 * XXX the same algorithm multiple times in a session
		 * XXX (or rather, we can but it won't give us the right
		 * XXX results). To do that, we'd need some way of differentiating
		 * XXX between the various instances of an algorithm (so we can
		 * XXX locate the correct crypto context).
		 */
		for (i = 0; i < nitems(ses->swcr_algorithms) &&
		    ses->swcr_algorithms[i].sw_alg != crd->crd_alg; i++)
			;

		/* No such context ? */
		if (i == nitems(ses->swcr_algorithms)) {
			crp->crp_etype = EINVAL;
			goto done;
		}
		sw = &ses->swcr_algorithms[i];
		switch (sw->sw_alg) {
		case CRYPTO_DES_CBC:
		case CRYPTO_3DES_CBC:
		case CRYPTO_BLF_CBC:
		case CRYPTO_CAST_CBC:
		case CRYPTO_SKIPJACK_CBC:
		case CRYPTO_RIJNDAEL128_CBC:
		case CRYPTO_AES_XTS:
		case CRYPTO_AES_ICM:
		case CRYPTO_CAMELLIA_CBC:
		case CRYPTO_CHACHA20:
			if ((crp->crp_etype = swcr_encdec(crd, sw,
			    crp->crp_buf, crp->crp_flags)) != 0)
				goto done;
			break;
		case CRYPTO_NULL_CBC:
			/* No-op cipher: always succeeds. */
			crp->crp_etype = 0;
			break;
		case CRYPTO_MD5_HMAC:
		case CRYPTO_SHA1_HMAC:
		case CRYPTO_SHA2_224_HMAC:
		case CRYPTO_SHA2_256_HMAC:
		case CRYPTO_SHA2_384_HMAC:
		case CRYPTO_SHA2_512_HMAC:
		case CRYPTO_RIPEMD160_HMAC:
		case CRYPTO_NULL_HMAC:
		case CRYPTO_MD5_KPDK:
		case CRYPTO_SHA1_KPDK:
		case CRYPTO_MD5:
		case CRYPTO_SHA1:
		case CRYPTO_SHA2_224:
		case CRYPTO_SHA2_256:
		case CRYPTO_SHA2_384:
		case CRYPTO_SHA2_512:
		case CRYPTO_BLAKE2B:
		case CRYPTO_BLAKE2S:
		case CRYPTO_POLY1305:
			if ((crp->crp_etype = swcr_authcompute(crd, sw,
			    crp->crp_buf, crp->crp_flags)) != 0)
				goto done;
			break;

		case CRYPTO_AES_NIST_GCM_16:
		case CRYPTO_AES_NIST_GMAC:
		case CRYPTO_AES_128_NIST_GMAC:
		case CRYPTO_AES_192_NIST_GMAC:
		case CRYPTO_AES_256_NIST_GMAC:
			/* AEAD consumes the whole request; skip remaining crds. */
			crp->crp_etype = swcr_authenc(crp);
			goto done;

		case CRYPTO_DEFLATE_COMP:
			if ((crp->crp_etype = swcr_compdec(crd, sw,
			    crp->crp_buf, crp->crp_flags)) != 0)
				goto done;
			else
				crp->crp_olen = (int)sw->sw_size;
			break;

		default:
			/* Unknown/unsupported algorithm */
			crp->crp_etype = EINVAL;
			goto done;
		}
	}

done:
	if (ses)
		mtx_unlock(&ses->swcr_lock);
	crypto_done(crp);
	return 0;
}
1225
1226 static void
1227 swcr_identify(driver_t *drv, device_t parent)
1228 {
1229 /* NB: order 10 is so we get attached after h/w devices */
1230 if (device_find_child(parent, "cryptosoft", -1) == NULL &&
1231 BUS_ADD_CHILD(parent, 10, "cryptosoft", 0) == 0)
1232 panic("cryptosoft: could not attach");
1233 }
1234
/*
 * Newbus probe hook: set the device description; NOWILDCARD so we only
 * match the child added by swcr_identify().
 */
static int
swcr_probe(device_t dev)
{
	device_set_desc(dev, "software crypto");
	return (BUS_PROBE_NOWILDCARD);
}
1241
1242 static int
1243 swcr_attach(device_t dev)
1244 {
1245 memset(hmac_ipad_buffer, HMAC_IPAD_VAL, HMAC_MAX_BLOCK_LEN);
1246 memset(hmac_opad_buffer, HMAC_OPAD_VAL, HMAC_MAX_BLOCK_LEN);
1247
1248 swcr_id = crypto_get_driverid(dev, sizeof(struct swcr_session),
1249 CRYPTOCAP_F_SOFTWARE | CRYPTOCAP_F_SYNC);
1250 if (swcr_id < 0) {
1251 device_printf(dev, "cannot initialize!");
1252 return ENOMEM;
1253 }
1254 #define REGISTER(alg) \
1255 crypto_register(swcr_id, alg, 0,0)
1256 REGISTER(CRYPTO_DES_CBC);
1257 REGISTER(CRYPTO_3DES_CBC);
1258 REGISTER(CRYPTO_BLF_CBC);
1259 REGISTER(CRYPTO_CAST_CBC);
1260 REGISTER(CRYPTO_SKIPJACK_CBC);
1261 REGISTER(CRYPTO_NULL_CBC);
1262 REGISTER(CRYPTO_MD5_HMAC);
1263 REGISTER(CRYPTO_SHA1_HMAC);
1264 REGISTER(CRYPTO_SHA2_224_HMAC);
1265 REGISTER(CRYPTO_SHA2_256_HMAC);
1266 REGISTER(CRYPTO_SHA2_384_HMAC);
1267 REGISTER(CRYPTO_SHA2_512_HMAC);
1268 REGISTER(CRYPTO_RIPEMD160_HMAC);
1269 REGISTER(CRYPTO_NULL_HMAC);
1270 REGISTER(CRYPTO_MD5_KPDK);
1271 REGISTER(CRYPTO_SHA1_KPDK);
1272 REGISTER(CRYPTO_MD5);
1273 REGISTER(CRYPTO_SHA1);
1274 REGISTER(CRYPTO_SHA2_224);
1275 REGISTER(CRYPTO_SHA2_256);
1276 REGISTER(CRYPTO_SHA2_384);
1277 REGISTER(CRYPTO_SHA2_512);
1278 REGISTER(CRYPTO_RIJNDAEL128_CBC);
1279 REGISTER(CRYPTO_AES_XTS);
1280 REGISTER(CRYPTO_AES_ICM);
1281 REGISTER(CRYPTO_AES_NIST_GCM_16);
1282 REGISTER(CRYPTO_AES_NIST_GMAC);
1283 REGISTER(CRYPTO_AES_128_NIST_GMAC);
1284 REGISTER(CRYPTO_AES_192_NIST_GMAC);
1285 REGISTER(CRYPTO_AES_256_NIST_GMAC);
1286 REGISTER(CRYPTO_CAMELLIA_CBC);
1287 REGISTER(CRYPTO_DEFLATE_COMP);
1288 REGISTER(CRYPTO_BLAKE2B);
1289 REGISTER(CRYPTO_BLAKE2S);
1290 REGISTER(CRYPTO_CHACHA20);
1291 REGISTER(CRYPTO_POLY1305);
1292 #undef REGISTER
1293
1294 return 0;
1295 }
1296
/*
 * Newbus detach hook: withdraw every algorithm registered in
 * swcr_attach().
 */
static int
swcr_detach(device_t dev)
{
	crypto_unregister_all(swcr_id);
	return 0;
}
1303
/* kobj method table wiring this driver into newbus and the crypto framework. */
static device_method_t swcr_methods[] = {
	DEVMETHOD(device_identify,	swcr_identify),
	DEVMETHOD(device_probe,		swcr_probe),
	DEVMETHOD(device_attach,	swcr_attach),
	DEVMETHOD(device_detach,	swcr_detach),

	DEVMETHOD(cryptodev_newsession,	swcr_newsession),
	DEVMETHOD(cryptodev_freesession,swcr_freesession),
	DEVMETHOD(cryptodev_process,	swcr_process),

	{0, 0},			/* table terminator */
};
1316
/* Newbus driver description for the "cryptosoft" pseudo-device. */
static driver_t swcr_driver = {
	"cryptosoft",
	swcr_methods,
	0,		/* NB: no softc */
};
static devclass_t swcr_devclass;
1323
/*
 * NB: We explicitly reference the crypto module so we
 * get the necessary ordering when built as a loadable
 * module. This is required because we bundle the crypto
 * module code together with the cryptosoft driver (otherwise
 * normal module dependencies would handle things).
 */
extern int crypto_modevent(struct module *, int, void *);
/* XXX where to attach */
/* Attach under nexus and route module events through crypto_modevent. */
DRIVER_MODULE(cryptosoft, nexus, swcr_driver, swcr_devclass, crypto_modevent,0);
MODULE_VERSION(cryptosoft, 1);
MODULE_DEPEND(cryptosoft, crypto, 1, 1, 1);
Cache object: adbf80bdd021701de04924f66ff25968
|