1 /*
2 * Scatterlist Cryptographic API.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 *
7 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
8 * and Nettle, by Niels Möller.
9 *
10 * This program is free software; you can redistribute it and/or modify it
11 * under the terms of the GNU General Public License as published by the Free
12 * Software Foundation; either version 2 of the License, or (at your option)
13 * any later version.
14 *
15 */
16 #ifndef _LINUX_CRYPTO_H
17 #define _LINUX_CRYPTO_H
18
19 #include <linux/module.h>
20 #include <linux/kernel.h>
21 #include <linux/types.h>
22 #include <linux/list.h>
23 #include <linux/string.h>
24 #include <asm/page.h>
25
26 /*
27 * Algorithm masks and types.
28 */
29 #define CRYPTO_ALG_TYPE_MASK 0x000000ff
30 #define CRYPTO_ALG_TYPE_CIPHER 0x00000001
31 #define CRYPTO_ALG_TYPE_DIGEST 0x00000002
32 #define CRYPTO_ALG_TYPE_COMPRESS 0x00000004
33
34 /*
35 * Transform masks and values (for crt_flags).
36 */
37 #define CRYPTO_TFM_MODE_MASK 0x000000ff
38 #define CRYPTO_TFM_REQ_MASK 0x000fff00
39 #define CRYPTO_TFM_RES_MASK 0xfff00000
40
41 #define CRYPTO_TFM_MODE_ECB 0x00000001
42 #define CRYPTO_TFM_MODE_CBC 0x00000002
43 #define CRYPTO_TFM_MODE_CFB 0x00000004
44 #define CRYPTO_TFM_MODE_CTR 0x00000008
45
46 #define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
47 #define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
48 #define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
49 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
50 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
51 #define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000
52
53 /*
54 * Miscellaneous stuff.
55 */
56 #define CRYPTO_UNSPEC 0
57 #define CRYPTO_MAX_ALG_NAME 64
58
59 struct scatterlist;
60
61 /*
62 * Algorithms: modular crypto algorithm implementations, managed
63 * via crypto_register_alg() and crypto_unregister_alg().
64 */
/*
 * Block-cipher implementation hooks supplied by an algorithm module.
 * The cia_* entry points work on a raw per-tfm context (ctx), one
 * block at a time; scatterlist walking is handled by the core.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;	/* smallest accepted key length, bytes */
	unsigned int cia_max_keysize;	/* largest accepted key length, bytes */
	unsigned int cia_ivsize;	/* IV size, bytes (see crypto_tfm_alg_ivsize) */
	/*
	 * Install a key.  On failure the implementation may report detail
	 * via CRYPTO_TFM_RES_* bits in *flags and return non-zero.
	 */
	int (*cia_setkey)(void *ctx, const u8 *key,
	                  unsigned int keylen, u32 *flags);
	/* Encrypt/decrypt one block from src into dst (may alias). */
	void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
	void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);
};
74
/*
 * Message-digest implementation hooks supplied by an algorithm module.
 * Standard init/update/final interface over a raw per-tfm context.
 */
struct digest_alg {
	unsigned int dia_digestsize;	/* size of the final digest, bytes */
	void (*dia_init)(void *ctx);	/* reset hash state */
	void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
	void (*dia_final)(void *ctx, u8 *out);	/* write digest to out */
};
81
/*
 * Compression implementation hooks supplied by an algorithm module.
 * *dlen is in/out: capacity of dst on entry, bytes produced on return
 * (presumably — confirm against the deflate implementation).
 */
struct compress_alg {
	int (*coa_init)(void *ctx);	/* allocate/initialise workspace */
	void (*coa_exit)(void *ctx);	/* release workspace */
	int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};
90
91 #define cra_cipher cra_u.cipher
92 #define cra_digest cra_u.digest
93 #define cra_compress cra_u.compress
94
/*
 * One registered algorithm.  Instances are linked into the global
 * algorithm list by crypto_register_alg() and looked up by cra_name.
 */
struct crypto_alg {
	struct list_head cra_list;	/* entry in the global algorithm list */
	u32 cra_flags;			/* CRYPTO_ALG_TYPE_* in the low byte */
	unsigned int cra_blocksize;	/* block size in bytes */
	unsigned int cra_ctxsize;	/* size of the per-tfm private context */
	const char cra_name[CRYPTO_MAX_ALG_NAME];	/* lookup name */

	/* Type-specific hooks; selected by cra_flags & CRYPTO_ALG_TYPE_MASK. */
	union {
		struct cipher_alg cipher;
		struct digest_alg digest;
		struct compress_alg compress;
	} cra_u;

	struct module *cra_module;	/* owning module, NULL if built in */
};
110
111 /*
112 * Algorithm registration interface.
113 */
114 int crypto_register_alg(struct crypto_alg *alg);
115 int crypto_unregister_alg(struct crypto_alg *alg);
116
117 /*
118 * Algorithm query interface.
119 */
120 int crypto_alg_available(const char *name, u32 flags);
121
122 /*
123 * Transforms: user-instantiated objects which encapsulate algorithms
124 * and core processing logic. Managed via crypto_alloc_tfm() and
125 * crypto_free_tfm(), as well as the various helpers below.
126 */
127 struct crypto_tfm;
128
/*
 * Per-transform cipher state and entry points.  Unlike the per-block
 * cia_* hooks in struct cipher_alg, the cit_* operations work on
 * scatterlists.
 */
struct cipher_tfm {
	void *cit_iv;	/* IV buffer accessed by crypto_cipher_set_iv/get_iv */
	u32 cit_mode;	/* chaining mode: one of CRYPTO_TFM_MODE_* */
	int (*cit_setkey)(struct crypto_tfm *tfm,
	                  const u8 *key, unsigned int keylen);
	int (*cit_encrypt)(struct crypto_tfm *tfm,
	                   struct scatterlist *dst,
	                   struct scatterlist *src,
	                   unsigned int nbytes);
	/* _iv variants take an explicit IV; invalid in ECB mode (the
	 * crypto_cipher_*_iv wrappers BUG on ECB). */
	int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
	                      struct scatterlist *dst,
	                      struct scatterlist *src,
	                      unsigned int nbytes, u8 *iv);
	int (*cit_decrypt)(struct crypto_tfm *tfm,
	                   struct scatterlist *dst,
	                   struct scatterlist *src,
	                   unsigned int nbytes);
	int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
	                      struct scatterlist *dst,
	                      struct scatterlist *src,
	                      unsigned int nbytes, u8 *iv);
	/* XOR one block of src into dst; presumably used by chaining
	 * modes — confirm against the mode implementations. */
	void (*cit_xor_block)(u8 *dst, const u8 *src);
};
152
/*
 * Per-transform digest entry points; scatterlist-based counterparts
 * of the dia_* hooks in struct digest_alg.
 */
struct digest_tfm {
	void (*dit_init)(struct crypto_tfm *tfm);
	void (*dit_update)(struct crypto_tfm *tfm,
	                   struct scatterlist *sg, unsigned int nsg);
	void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
	/* One-shot: init + update + final over nsg scatterlist entries. */
	void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
	                   unsigned int nsg, u8 *out);
#ifdef CONFIG_CRYPTO_HMAC
	void *dit_hmac_block;	/* scratch block for the HMAC helpers below */
#endif
};
164
/*
 * Per-transform compression entry points; flat-buffer interface,
 * mirroring the coa_* hooks in struct compress_alg.
 */
struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
	                    const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
	                      const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};
173
174 #define crt_cipher crt_u.cipher
175 #define crt_digest crt_u.digest
176 #define crt_compress crt_u.compress
177
/*
 * A transform: a user-instantiated handle binding one algorithm to
 * per-user state.  Created by crypto_alloc_tfm(), destroyed by
 * crypto_free_tfm().
 */
struct crypto_tfm {

	u32 crt_flags;	/* CRYPTO_TFM_{MODE,REQ,RES}_* bits */

	/* Type-specific ops; valid member chosen by crypto_tfm_alg_type(). */
	union {
		struct cipher_tfm cipher;
		struct digest_tfm digest;
		struct compress_tfm compress;
	} crt_u;

	/* Backing algorithm; double-underscore marks it as internal —
	 * use the crypto_tfm_alg_*() accessors instead. */
	struct crypto_alg *__crt_alg;
};
190
191 /*
192 * Transform user interface.
193 */
194
195 /*
196 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
197 * If that fails and the kernel supports dynamically loadable modules, it
198 * will then attempt to load a module of the same name or alias. A refcount
199 * is grabbed on the algorithm which is then associated with the new transform.
200 *
201 * crypto_free_tfm() frees up the transform and any associated resources,
202 * then drops the refcount on the associated algorithm.
203 */
204 struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
205 void crypto_free_tfm(struct crypto_tfm *tfm);
206
207 /*
208 * Transform helpers which query the underlying algorithm.
209 */
210 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
211 {
212 return tfm->__crt_alg->cra_name;
213 }
214
215 static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
216 {
217 struct crypto_alg *alg = tfm->__crt_alg;
218
219 if (alg->cra_module)
220 return alg->cra_module->name;
221 else
222 return NULL;
223 }
224
225 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
226 {
227 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
228 }
229
230 static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
231 {
232 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
233 return tfm->__crt_alg->cra_cipher.cia_min_keysize;
234 }
235
236 static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
237 {
238 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
239 return tfm->__crt_alg->cra_cipher.cia_max_keysize;
240 }
241
242 static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
243 {
244 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
245 return tfm->__crt_alg->cra_cipher.cia_ivsize;
246 }
247
248 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
249 {
250 return tfm->__crt_alg->cra_blocksize;
251 }
252
253 static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
254 {
255 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
256 return tfm->__crt_alg->cra_digest.dia_digestsize;
257 }
258
259 /*
260 * API wrappers.
261 */
262 static inline void crypto_digest_init(struct crypto_tfm *tfm)
263 {
264 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
265 tfm->crt_digest.dit_init(tfm);
266 }
267
268 static inline void crypto_digest_update(struct crypto_tfm *tfm,
269 struct scatterlist *sg,
270 unsigned int nsg)
271 {
272 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
273 tfm->crt_digest.dit_update(tfm, sg, nsg);
274 }
275
276 static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
277 {
278 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
279 tfm->crt_digest.dit_final(tfm, out);
280 }
281
282 static inline void crypto_digest_digest(struct crypto_tfm *tfm,
283 struct scatterlist *sg,
284 unsigned int nsg, u8 *out)
285 {
286 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
287 tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
288 }
289
290 static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
291 const u8 *key, unsigned int keylen)
292 {
293 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
294 return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
295 }
296
297 static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
298 struct scatterlist *dst,
299 struct scatterlist *src,
300 unsigned int nbytes)
301 {
302 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
303 return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
304 }
305
306 static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
307 struct scatterlist *dst,
308 struct scatterlist *src,
309 unsigned int nbytes, u8 *iv)
310 {
311 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
312 BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
313 return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
314 }
315
316 static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
317 struct scatterlist *dst,
318 struct scatterlist *src,
319 unsigned int nbytes)
320 {
321 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
322 return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
323 }
324
325 static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
326 struct scatterlist *dst,
327 struct scatterlist *src,
328 unsigned int nbytes, u8 *iv)
329 {
330 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
331 BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
332 return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
333 }
334
335 static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
336 const u8 *src, unsigned int len)
337 {
338 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
339 memcpy(tfm->crt_cipher.cit_iv, src, len);
340 }
341
342 static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
343 u8 *dst, unsigned int len)
344 {
345 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
346 memcpy(dst, tfm->crt_cipher.cit_iv, len);
347 }
348
349 static inline int crypto_comp_compress(struct crypto_tfm *tfm,
350 const u8 *src, unsigned int slen,
351 u8 *dst, unsigned int *dlen)
352 {
353 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
354 return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
355 }
356
357 static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
358 const u8 *src, unsigned int slen,
359 u8 *dst, unsigned int *dlen)
360 {
361 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
362 return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
363 }
364
365 /*
366 * HMAC support.
367 */
368 #ifdef CONFIG_CRYPTO_HMAC
369 void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
370 void crypto_hmac_update(struct crypto_tfm *tfm,
371 struct scatterlist *sg, unsigned int nsg);
372 void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
373 unsigned int *keylen, u8 *out);
374 void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
375 struct scatterlist *sg, unsigned int nsg, u8 *out);
376 #endif /* CONFIG_CRYPTO_HMAC */
377
378 #endif /* _LINUX_CRYPTO_H */
379