[CRYPTO] skcipher: Add givcrypt operations and givcipher type
include/linux/crypto.h
1 /*
2 * Scatterlist Cryptographic API.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
7 *
8 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
9 * and Nettle, by Niels Möller.
10 *
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
14 * any later version.
15 *
16 */
17 #ifndef _LINUX_CRYPTO_H
18 #define _LINUX_CRYPTO_H
19
20 #include <asm/atomic.h>
21 #include <linux/module.h>
22 #include <linux/kernel.h>
23 #include <linux/list.h>
24 #include <linux/slab.h>
25 #include <linux/string.h>
26 #include <linux/uaccess.h>
27
28 /*
29 * Algorithm masks and types.
30 */
31 #define CRYPTO_ALG_TYPE_MASK 0x0000000f
32 #define CRYPTO_ALG_TYPE_CIPHER 0x00000001
33 #define CRYPTO_ALG_TYPE_DIGEST 0x00000002
34 #define CRYPTO_ALG_TYPE_HASH 0x00000003
35 #define CRYPTO_ALG_TYPE_BLKCIPHER 0x00000004
36 #define CRYPTO_ALG_TYPE_ABLKCIPHER 0x00000005
37 #define CRYPTO_ALG_TYPE_GIVCIPHER 0x00000006
38 #define CRYPTO_ALG_TYPE_COMPRESS 0x00000008
39 #define CRYPTO_ALG_TYPE_AEAD 0x00000009
40
41 #define CRYPTO_ALG_TYPE_HASH_MASK 0x0000000e
42 #define CRYPTO_ALG_TYPE_BLKCIPHER_MASK 0x0000000c
43
44 #define CRYPTO_ALG_LARVAL 0x00000010
45 #define CRYPTO_ALG_DEAD 0x00000020
46 #define CRYPTO_ALG_DYING 0x00000040
47 #define CRYPTO_ALG_ASYNC 0x00000080
48
49 /*
50 * Set this bit if and only if the algorithm requires another algorithm of
51 * the same type to handle corner cases.
52 */
53 #define CRYPTO_ALG_NEED_FALLBACK 0x00000100
54
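/*
 * Example (illustrative sketch, not part of the API): a hardware driver
 * that only supports some key sizes typically allocates a software
 * fallback by passing CRYPTO_ALG_NEED_FALLBACK (and CRYPTO_ALG_ASYNC) in
 * the mask while leaving those bits clear in the type, so the transform
 * it gets back is synchronous and does not itself require a fallback:
 *
 *        struct crypto_cipher *fallback;
 *
 *        fallback = crypto_alloc_cipher("aes", 0,
 *                                       CRYPTO_ALG_ASYNC |
 *                                       CRYPTO_ALG_NEED_FALLBACK);
 *        if (IS_ERR(fallback))
 *                return PTR_ERR(fallback);
 */
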
55 /*
56 * Transform masks and values (for crt_flags).
57 */
58 #define CRYPTO_TFM_REQ_MASK 0x000fff00
59 #define CRYPTO_TFM_RES_MASK 0xfff00000
60
61 #define CRYPTO_TFM_REQ_WEAK_KEY 0x00000100
62 #define CRYPTO_TFM_REQ_MAY_SLEEP 0x00000200
63 #define CRYPTO_TFM_REQ_MAY_BACKLOG 0x00000400
64 #define CRYPTO_TFM_RES_WEAK_KEY 0x00100000
65 #define CRYPTO_TFM_RES_BAD_KEY_LEN 0x00200000
66 #define CRYPTO_TFM_RES_BAD_KEY_SCHED 0x00400000
67 #define CRYPTO_TFM_RES_BAD_BLOCK_LEN 0x00800000
68 #define CRYPTO_TFM_RES_BAD_FLAGS 0x01000000
69
70 /*
71 * Miscellaneous stuff.
72 */
73 #define CRYPTO_MAX_ALG_NAME 64
74
75 /*
76 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
77 * declaration) is used to ensure that the crypto_tfm context structure is
78 * aligned correctly for the given architecture so that there are no alignment
79 * faults for C data types. In particular, this is required on platforms such
80 * as arm where pointers are 32-bit aligned but there are data types such as
81 * u64 which require 64-bit alignment.
82 */
83 #if defined(ARCH_KMALLOC_MINALIGN)
84 #define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
85 #elif defined(ARCH_SLAB_MINALIGN)
86 #define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
87 #endif
88
89 #ifdef CRYPTO_MINALIGN
90 #define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
91 #else
92 #define CRYPTO_MINALIGN_ATTR
93 #endif
94
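/*
 * Example (sketch): an algorithm's private state is carved out of the
 * crypto_tfm allocation and accessed with crypto_tfm_ctx() (defined
 * below).  Because of CRYPTO_MINALIGN_ATTR it may safely contain types
 * such as u64 even on 32-bit platforms; "my_ctx" is a hypothetical
 * driver context:
 *
 *        struct my_ctx {
 *                u64 counter;
 *                u8 key[32];
 *        };
 *
 *        struct my_ctx *ctx = crypto_tfm_ctx(tfm);
 */
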
95 struct scatterlist;
96 struct crypto_ablkcipher;
97 struct crypto_async_request;
98 struct crypto_aead;
99 struct crypto_blkcipher;
100 struct crypto_hash;
101 struct crypto_tfm;
102 struct crypto_type;
103 struct skcipher_givcrypt_request;
104
105 typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);
106
107 struct crypto_async_request {
108 struct list_head list;
109 crypto_completion_t complete;
110 void *data;
111 struct crypto_tfm *tfm;
112
113 u32 flags;
114 };
115
116 struct ablkcipher_request {
117 struct crypto_async_request base;
118
119 unsigned int nbytes;
120
121 void *info;
122
123 struct scatterlist *src;
124 struct scatterlist *dst;
125
126 void *__ctx[] CRYPTO_MINALIGN_ATTR;
127 };
128
129 /**
130 * struct aead_request - AEAD request
131 * @base: Common attributes for async crypto requests
132 * @assoclen: Length in bytes of associated data for authentication
133 * @cryptlen: Length of data to be encrypted or decrypted
134 * @iv: Initialisation vector
135 * @assoc: Associated data
136 * @src: Source data
137 * @dst: Destination data
138 * @__ctx: Start of private context data
139 */
140 struct aead_request {
141 struct crypto_async_request base;
142
143 unsigned int assoclen;
144 unsigned int cryptlen;
145
146 u8 *iv;
147
148 struct scatterlist *assoc;
149 struct scatterlist *src;
150 struct scatterlist *dst;
151
152 void *__ctx[] CRYPTO_MINALIGN_ATTR;
153 };
154
155 struct blkcipher_desc {
156 struct crypto_blkcipher *tfm;
157 void *info;
158 u32 flags;
159 };
160
161 struct cipher_desc {
162 struct crypto_tfm *tfm;
163 void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
164 unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
165 const u8 *src, unsigned int nbytes);
166 void *info;
167 };
168
169 struct hash_desc {
170 struct crypto_hash *tfm;
171 u32 flags;
172 };
173
174 /*
175 * Algorithms: modular crypto algorithm implementations, managed
176 * via crypto_register_alg() and crypto_unregister_alg().
177 */
178 struct ablkcipher_alg {
179 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
180 unsigned int keylen);
181 int (*encrypt)(struct ablkcipher_request *req);
182 int (*decrypt)(struct ablkcipher_request *req);
183 int (*givencrypt)(struct skcipher_givcrypt_request *req);
184 int (*givdecrypt)(struct skcipher_givcrypt_request *req);
185
186 unsigned int min_keysize;
187 unsigned int max_keysize;
188 unsigned int ivsize;
189 };
190
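/*
 * Note: the givencrypt/givdecrypt hooks in struct ablkcipher_alg above
 * take a struct skcipher_givcrypt_request (declared in crypto/skcipher.h
 * by this patch series), which bundles an ordinary ablkcipher_request
 * with a buffer for the generated IV and a sequence number, roughly:
 *
 *        struct skcipher_givcrypt_request {
 *                u64 seq;
 *                u8 *giv;
 *                struct ablkcipher_request creq;
 *        };
 */
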
191 struct aead_alg {
192 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
193 unsigned int keylen);
194 int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
195 int (*encrypt)(struct aead_request *req);
196 int (*decrypt)(struct aead_request *req);
197
198 unsigned int ivsize;
199 unsigned int maxauthsize;
200 };
201
202 struct blkcipher_alg {
203 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
204 unsigned int keylen);
205 int (*encrypt)(struct blkcipher_desc *desc,
206 struct scatterlist *dst, struct scatterlist *src,
207 unsigned int nbytes);
208 int (*decrypt)(struct blkcipher_desc *desc,
209 struct scatterlist *dst, struct scatterlist *src,
210 unsigned int nbytes);
211
212 unsigned int min_keysize;
213 unsigned int max_keysize;
214 unsigned int ivsize;
215 };
216
217 struct cipher_alg {
218 unsigned int cia_min_keysize;
219 unsigned int cia_max_keysize;
220 int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
221 unsigned int keylen);
222 void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
223 void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
224 };
225
226 struct digest_alg {
227 unsigned int dia_digestsize;
228 void (*dia_init)(struct crypto_tfm *tfm);
229 void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
230 unsigned int len);
231 void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
232 int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
233 unsigned int keylen);
234 };
235
236 struct hash_alg {
237 int (*init)(struct hash_desc *desc);
238 int (*update)(struct hash_desc *desc, struct scatterlist *sg,
239 unsigned int nbytes);
240 int (*final)(struct hash_desc *desc, u8 *out);
241 int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
242 unsigned int nbytes, u8 *out);
243 int (*setkey)(struct crypto_hash *tfm, const u8 *key,
244 unsigned int keylen);
245
246 unsigned int digestsize;
247 };
248
249 struct compress_alg {
250 int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
251 unsigned int slen, u8 *dst, unsigned int *dlen);
252 int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
253 unsigned int slen, u8 *dst, unsigned int *dlen);
254 };
255
256 #define cra_ablkcipher cra_u.ablkcipher
257 #define cra_aead cra_u.aead
258 #define cra_blkcipher cra_u.blkcipher
259 #define cra_cipher cra_u.cipher
260 #define cra_digest cra_u.digest
261 #define cra_hash cra_u.hash
262 #define cra_compress cra_u.compress
263
264 struct crypto_alg {
265 struct list_head cra_list;
266 struct list_head cra_users;
267
268 u32 cra_flags;
269 unsigned int cra_blocksize;
270 unsigned int cra_ctxsize;
271 unsigned int cra_alignmask;
272
273 int cra_priority;
274 atomic_t cra_refcnt;
275
276 char cra_name[CRYPTO_MAX_ALG_NAME];
277 char cra_driver_name[CRYPTO_MAX_ALG_NAME];
278
279 const struct crypto_type *cra_type;
280
281 union {
282 struct ablkcipher_alg ablkcipher;
283 struct aead_alg aead;
284 struct blkcipher_alg blkcipher;
285 struct cipher_alg cipher;
286 struct digest_alg digest;
287 struct hash_alg hash;
288 struct compress_alg compress;
289 } cra_u;
290
291 int (*cra_init)(struct crypto_tfm *tfm);
292 void (*cra_exit)(struct crypto_tfm *tfm);
293 void (*cra_destroy)(struct crypto_alg *alg);
294
295 struct module *cra_module;
296 };
297
298 /*
299 * Algorithm registration interface.
300 */
301 int crypto_register_alg(struct crypto_alg *alg);
302 int crypto_unregister_alg(struct crypto_alg *alg);
303
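/*
 * Example (illustrative sketch): registering a simple single-block
 * cipher.  The context structure and the my_setkey/my_encrypt/my_decrypt
 * handlers are hypothetical placeholders for a real implementation:
 *
 *        static struct crypto_alg my_alg = {
 *                .cra_name        = "mycipher",
 *                .cra_driver_name = "mycipher-generic",
 *                .cra_priority    = 100,
 *                .cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
 *                .cra_blocksize   = 16,
 *                .cra_ctxsize     = sizeof(struct my_ctx),
 *                .cra_module      = THIS_MODULE,
 *                .cra_list        = LIST_HEAD_INIT(my_alg.cra_list),
 *                .cra_u           = { .cipher = {
 *                        .cia_min_keysize = 16,
 *                        .cia_max_keysize = 32,
 *                        .cia_setkey      = my_setkey,
 *                        .cia_encrypt     = my_encrypt,
 *                        .cia_decrypt     = my_decrypt } }
 *        };
 *
 *        static int __init my_init(void)
 *        {
 *                return crypto_register_alg(&my_alg);
 *        }
 */
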
304 /*
305 * Algorithm query interface.
306 */
307 #ifdef CONFIG_CRYPTO
308 int crypto_has_alg(const char *name, u32 type, u32 mask);
309 #else
310 static inline int crypto_has_alg(const char *name, u32 type, u32 mask)
311 {
312 return 0;
313 }
314 #endif
315
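/*
 * Example (sketch): probing for an algorithm before committing to use
 * it; the name is a placeholder and a type/mask of 0 matches any
 * implementation:
 *
 *        if (!crypto_has_alg("cbc(aes)", 0, 0))
 *                return -ENOENT;
 */
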
316 /*
317 * Transforms: user-instantiated objects which encapsulate algorithms
318 * and core processing logic. Managed via crypto_alloc_*() and
319 * crypto_free_*(), as well as the various helpers below.
320 */
321
322 struct ablkcipher_tfm {
323 int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
324 unsigned int keylen);
325 int (*encrypt)(struct ablkcipher_request *req);
326 int (*decrypt)(struct ablkcipher_request *req);
327 int (*givencrypt)(struct skcipher_givcrypt_request *req);
328 int (*givdecrypt)(struct skcipher_givcrypt_request *req);
329
330 unsigned int ivsize;
331 unsigned int reqsize;
332 };
333
334 struct aead_tfm {
335 int (*setkey)(struct crypto_aead *tfm, const u8 *key,
336 unsigned int keylen);
337 int (*encrypt)(struct aead_request *req);
338 int (*decrypt)(struct aead_request *req);
339 unsigned int ivsize;
340 unsigned int authsize;
341 unsigned int reqsize;
342 };
343
344 struct blkcipher_tfm {
345 void *iv;
346 int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
347 unsigned int keylen);
348 int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
349 struct scatterlist *src, unsigned int nbytes);
350 int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
351 struct scatterlist *src, unsigned int nbytes);
352 };
353
354 struct cipher_tfm {
355 int (*cit_setkey)(struct crypto_tfm *tfm,
356 const u8 *key, unsigned int keylen);
357 void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
358 void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
359 };
360
361 struct hash_tfm {
362 int (*init)(struct hash_desc *desc);
363 int (*update)(struct hash_desc *desc,
364 struct scatterlist *sg, unsigned int nsg);
365 int (*final)(struct hash_desc *desc, u8 *out);
366 int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
367 unsigned int nsg, u8 *out);
368 int (*setkey)(struct crypto_hash *tfm, const u8 *key,
369 unsigned int keylen);
370 unsigned int digestsize;
371 };
372
373 struct compress_tfm {
374 int (*cot_compress)(struct crypto_tfm *tfm,
375 const u8 *src, unsigned int slen,
376 u8 *dst, unsigned int *dlen);
377 int (*cot_decompress)(struct crypto_tfm *tfm,
378 const u8 *src, unsigned int slen,
379 u8 *dst, unsigned int *dlen);
380 };
381
382 #define crt_ablkcipher crt_u.ablkcipher
383 #define crt_aead crt_u.aead
384 #define crt_blkcipher crt_u.blkcipher
385 #define crt_cipher crt_u.cipher
386 #define crt_hash crt_u.hash
387 #define crt_compress crt_u.compress
388
389 struct crypto_tfm {
390
391 u32 crt_flags;
392
393 union {
394 struct ablkcipher_tfm ablkcipher;
395 struct aead_tfm aead;
396 struct blkcipher_tfm blkcipher;
397 struct cipher_tfm cipher;
398 struct hash_tfm hash;
399 struct compress_tfm compress;
400 } crt_u;
401
402 struct crypto_alg *__crt_alg;
403
404 void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
405 };
406
407 struct crypto_ablkcipher {
408 struct crypto_tfm base;
409 };
410
411 struct crypto_aead {
412 struct crypto_tfm base;
413 };
414
415 struct crypto_blkcipher {
416 struct crypto_tfm base;
417 };
418
419 struct crypto_cipher {
420 struct crypto_tfm base;
421 };
422
423 struct crypto_comp {
424 struct crypto_tfm base;
425 };
426
427 struct crypto_hash {
428 struct crypto_tfm base;
429 };
430
431 enum {
432 CRYPTOA_UNSPEC,
433 CRYPTOA_ALG,
434 CRYPTOA_TYPE,
435 CRYPTOA_U32,
436 __CRYPTOA_MAX,
437 };
438
439 #define CRYPTOA_MAX (__CRYPTOA_MAX - 1)
440
441 /* Maximum number of (rtattr) parameters for each template. */
442 #define CRYPTO_MAX_ATTRS 32
443
444 struct crypto_attr_alg {
445 char name[CRYPTO_MAX_ALG_NAME];
446 };
447
448 struct crypto_attr_type {
449 u32 type;
450 u32 mask;
451 };
452
453 struct crypto_attr_u32 {
454 u32 num;
455 };
456
457 /*
458 * Transform user interface.
459 */
460
461 struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
462 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
463 void crypto_free_tfm(struct crypto_tfm *tfm);
464
465 /*
466 * Transform helpers which query the underlying algorithm.
467 */
468 static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
469 {
470 return tfm->__crt_alg->cra_name;
471 }
472
473 static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
474 {
475 return tfm->__crt_alg->cra_driver_name;
476 }
477
478 static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
479 {
480 return tfm->__crt_alg->cra_priority;
481 }
482
483 static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
484 {
485 return module_name(tfm->__crt_alg->cra_module);
486 }
487
488 static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
489 {
490 return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
491 }
492
493 static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
494 {
495 return tfm->__crt_alg->cra_blocksize;
496 }
497
498 static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
499 {
500 return tfm->__crt_alg->cra_alignmask;
501 }
502
503 static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
504 {
505 return tfm->crt_flags;
506 }
507
508 static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
509 {
510 tfm->crt_flags |= flags;
511 }
512
513 static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
514 {
515 tfm->crt_flags &= ~flags;
516 }
517
518 static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
519 {
520 return tfm->__crt_ctx;
521 }
522
523 static inline unsigned int crypto_tfm_ctx_alignment(void)
524 {
525 struct crypto_tfm *tfm;
526 return __alignof__(tfm->__crt_ctx);
527 }
528
529 /*
530 * API wrappers.
531 */
532 static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
533 struct crypto_tfm *tfm)
534 {
535 return (struct crypto_ablkcipher *)tfm;
536 }
537
538 static inline u32 crypto_skcipher_type(u32 type)
539 {
540 type &= ~CRYPTO_ALG_TYPE_MASK;
541 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
542 return type;
543 }
544
545 static inline u32 crypto_skcipher_mask(u32 mask)
546 {
547 mask &= ~CRYPTO_ALG_TYPE_MASK;
548 mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
549 return mask;
550 }
551
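/*
 * Note (sketch): the two helpers above widen an skcipher lookup so that
 * it matches the blkcipher, ablkcipher and givcipher algorithm types.  A
 * call such as
 *
 *        crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *
 * may therefore be satisfied by any of those implementations (typically
 * the one with the highest priority), wrapped behind the ablkcipher
 * interface.
 */
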
552 static inline struct crypto_ablkcipher *crypto_alloc_ablkcipher(
553 const char *alg_name, u32 type, u32 mask)
554 {
555 return __crypto_ablkcipher_cast(
556 crypto_alloc_base(alg_name, crypto_skcipher_type(type),
557 crypto_skcipher_mask(mask)));
558 }
559
560 static inline struct crypto_tfm *crypto_ablkcipher_tfm(
561 struct crypto_ablkcipher *tfm)
562 {
563 return &tfm->base;
564 }
565
566 static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
567 {
568 crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
569 }
570
571 static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
572 u32 mask)
573 {
574 return crypto_has_alg(alg_name, crypto_skcipher_type(type),
575 crypto_skcipher_mask(mask));
576 }
577
578 static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
579 struct crypto_ablkcipher *tfm)
580 {
581 return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
582 }
583
584 static inline unsigned int crypto_ablkcipher_ivsize(
585 struct crypto_ablkcipher *tfm)
586 {
587 return crypto_ablkcipher_crt(tfm)->ivsize;
588 }
589
590 static inline unsigned int crypto_ablkcipher_blocksize(
591 struct crypto_ablkcipher *tfm)
592 {
593 return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
594 }
595
596 static inline unsigned int crypto_ablkcipher_alignmask(
597 struct crypto_ablkcipher *tfm)
598 {
599 return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
600 }
601
602 static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
603 {
604 return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
605 }
606
607 static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
608 u32 flags)
609 {
610 crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
611 }
612
613 static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
614 u32 flags)
615 {
616 crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
617 }
618
619 static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
620 const u8 *key, unsigned int keylen)
621 {
622 return crypto_ablkcipher_crt(tfm)->setkey(tfm, key, keylen);
623 }
624
625 static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
626 struct ablkcipher_request *req)
627 {
628 return __crypto_ablkcipher_cast(req->base.tfm);
629 }
630
631 static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
632 {
633 struct ablkcipher_tfm *crt =
634 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
635 return crt->encrypt(req);
636 }
637
638 static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
639 {
640 struct ablkcipher_tfm *crt =
641 crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
642 return crt->decrypt(req);
643 }
644
645 static inline unsigned int crypto_ablkcipher_reqsize(
646 struct crypto_ablkcipher *tfm)
647 {
648 return crypto_ablkcipher_crt(tfm)->reqsize;
649 }
650
651 static inline void ablkcipher_request_set_tfm(
652 struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
653 {
654 req->base.tfm = crypto_ablkcipher_tfm(tfm);
655 }
656
657 static inline struct ablkcipher_request *ablkcipher_request_cast(
658 struct crypto_async_request *req)
659 {
660 return container_of(req, struct ablkcipher_request, base);
661 }
662
663 static inline struct ablkcipher_request *ablkcipher_request_alloc(
664 struct crypto_ablkcipher *tfm, gfp_t gfp)
665 {
666 struct ablkcipher_request *req;
667
668 req = kmalloc(sizeof(struct ablkcipher_request) +
669 crypto_ablkcipher_reqsize(tfm), gfp);
670
671 if (likely(req))
672 ablkcipher_request_set_tfm(req, tfm);
673
674 return req;
675 }
676
677 static inline void ablkcipher_request_free(struct ablkcipher_request *req)
678 {
679 kfree(req);
680 }
681
682 static inline void ablkcipher_request_set_callback(
683 struct ablkcipher_request *req,
684 u32 flags, crypto_completion_t complete, void *data)
685 {
686 req->base.complete = complete;
687 req->base.data = data;
688 req->base.flags = flags;
689 }
690
691 static inline void ablkcipher_request_set_crypt(
692 struct ablkcipher_request *req,
693 struct scatterlist *src, struct scatterlist *dst,
694 unsigned int nbytes, void *iv)
695 {
696 req->src = src;
697 req->dst = dst;
698 req->nbytes = nbytes;
699 req->info = iv;
700 }
701
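/*
 * Example (illustrative sketch, abbreviated error handling): encrypting
 * one block with an asynchronous block cipher.  "key", "buf" and
 * "my_complete" are hypothetical placeholders.  A return value of
 * -EINPROGRESS or -EBUSY means the request has been queued and
 * my_complete() will report the final status; the request and tfm are
 * released with ablkcipher_request_free()/crypto_free_ablkcipher() once
 * the operation has finished:
 *
 *        struct crypto_ablkcipher *tfm;
 *        struct ablkcipher_request *req;
 *        struct scatterlist sg;
 *        u8 iv[16] = { 0 };
 *        int err;
 *
 *        tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        err = crypto_ablkcipher_setkey(tfm, key, 16);
 *        req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *        ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                        my_complete, NULL);
 *        sg_init_one(&sg, buf, 16);
 *        ablkcipher_request_set_crypt(req, &sg, &sg, 16, iv);
 *        err = crypto_ablkcipher_encrypt(req);
 */
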
702 static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
703 {
704 return (struct crypto_aead *)tfm;
705 }
706
707 static inline struct crypto_aead *crypto_alloc_aead(const char *alg_name,
708 u32 type, u32 mask)
709 {
710 type &= ~CRYPTO_ALG_TYPE_MASK;
711 type |= CRYPTO_ALG_TYPE_AEAD;
712 mask |= CRYPTO_ALG_TYPE_MASK;
713
714 return __crypto_aead_cast(crypto_alloc_base(alg_name, type, mask));
715 }
716
717 static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
718 {
719 return &tfm->base;
720 }
721
722 static inline void crypto_free_aead(struct crypto_aead *tfm)
723 {
724 crypto_free_tfm(crypto_aead_tfm(tfm));
725 }
726
727 static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
728 {
729 return &crypto_aead_tfm(tfm)->crt_aead;
730 }
731
732 static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
733 {
734 return crypto_aead_crt(tfm)->ivsize;
735 }
736
737 static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
738 {
739 return crypto_aead_crt(tfm)->authsize;
740 }
741
742 static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
743 {
744 return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
745 }
746
747 static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
748 {
749 return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
750 }
751
752 static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
753 {
754 return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
755 }
756
757 static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
758 {
759 crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
760 }
761
762 static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
763 {
764 crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
765 }
766
767 static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
768 unsigned int keylen)
769 {
770 return crypto_aead_crt(tfm)->setkey(tfm, key, keylen);
771 }
772
773 int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);
774
775 static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
776 {
777 return __crypto_aead_cast(req->base.tfm);
778 }
779
780 static inline int crypto_aead_encrypt(struct aead_request *req)
781 {
782 return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
783 }
784
785 static inline int crypto_aead_decrypt(struct aead_request *req)
786 {
787 return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
788 }
789
790 static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
791 {
792 return crypto_aead_crt(tfm)->reqsize;
793 }
794
795 static inline void aead_request_set_tfm(struct aead_request *req,
796 struct crypto_aead *tfm)
797 {
798 req->base.tfm = crypto_aead_tfm(tfm);
799 }
800
801 static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
802 gfp_t gfp)
803 {
804 struct aead_request *req;
805
806 req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);
807
808 if (likely(req))
809 aead_request_set_tfm(req, tfm);
810
811 return req;
812 }
813
814 static inline void aead_request_free(struct aead_request *req)
815 {
816 kfree(req);
817 }
818
819 static inline void aead_request_set_callback(struct aead_request *req,
820 u32 flags,
821 crypto_completion_t complete,
822 void *data)
823 {
824 req->base.complete = complete;
825 req->base.data = data;
826 req->base.flags = flags;
827 }
828
829 static inline void aead_request_set_crypt(struct aead_request *req,
830 struct scatterlist *src,
831 struct scatterlist *dst,
832 unsigned int cryptlen, u8 *iv)
833 {
834 req->src = src;
835 req->dst = dst;
836 req->cryptlen = cryptlen;
837 req->iv = iv;
838 }
839
840 static inline void aead_request_set_assoc(struct aead_request *req,
841 struct scatterlist *assoc,
842 unsigned int assoclen)
843 {
844 req->assoc = assoc;
845 req->assoclen = assoclen;
846 }
847
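/*
 * Example (sketch): authenticated encryption with an AEAD transform such
 * as "gcm(aes)".  The key, scatterlists and callback are placeholders;
 * on encryption, dst must provide room for the authentication tag that
 * is appended after the ciphertext:
 *
 *        struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *        struct aead_request *req;
 *        int err;
 *
 *        err = crypto_aead_setkey(tfm, key, 16);
 *        err = crypto_aead_setauthsize(tfm, 16);
 *
 *        req = aead_request_alloc(tfm, GFP_KERNEL);
 *        aead_request_set_callback(req, 0, my_complete, NULL);
 *        aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *        aead_request_set_assoc(req, assoc, assoclen);
 *        err = crypto_aead_encrypt(req);
 */
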
848 static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
849 struct crypto_tfm *tfm)
850 {
851 return (struct crypto_blkcipher *)tfm;
852 }
853
854 static inline struct crypto_blkcipher *crypto_blkcipher_cast(
855 struct crypto_tfm *tfm)
856 {
857 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
858 return __crypto_blkcipher_cast(tfm);
859 }
860
861 static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
862 const char *alg_name, u32 type, u32 mask)
863 {
864 type &= ~CRYPTO_ALG_TYPE_MASK;
865 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
866 mask |= CRYPTO_ALG_TYPE_MASK;
867
868 return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
869 }
870
871 static inline struct crypto_tfm *crypto_blkcipher_tfm(
872 struct crypto_blkcipher *tfm)
873 {
874 return &tfm->base;
875 }
876
877 static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
878 {
879 crypto_free_tfm(crypto_blkcipher_tfm(tfm));
880 }
881
882 static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
883 {
884 type &= ~CRYPTO_ALG_TYPE_MASK;
885 type |= CRYPTO_ALG_TYPE_BLKCIPHER;
886 mask |= CRYPTO_ALG_TYPE_MASK;
887
888 return crypto_has_alg(alg_name, type, mask);
889 }
890
891 static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
892 {
893 return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
894 }
895
896 static inline struct blkcipher_tfm *crypto_blkcipher_crt(
897 struct crypto_blkcipher *tfm)
898 {
899 return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
900 }
901
902 static inline struct blkcipher_alg *crypto_blkcipher_alg(
903 struct crypto_blkcipher *tfm)
904 {
905 return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
906 }
907
908 static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
909 {
910 return crypto_blkcipher_alg(tfm)->ivsize;
911 }
912
913 static inline unsigned int crypto_blkcipher_blocksize(
914 struct crypto_blkcipher *tfm)
915 {
916 return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
917 }
918
919 static inline unsigned int crypto_blkcipher_alignmask(
920 struct crypto_blkcipher *tfm)
921 {
922 return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
923 }
924
925 static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
926 {
927 return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
928 }
929
930 static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
931 u32 flags)
932 {
933 crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
934 }
935
936 static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
937 u32 flags)
938 {
939 crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
940 }
941
942 static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
943 const u8 *key, unsigned int keylen)
944 {
945 return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
946 key, keylen);
947 }
948
949 static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
950 struct scatterlist *dst,
951 struct scatterlist *src,
952 unsigned int nbytes)
953 {
954 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
955 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
956 }
957
958 static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
959 struct scatterlist *dst,
960 struct scatterlist *src,
961 unsigned int nbytes)
962 {
963 return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
964 }
965
966 static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
967 struct scatterlist *dst,
968 struct scatterlist *src,
969 unsigned int nbytes)
970 {
971 desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
972 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
973 }
974
975 static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
976 struct scatterlist *dst,
977 struct scatterlist *src,
978 unsigned int nbytes)
979 {
980 return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
981 }
982
983 static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
984 const u8 *src, unsigned int len)
985 {
986 memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
987 }
988
989 static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
990 u8 *dst, unsigned int len)
991 {
992 memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
993 }
994
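/*
 * Example (sketch): synchronous encryption through the blkcipher
 * interface.  "key" and "buf" are placeholders; passing CRYPTO_ALG_ASYNC
 * in the mask (with the bit clear in the type) requests a synchronous
 * implementation.  Note that crypto_blkcipher_encrypt() uses the IV
 * stored in the tfm, while the _iv variants take it from desc.info:
 *
 *        struct crypto_blkcipher *tfm;
 *        struct blkcipher_desc desc;
 *        struct scatterlist sg;
 *        u8 iv[16] = { 0 };
 *        int err;
 *
 *        tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        desc.tfm = tfm;
 *        desc.flags = 0;
 *        err = crypto_blkcipher_setkey(tfm, key, 16);
 *        crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *        sg_init_one(&sg, buf, 16);
 *        err = crypto_blkcipher_encrypt(&desc, &sg, &sg, 16);
 *        crypto_free_blkcipher(tfm);
 */
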
995 static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
996 {
997 return (struct crypto_cipher *)tfm;
998 }
999
1000 static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
1001 {
1002 BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
1003 return __crypto_cipher_cast(tfm);
1004 }
1005
1006 static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
1007 u32 type, u32 mask)
1008 {
1009 type &= ~CRYPTO_ALG_TYPE_MASK;
1010 type |= CRYPTO_ALG_TYPE_CIPHER;
1011 mask |= CRYPTO_ALG_TYPE_MASK;
1012
1013 return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
1014 }
1015
1016 static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
1017 {
1018 return &tfm->base;
1019 }
1020
1021 static inline void crypto_free_cipher(struct crypto_cipher *tfm)
1022 {
1023 crypto_free_tfm(crypto_cipher_tfm(tfm));
1024 }
1025
1026 static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
1027 {
1028 type &= ~CRYPTO_ALG_TYPE_MASK;
1029 type |= CRYPTO_ALG_TYPE_CIPHER;
1030 mask |= CRYPTO_ALG_TYPE_MASK;
1031
1032 return crypto_has_alg(alg_name, type, mask);
1033 }
1034
1035 static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
1036 {
1037 return &crypto_cipher_tfm(tfm)->crt_cipher;
1038 }
1039
1040 static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
1041 {
1042 return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
1043 }
1044
1045 static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
1046 {
1047 return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
1048 }
1049
1050 static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
1051 {
1052 return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
1053 }
1054
1055 static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
1056 u32 flags)
1057 {
1058 crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
1059 }
1060
1061 static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
1062 u32 flags)
1063 {
1064 crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
1065 }
1066
1067 static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
1068 const u8 *key, unsigned int keylen)
1069 {
1070 return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
1071 key, keylen);
1072 }
1073
1074 static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
1075 u8 *dst, const u8 *src)
1076 {
1077 crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
1078 dst, src);
1079 }
1080
1081 static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
1082 u8 *dst, const u8 *src)
1083 {
1084 crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
1085 dst, src);
1086 }
1087
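/*
 * Example (sketch): single-block operations, e.g. as used when building
 * a block cipher mode on top of a plain cipher.  "key", "in" and "out"
 * are placeholder buffers of at least one block:
 *
 *        struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *        int err;
 *
 *        err = crypto_cipher_setkey(tfm, key, 16);
 *        crypto_cipher_encrypt_one(tfm, out, in);
 *        crypto_free_cipher(tfm);
 */
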
1088 static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
1089 {
1090 return (struct crypto_hash *)tfm;
1091 }
1092
1093 static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
1094 {
1095 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
1096 CRYPTO_ALG_TYPE_HASH_MASK);
1097 return __crypto_hash_cast(tfm);
1098 }
1099
1100 static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
1101 u32 type, u32 mask)
1102 {
1103 type &= ~CRYPTO_ALG_TYPE_MASK;
1104 mask &= ~CRYPTO_ALG_TYPE_MASK;
1105 type |= CRYPTO_ALG_TYPE_HASH;
1106 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1107
1108 return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
1109 }
1110
1111 static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
1112 {
1113 return &tfm->base;
1114 }
1115
1116 static inline void crypto_free_hash(struct crypto_hash *tfm)
1117 {
1118 crypto_free_tfm(crypto_hash_tfm(tfm));
1119 }
1120
1121 static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
1122 {
1123 type &= ~CRYPTO_ALG_TYPE_MASK;
1124 mask &= ~CRYPTO_ALG_TYPE_MASK;
1125 type |= CRYPTO_ALG_TYPE_HASH;
1126 mask |= CRYPTO_ALG_TYPE_HASH_MASK;
1127
1128 return crypto_has_alg(alg_name, type, mask);
1129 }
1130
1131 static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
1132 {
1133 return &crypto_hash_tfm(tfm)->crt_hash;
1134 }
1135
1136 static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
1137 {
1138 return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
1139 }
1140
1141 static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
1142 {
1143 return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
1144 }
1145
1146 static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
1147 {
1148 return crypto_hash_crt(tfm)->digestsize;
1149 }
1150
1151 static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
1152 {
1153 return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
1154 }
1155
1156 static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
1157 {
1158 crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
1159 }
1160
1161 static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
1162 {
1163 crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
1164 }
1165
1166 static inline int crypto_hash_init(struct hash_desc *desc)
1167 {
1168 return crypto_hash_crt(desc->tfm)->init(desc);
1169 }
1170
1171 static inline int crypto_hash_update(struct hash_desc *desc,
1172 struct scatterlist *sg,
1173 unsigned int nbytes)
1174 {
1175 return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
1176 }
1177
1178 static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
1179 {
1180 return crypto_hash_crt(desc->tfm)->final(desc, out);
1181 }
1182
1183 static inline int crypto_hash_digest(struct hash_desc *desc,
1184 struct scatterlist *sg,
1185 unsigned int nbytes, u8 *out)
1186 {
1187 return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
1188 }
1189
1190 static inline int crypto_hash_setkey(struct crypto_hash *hash,
1191 const u8 *key, unsigned int keylen)
1192 {
1193 return crypto_hash_crt(hash)->setkey(hash, key, keylen);
1194 }
1195
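/*
 * Example (sketch): computing a digest over a buffer with the hash
 * interface.  "buf" and "len" are placeholders; a keyed hash such as
 * "hmac(sha1)" would additionally call crypto_hash_setkey() first:
 *
 *        struct crypto_hash *tfm;
 *        struct hash_desc desc;
 *        struct scatterlist sg;
 *        u8 out[20];
 *        int err;
 *
 *        tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *        if (IS_ERR(tfm))
 *                return PTR_ERR(tfm);
 *
 *        desc.tfm = tfm;
 *        desc.flags = 0;
 *        sg_init_one(&sg, buf, len);
 *        err = crypto_hash_digest(&desc, &sg, len, out);
 *        crypto_free_hash(tfm);
 */
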
1196 static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
1197 {
1198 return (struct crypto_comp *)tfm;
1199 }
1200
1201 static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
1202 {
1203 BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
1204 CRYPTO_ALG_TYPE_MASK);
1205 return __crypto_comp_cast(tfm);
1206 }
1207
1208 static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
1209 u32 type, u32 mask)
1210 {
1211 type &= ~CRYPTO_ALG_TYPE_MASK;
1212 type |= CRYPTO_ALG_TYPE_COMPRESS;
1213 mask |= CRYPTO_ALG_TYPE_MASK;
1214
1215 return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
1216 }
1217
1218 static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
1219 {
1220 return &tfm->base;
1221 }
1222
1223 static inline void crypto_free_comp(struct crypto_comp *tfm)
1224 {
1225 crypto_free_tfm(crypto_comp_tfm(tfm));
1226 }
1227
1228 static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
1229 {
1230 type &= ~CRYPTO_ALG_TYPE_MASK;
1231 type |= CRYPTO_ALG_TYPE_COMPRESS;
1232 mask |= CRYPTO_ALG_TYPE_MASK;
1233
1234 return crypto_has_alg(alg_name, type, mask);
1235 }
1236
1237 static inline const char *crypto_comp_name(struct crypto_comp *tfm)
1238 {
1239 return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
1240 }
1241
1242 static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
1243 {
1244 return &crypto_comp_tfm(tfm)->crt_compress;
1245 }
1246
1247 static inline int crypto_comp_compress(struct crypto_comp *tfm,
1248 const u8 *src, unsigned int slen,
1249 u8 *dst, unsigned int *dlen)
1250 {
1251 return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
1252 src, slen, dst, dlen);
1253 }
1254
1255 static inline int crypto_comp_decompress(struct crypto_comp *tfm,
1256 const u8 *src, unsigned int slen,
1257 u8 *dst, unsigned int *dlen)
1258 {
1259 return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
1260 src, slen, dst, dlen);
1261 }
1262
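/*
 * Example (sketch): one-shot compression.  "src", "slen" and "dst" are
 * placeholders; dlen must be initialised to the size of the output
 * buffer and is updated to the number of bytes actually produced:
 *
 *        struct crypto_comp *tfm = crypto_alloc_comp("deflate", 0, 0);
 *        unsigned int dlen = sizeof(dst);
 *        int err;
 *
 *        err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *        crypto_free_comp(tfm);
 */
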
1263 #endif /* _LINUX_CRYPTO_H */
1264