/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

#define CRYPTD_MAX_CPU_QLEN 1000
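/*
 * A note on the limit above: each CPU gets its own crypto_queue capped
 * at CRYPTD_MAX_CPU_QLEN entries. Once a CPU's queue is full,
 * crypto_enqueue_request() reports -EBUSY, and only requests flagged
 * CRYPTO_TFM_REQ_MAY_BACKLOG are retained beyond that point (see
 * cryptd_enqueue_request() below).
 */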
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	atomic_t refcnt;
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	atomic_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	atomic_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);
static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}
static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_tfm *tfm;
	atomic_t *refcnt;
	bool may_backlog;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);
	may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;

	if (err == -EBUSY && !may_backlog)
		goto out_put_cpu;

	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);

	if (!atomic_read(refcnt))
		goto out_put_cpu;

	atomic_inc(refcnt);

out_put_cpu:
	put_cpu();

	return err;
}
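/*
 * The crypto_tfm_ctx() cast above works because every cryptd context
 * structure (blkcipher, hash and aead alike) starts with an atomic_t
 * refcnt as its first member. The count is only non-zero for tfms
 * handed out by the cryptd_alloc_*() helpers at the bottom of this
 * file; for everything else atomic_read() sees zero and no extra
 * reference is taken here.
 */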
/* Called in workqueue context, do one real cryption work (via
 * req->complete) and reschedule itself if there is more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 * preempt_disable/enable is used to prevent being preempted by
	 * cryptd_enqueue_request(). local_bh_disable/enable is used to prevent
	 * cryptd_enqueue_request() being accessed from software interrupts.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}
static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}
static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}
static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}
static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_ablkcipher *tfm;
	struct blkcipher_desc desc;
	int refcnt;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	tfm = crypto_ablkcipher_reqtfm(req);
	ctx = crypto_ablkcipher_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(tfm);
}
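/*
 * The local_bh_disable/enable pair around the completion call above
 * (and in the hash and aead paths below) appears intended to give the
 * caller's completion handler the BH-disabled environment it would see
 * if invoked from softirq context, even though cryptd completes
 * requests from process context.
 */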
static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}
static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}
static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}
static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}
static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}
static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	int refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}
static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}
static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}
static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}
static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	cryptd_hash_complete(req, err);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
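/*
 * Note that unlike init/update/final/finup/digest, export and import
 * above are not bounced through the work queue: they act synchronously
 * on the shash state embedded in the request context, in the caller's
 * own context.
 */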
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.halg.base.cra_flags = type;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.statesize = salg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}
static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	aead_request_set_tfm(req, child);
	err = crypt(req);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = atomic_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}
static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}
static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}
static struct cryptd_queue queue;
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_blkcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}
static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};
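/*
 * With this template registered, instances are created on demand: a
 * request for, say, "cryptd(sha1)" reaches cryptd_create(), which
 * wraps the named algorithm in an asynchronous instance whose driver
 * name becomes "cryptd(<driver-name>)" and whose priority is the
 * wrapped algorithm's plus 50 (see cryptd_init_instance()).
 */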
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_blkcipher_ctx *ctx;
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type = crypto_skcipher_type(type);
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_tfm_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
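/*
 * Illustrative sketch only (algorithm name is an example, error
 * handling and request submission elided): a caller wanting its
 * blkcipher work deferred to cryptd might do:
 *
 *	struct cryptd_ablkcipher *ctfm;
 *
 *	ctfm = cryptd_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...submit requests via &ctfm->base, or reach the synchronous
 *	cipher with cryptd_ablkcipher_child(ctfm)...
 *	cryptd_free_ablkcipher(ctfm);
 */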
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
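/*
 * Illustrative sketch only (algorithm name is an example, error
 * handling elided):
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("sha1", 0, 0);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...hash asynchronously via &ctfm->base, or use
 *	cryptd_ahash_child()/cryptd_shash_desc() below for the
 *	synchronous fast path...
 *	cryptd_free_ahash(ctfm);
 */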
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	atomic_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
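/*
 * Illustrative sketch only: SIMD-accelerated AEAD drivers typically
 * wrap an internal-only implementation in cryptd so the real work runs
 * where the FPU is usable. The name and flags below are an example,
 * not a fixed contract:
 *
 *	struct cryptd_aead *ctfm;
 *
 *	ctfm = cryptd_alloc_aead("__gcm-aes-example", CRYPTO_ALG_INTERNAL,
 *				 CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	cryptd_free_aead(ctfm);
 */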
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return atomic_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (atomic_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");