/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>

#define CRYPTD_MAX_CPU_QLEN 100

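/*
 * Each CPU owns a private request queue and work item, so the fast path
 * needs no cross-CPU locking: a request is queued and later processed on
 * the CPU that submitted it. CRYPTD_MAX_CPU_QLEN bounds each per-CPU
 * queue; once it is full, crypto_enqueue_request() only accepts further
 * requests from callers that set CRYPTO_TFM_REQ_MAY_BACKLOG.
 */
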
struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_blkcipher_ctx {
	struct crypto_blkcipher *child;
};

struct cryptd_blkcipher_request_ctx {
	crypto_completion_t complete;
};

struct cryptd_hash_ctx {
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

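/*
 * Queue a request on the current CPU's queue and kick that CPU's worker.
 * get_cpu()/put_cpu() pin the task so the work item is queued on the same
 * CPU that accepted the request.
 */
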
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int cpu, err;
	struct cryptd_cpu_queue *cpu_queue;

	cpu = get_cpu();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);
	queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
	put_cpu();

	return err;
}

/* Called in workqueue context: do one real unit of crypto work (via
 * req->complete) and reschedule itself if there is more work to do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging the crypto
	 * workqueue. preempt_disable/enable is used to prevent being
	 * preempted by cryptd_enqueue_request(). local_bh_disable/enable
	 * is used to prevent cryptd_enqueue_request() from being accessed
	 * in software interrupt context.
	 */
	local_bh_disable();
	preempt_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	preempt_enable();
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	req->complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(kcrypto_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);

	return ictx->queue;
}

static inline void cryptd_check_internal(struct rtattr **tb, u32 *type,
					 u32 *mask)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return;

	*type |= algt->type & CRYPTO_ALG_INTERNAL;
	*mask |= algt->mask & CRYPTO_ALG_INTERNAL;
}

static int cryptd_blkcipher_setkey(struct crypto_ablkcipher *parent,
				   const u8 *key, unsigned int keylen)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_ablkcipher_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_ablkcipher_set_flags(parent, crypto_blkcipher_get_flags(child) &
					    CRYPTO_TFM_RES_MASK);
	return err;
}

static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
				   struct crypto_blkcipher *child,
				   int err,
				   int (*crypt)(struct blkcipher_desc *desc,
						struct scatterlist *dst,
						struct scatterlist *src,
						unsigned int len))
{
	struct cryptd_blkcipher_request_ctx *rctx;
	struct blkcipher_desc desc;

	rctx = ablkcipher_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc.tfm = child;
	desc.info = req->info;
	desc.flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypt(&desc, req->dst, req->src, req->nbytes);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->encrypt);
}

static void cryptd_blkcipher_decrypt(struct crypto_async_request *req, int err)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(req->tfm);
	struct crypto_blkcipher *child = ctx->child;

	cryptd_blkcipher_crypt(ablkcipher_request_cast(req), child, err,
			       crypto_blkcipher_crt(child)->decrypt);
}

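/*
 * Stash the caller's completion in the request context and substitute
 * cryptd's own callback; the actual encryption or decryption then runs
 * later on the workqueue via cryptd_queue_worker().
 */
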
static int cryptd_blkcipher_enqueue(struct ablkcipher_request *req,
				    crypto_completion_t compl)
{
	struct cryptd_blkcipher_request_ctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_ablkcipher_tfm(tfm));
	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_blkcipher_encrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_encrypt);
}

static int cryptd_blkcipher_decrypt_enqueue(struct ablkcipher_request *req)
{
	return cryptd_blkcipher_enqueue(req, cryptd_blkcipher_decrypt);
}

static int cryptd_blkcipher_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_spawn *spawn = &ictx->spawn;
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_ablkcipher.reqsize =
		sizeof(struct cryptd_blkcipher_request_ctx);
	return 0;
}

static void cryptd_blkcipher_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

static void *cryptd_alloc_instance(struct crypto_alg *alg, unsigned int head,
				   unsigned int tail)
{
	char *p;
	struct crypto_instance *inst;
	int err;

	p = kzalloc(head + sizeof(*inst) + tail, GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = cryptd_init_instance(inst, alg);
	if (err)
		goto out_free_inst;

out:
	return p;

out_free_inst:
	kfree(p);
	p = ERR_PTR(err);
	goto out;
}

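/*
 * Build a "cryptd(...)" ablkcipher instance around a synchronous
 * blkcipher algorithm: the instance advertises the same cipher
 * parameters but defers all work to the cryptd queue.
 */
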
static int cryptd_create_blkcipher(struct crypto_template *tmpl,
				   struct rtattr **tb,
				   struct cryptd_queue *queue)
{
	struct cryptd_instance_ctx *ctx;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	alg = crypto_get_attr_alg(tb, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = cryptd_alloc_instance(alg, 0, sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = crypto_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.cra_flags = type;
	inst->alg.cra_type = &crypto_ablkcipher_type;

	inst->alg.cra_ablkcipher.ivsize = alg->cra_blkcipher.ivsize;
	inst->alg.cra_ablkcipher.min_keysize = alg->cra_blkcipher.min_keysize;
	inst->alg.cra_ablkcipher.max_keysize = alg->cra_blkcipher.max_keysize;

	inst->alg.cra_ablkcipher.geniv = alg->cra_blkcipher.geniv;

	inst->alg.cra_ctxsize = sizeof(struct cryptd_blkcipher_ctx);

	inst->alg.cra_init = cryptd_blkcipher_init_tfm;
	inst->alg.cra_exit = cryptd_blkcipher_exit_tfm;

	inst->alg.cra_ablkcipher.setkey = cryptd_blkcipher_setkey;
	inst->alg.cra_ablkcipher.encrypt = cryptd_blkcipher_encrypt_enqueue;
	inst->alg.cra_ablkcipher.decrypt = cryptd_blkcipher_decrypt_enqueue;

	err = crypto_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_spawn(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;
	int err;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(child, key, keylen);
	crypto_ahash_set_flags(parent, crypto_shash_get_flags(child) &
				       CRYPTO_TFM_RES_MASK);
	return err;
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}

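/*
 * The callbacks below run from the workqueue: each performs the
 * corresponding synchronous shash operation on the child transform,
 * restores the caller's completion and then invokes it.
 */
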
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = crypto_shash_init(desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx;

	rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_update(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = crypto_shash_final(&rctx->desc, req->result);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
{
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	err = shash_ahash_finup(req, &rctx->desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
	struct crypto_shash *child = ctx->child;
	struct ahash_request *req = ahash_request_cast(req_async);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct shash_desc *desc = &rctx->desc;

	if (unlikely(err == -EINPROGRESS))
		goto out;

	desc->tfm = child;
	desc->flags = CRYPTO_TFM_REQ_MAY_SLEEP;

	err = shash_ahash_digest(req, desc);

	req->base.complete = rctx->complete;

out:
	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_import(&rctx->desc, in);
}

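/*
 * Build a "cryptd(...)" ahash instance around a synchronous shash
 * algorithm, exposing it through the asynchronous ahash interface.
 */
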
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *salg;
	struct crypto_alg *alg;
	u32 type = 0;
	u32 mask = 0;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	salg = shash_attr_alg(tb[1], type, mask);
	if (IS_ERR(salg))
		return PTR_ERR(salg);

	alg = &salg->base;
	inst = cryptd_alloc_instance(alg, ahash_instance_headroom(),
				     sizeof(*ctx));
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_init_shash_spawn(&ctx->spawn, salg,
				      ahash_crypto_instance(inst));
	if (err)
		goto out_free_inst;

	type = CRYPTO_ALG_ASYNC;
	if (alg->cra_flags & CRYPTO_ALG_INTERNAL)
		type |= CRYPTO_ALG_INTERNAL;
	inst->alg.halg.base.cra_flags = type;

	inst->alg.halg.digestsize = salg->digestsize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		crypto_drop_shash(&ctx->spawn);
out_free_inst:
		kfree(inst);
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

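/*
 * Runs from the workqueue: point the request at the child AEAD, perform
 * the operation synchronously, then invoke the saved completion.
 */
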
static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child,
			      int err,
			      int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	crypto_completion_t compl;

	rctx = aead_request_ctx(req);
	compl = rctx->complete;

	if (unlikely(err == -EINPROGRESS))
		goto out;
	aead_request_set_tfm(req, child);
	err = crypt(req);
	req->base.complete = rctx->complete;
out:
	compl(&req->base, err);
}

static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
}

static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
{
	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
	struct crypto_aead *child = ctx->child;
	struct aead_request *req;

	req = container_of(areq, struct aead_request, base);
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
}

static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
			 crypto_aead_reqsize(cipher)));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

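/*
 * Build a "cryptd(...)" AEAD instance around an existing AEAD algorithm,
 * deferring encrypt/decrypt to the cryptd workqueue.
 */
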
static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	u32 type = 0;
	u32 mask = CRYPTO_ALG_ASYNC;
	int err;

	cryptd_check_internal(tb, &type, &mask);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	err = aead_register_instance(tmpl, inst);
	if (err) {
out_drop_aead:
		crypto_drop_aead(&ctx->aead_spawn);
out_free_inst:
		kfree(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		return cryptd_create_blkcipher(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_DIGEST:
		return cryptd_create_hash(tmpl, tb, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, &queue);
	}

	return -EINVAL;
}

static void cryptd_free(struct crypto_instance *inst)
{
	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
	struct hashd_instance_ctx *hctx = crypto_instance_ctx(inst);
	struct aead_instance_ctx *aead_ctx = crypto_instance_ctx(inst);

	switch (inst->alg.cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_drop_shash(&hctx->spawn);
		kfree(ahash_instance(inst));
		return;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_drop_aead(&aead_ctx->aead_spawn);
		kfree(aead_instance(inst));
		return;
	default:
		crypto_drop_spawn(&ctx->spawn);
		kfree(inst);
	}
}

static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.free = cryptd_free,
	.module = THIS_MODULE,
};

struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_tfm *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	mask |= (CRYPTO_ALG_GENIV | CRYPTO_ALG_TYPE_BLKCIPHER_MASK);
	tfm = crypto_alloc_base(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_tfm(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);

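/*
 * Example (illustrative sketch, not from this file): a driver that has
 * registered an internal-only cipher could wrap it through cryptd like
 * this; "__driver-ecb-aes-example" is a hypothetical driver name.
 *
 *	struct cryptd_ablkcipher *ctfm =
 *		cryptd_alloc_ablkcipher("__driver-ecb-aes-example",
 *					CRYPTO_ALG_INTERNAL,
 *					CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 */
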
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
{
	struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);

void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
	crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);

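/*
 * Example (illustrative sketch, not from this file): allocating the
 * cryptd wrapper for a hash and reaching its synchronous child;
 * "__ghash-example" stands in for a real internal driver name.
 *
 *	struct cryptd_ahash *hash =
 *		cryptd_alloc_ahash("__ghash-example", CRYPTO_ALG_INTERNAL,
 *				   CRYPTO_ALG_INTERNAL);
 *	if (!IS_ERR(hash)) {
 *		struct crypto_shash *child = cryptd_ahash_child(hash);
 *		...
 *	}
 */
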
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;
	ctx = crypto_aead_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

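/*
 * Example (illustrative sketch, not from this file): an AEAD caller
 * follows the same pattern; "__gcm-aes-example" is a made-up name.
 *
 *	struct cryptd_aead *ctfm =
 *		cryptd_alloc_aead("__gcm-aes-example", CRYPTO_ALG_INTERNAL,
 *				  CRYPTO_ALG_INTERNAL);
 */
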
static int __init cryptd_init(void)
{
	int err;

	err = cryptd_init_queue(&queue, CRYPTD_MAX_CPU_QLEN);
	if (err)
		return err;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		cryptd_fini_queue(&queue);

	return err;
}

static void __exit cryptd_exit(void)
{
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");