
Commit 81760ea

crypto: cryptd - Add helpers to check whether a tfm is queued
This patch adds helpers to check whether a given tfm is currently queued. This is meant to be used by ablk_helper and similar entities to ensure that no reordering is introduced because of requests queued in cryptd with respect to requests being processed in softirq context.

The per-cpu queue length limit is also increased to 1000 in line with network limits.

Signed-off-by: Herbert Xu <[email protected]>
1 parent 47a1f0b commit 81760ea
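
For context, the intended caller pattern (ablk_helper and similar wrappers, per the commit message) is: before processing a request synchronously in softirq context, check whether cryptd still holds queued requests for the same tfm and, if so, route the new request through cryptd as well so it cannot overtake them. The sketch below is illustrative only and is not part of this patch; everything outside the cryptd_* and crypto_* API calls (the ctx layout, the example_* names, and the SIMD-availability test) is an assumption.

/* Illustrative sketch (not part of this patch): an ablk_helper-style
 * encrypt path that consults cryptd_ablkcipher_queued() to preserve
 * request ordering.  Names other than the cryptd_*/crypto_* API are assumed.
 */
static int example_ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_helper_ctx *ctx = crypto_ablkcipher_ctx(tfm); /* assumed ctx type */

        /* If the SIMD unit is unusable here, or cryptd already has requests
         * queued for this tfm, keep going through cryptd so ordering between
         * queued and directly processed requests is preserved.
         */
        if (!irq_fpu_usable() || cryptd_ablkcipher_queued(ctx->cryptd_tfm)) {
                struct ablkcipher_request *cryptd_req = ablkcipher_request_ctx(req);

                *cryptd_req = *req;
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        }

        /* No pending cryptd work for this tfm: the direct path is safe. */
        return example_sync_encrypt(req); /* assumed synchronous helper */
}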

2 files changed: 118 additions and 19 deletions

crypto/cryptd.c

Lines changed: 113 additions & 19 deletions
@@ -22,6 +22,7 @@
 #include <crypto/internal/aead.h>
 #include <crypto/cryptd.h>
 #include <crypto/crypto_wq.h>
+#include <linux/atomic.h>
 #include <linux/err.h>
 #include <linux/init.h>
 #include <linux/kernel.h>
@@ -31,7 +32,7 @@
 #include <linux/sched.h>
 #include <linux/slab.h>
 
-#define CRYPTD_MAX_CPU_QLEN 100
+#define CRYPTD_MAX_CPU_QLEN 1000
 
 struct cryptd_cpu_queue {
         struct crypto_queue queue;
@@ -58,6 +59,7 @@ struct aead_instance_ctx {
 };
 
 struct cryptd_blkcipher_ctx {
+        atomic_t refcnt;
         struct crypto_blkcipher *child;
 };
 
@@ -66,6 +68,7 @@ struct cryptd_blkcipher_request_ctx {
 };
 
 struct cryptd_hash_ctx {
+        atomic_t refcnt;
         struct crypto_shash *child;
 };
 
@@ -75,6 +78,7 @@ struct cryptd_hash_request_ctx {
 };
 
 struct cryptd_aead_ctx {
+        atomic_t refcnt;
         struct crypto_aead *child;
 };
 
@@ -118,11 +122,29 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
 {
         int cpu, err;
         struct cryptd_cpu_queue *cpu_queue;
+        struct crypto_tfm *tfm;
+        atomic_t *refcnt;
+        bool may_backlog;
 
         cpu = get_cpu();
         cpu_queue = this_cpu_ptr(queue->cpu_queue);
         err = crypto_enqueue_request(&cpu_queue->queue, request);
+
+        refcnt = crypto_tfm_ctx(request->tfm);
+        may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;
+
+        if (err == -EBUSY && !may_backlog)
+                goto out_put_cpu;
+
         queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
+
+        if (!atomic_read(refcnt))
+                goto out_put_cpu;
+
+        tfm = request->tfm;
+        atomic_inc(refcnt);
+
+out_put_cpu:
         put_cpu();
 
         return err;
@@ -206,7 +228,10 @@ static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
                                                   unsigned int len))
 {
         struct cryptd_blkcipher_request_ctx *rctx;
+        struct cryptd_blkcipher_ctx *ctx;
+        struct crypto_ablkcipher *tfm;
         struct blkcipher_desc desc;
+        int refcnt;
 
         rctx = ablkcipher_request_ctx(req);
 
@@ -222,9 +247,16 @@ static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
         req->base.complete = rctx->complete;
 
 out:
+        tfm = crypto_ablkcipher_reqtfm(req);
+        ctx = crypto_ablkcipher_ctx(tfm);
+        refcnt = atomic_read(&ctx->refcnt);
+
         local_bh_disable();
         rctx->complete(&req->base, err);
         local_bh_enable();
+
+        if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+                crypto_free_ablkcipher(tfm);
 }
 
 static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
@@ -456,6 +488,21 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
         return cryptd_enqueue_request(queue, &req->base);
 }
 
+static void cryptd_hash_complete(struct ahash_request *req, int err)
+{
+        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
+        struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
+        int refcnt = atomic_read(&ctx->refcnt);
+
+        local_bh_disable();
+        rctx->complete(&req->base, err);
+        local_bh_enable();
+
+        if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+                crypto_free_ahash(tfm);
+}
+
 static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
 {
         struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
@@ -475,9 +522,7 @@ static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
         req->base.complete = rctx->complete;
 
 out:
-        local_bh_disable();
-        rctx->complete(&req->base, err);
-        local_bh_enable();
+        cryptd_hash_complete(req, err);
 }
 
 static int cryptd_hash_init_enqueue(struct ahash_request *req)
@@ -500,9 +545,7 @@ static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
         req->base.complete = rctx->complete;
 
 out:
-        local_bh_disable();
-        rctx->complete(&req->base, err);
-        local_bh_enable();
+        cryptd_hash_complete(req, err);
 }
 
 static int cryptd_hash_update_enqueue(struct ahash_request *req)
@@ -523,9 +566,7 @@ static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
         req->base.complete = rctx->complete;
 
 out:
-        local_bh_disable();
-        rctx->complete(&req->base, err);
-        local_bh_enable();
+        cryptd_hash_complete(req, err);
 }
 
 static int cryptd_hash_final_enqueue(struct ahash_request *req)
@@ -546,9 +587,7 @@ static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
         req->base.complete = rctx->complete;
 
 out:
-        local_bh_disable();
-        rctx->complete(&req->base, err);
-        local_bh_enable();
+        cryptd_hash_complete(req, err);
 }
 
 static int cryptd_hash_finup_enqueue(struct ahash_request *req)
@@ -575,9 +614,7 @@ static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
         req->base.complete = rctx->complete;
 
 out:
-        local_bh_disable();
-        rctx->complete(&req->base, err);
-        local_bh_enable();
+        cryptd_hash_complete(req, err);
 }
 
 static int cryptd_hash_digest_enqueue(struct ahash_request *req)
@@ -688,7 +725,10 @@ static void cryptd_aead_crypt(struct aead_request *req,
                         int (*crypt)(struct aead_request *req))
 {
         struct cryptd_aead_request_ctx *rctx;
+        struct cryptd_aead_ctx *ctx;
         crypto_completion_t compl;
+        struct crypto_aead *tfm;
+        int refcnt;
 
         rctx = aead_request_ctx(req);
         compl = rctx->complete;
@@ -697,10 +737,18 @@ static void cryptd_aead_crypt(struct aead_request *req,
                 goto out;
         aead_request_set_tfm(req, child);
         err = crypt( req );
+
 out:
+        tfm = crypto_aead_reqtfm(req);
+        ctx = crypto_aead_ctx(tfm);
+        refcnt = atomic_read(&ctx->refcnt);
+
         local_bh_disable();
         compl(&req->base, err);
         local_bh_enable();
+
+        if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+                crypto_free_aead(tfm);
 }
 
 static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
@@ -883,6 +931,7 @@ struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
                                                   u32 type, u32 mask)
 {
         char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+        struct cryptd_blkcipher_ctx *ctx;
         struct crypto_tfm *tfm;
 
         if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
@@ -899,6 +948,9 @@ struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
                 return ERR_PTR(-EINVAL);
         }
 
+        ctx = crypto_tfm_ctx(tfm);
+        atomic_set(&ctx->refcnt, 1);
+
         return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
 }
 EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
@@ -910,16 +962,28 @@ struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
 
+bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
+{
+        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
+
+        return atomic_read(&ctx->refcnt) - 1;
+}
+EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);
+
 void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
 {
-        crypto_free_ablkcipher(&tfm->base);
+        struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
+
+        if (atomic_dec_and_test(&ctx->refcnt))
+                crypto_free_ablkcipher(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
 
 struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
                                         u32 type, u32 mask)
 {
         char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+        struct cryptd_hash_ctx *ctx;
         struct crypto_ahash *tfm;
 
         if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
@@ -933,6 +997,9 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
                 return ERR_PTR(-EINVAL);
         }
 
+        ctx = crypto_ahash_ctx(tfm);
+        atomic_set(&ctx->refcnt, 1);
+
         return __cryptd_ahash_cast(tfm);
 }
 EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
@@ -952,16 +1019,28 @@ struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
 }
 EXPORT_SYMBOL_GPL(cryptd_shash_desc);
 
+bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
+{
+        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
+
+        return atomic_read(&ctx->refcnt) - 1;
+}
+EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
+
 void cryptd_free_ahash(struct cryptd_ahash *tfm)
 {
-        crypto_free_ahash(&tfm->base);
+        struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
+
+        if (atomic_dec_and_test(&ctx->refcnt))
+                crypto_free_ahash(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ahash);
 
 struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
                                       u32 type, u32 mask)
 {
         char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+        struct cryptd_aead_ctx *ctx;
         struct crypto_aead *tfm;
 
         if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
@@ -974,6 +1053,10 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
                 crypto_free_aead(tfm);
                 return ERR_PTR(-EINVAL);
         }
+
+        ctx = crypto_aead_ctx(tfm);
+        atomic_set(&ctx->refcnt, 1);
+
         return __cryptd_aead_cast(tfm);
 }
 EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
@@ -986,9 +1069,20 @@ struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_aead_child);
 
+bool cryptd_aead_queued(struct cryptd_aead *tfm)
+{
+        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
+
+        return atomic_read(&ctx->refcnt) - 1;
+}
+EXPORT_SYMBOL_GPL(cryptd_aead_queued);
+
 void cryptd_free_aead(struct cryptd_aead *tfm)
 {
-        crypto_free_aead(&tfm->base);
+        struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
+
+        if (atomic_dec_and_test(&ctx->refcnt))
+                crypto_free_aead(&tfm->base);
 }
 EXPORT_SYMBOL_GPL(cryptd_free_aead);
 
include/crypto/cryptd.h

Lines changed: 5 additions & 0 deletions
@@ -31,6 +31,7 @@ static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
 struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
                                                   u32 type, u32 mask);
 struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
+bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
 void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
 
 struct cryptd_ahash {
@@ -48,6 +49,8 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
                                         u32 type, u32 mask);
 struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
 struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
+/* Must be called without moving CPUs. */
+bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
 void cryptd_free_ahash(struct cryptd_ahash *tfm);
 
 struct cryptd_aead {
@@ -64,6 +67,8 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
                                       u32 type, u32 mask);
 
 struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
+/* Must be called without moving CPUs. */
+bool cryptd_aead_queued(struct cryptd_aead *tfm);
 
 void cryptd_free_aead(struct cryptd_aead *tfm);

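A note on how the reference count above doubles as the "queued" indicator: cryptd_alloc_*() initialises refcnt to 1, cryptd_enqueue_request() takes an extra reference for each request it accepts, the completion paths drop those references, and cryptd_free_*() drops the final one. The new *_queued() helpers therefore report whether refcnt - 1 is non-zero, i.e. whether any request for the tfm is still queued in cryptd or in flight. Because cryptd's queues are per-CPU, the answer is only stable while the caller stays on one CPU, hence the "Must be called without moving CPUs" comments in the header. A minimal sketch of honouring that constraint (the example_* wrapper is hypothetical, not part of this patch):

/* Hypothetical helper: sample the queued state without migrating CPUs.
 * Real callers are expected to already run in a context that cannot
 * migrate (e.g. softirq) and to act on the result in that same context.
 */
static bool example_ahash_queued_on_this_cpu(struct cryptd_ahash *cryptd_tfm)
{
        bool queued;

        get_cpu();      /* disable preemption: stay on the current CPU */
        queued = cryptd_ahash_queued(cryptd_tfm);
        put_cpu();

        return queued;
}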