Skip to content

Commit f2ffe5a

Browse files
committed
crypto: hash - Add request chaining API
This adds request chaining to the ahash interface. Request chaining allows multiple requests to be submitted in one shot. An algorithm can opt in to receiving chained requests by setting the flag CRYPTO_ALG_REQ_CHAIN; if that bit is not set, the API breaks a chained request up and submits the individual requests one by one. A new err field is added to struct crypto_async_request to record the return value of each individual request. Signed-off-by: Herbert Xu <[email protected]>
1 parent f407764 commit f2ffe5a

File tree

6 files changed: +299 −37 lines

crypto/ahash.c

Lines changed: 235 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -42,11 +42,19 @@ struct crypto_hash_walk {
4242
};
4343

4444
struct ahash_save_req_state {
45-
struct ahash_request *req;
45+
struct list_head head;
46+
struct ahash_request *req0;
47+
struct ahash_request *cur;
48+
int (*op)(struct ahash_request *req);
4649
crypto_completion_t compl;
4750
void *data;
4851
};
4952

53+
static void ahash_reqchain_done(void *data, int err);
54+
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt);
55+
static void ahash_restore_req(struct ahash_request *req);
56+
static int ahash_def_finup(struct ahash_request *req);
57+
5058
static int hash_walk_next(struct crypto_hash_walk *walk)
5159
{
5260
unsigned int offset = walk->offset;
@@ -273,24 +281,145 @@ int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
273281
}
274282
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);
275283

284+
static int ahash_reqchain_finish(struct ahash_save_req_state *state,
285+
int err, u32 mask)
286+
{
287+
struct ahash_request *req0 = state->req0;
288+
struct ahash_request *req = state->cur;
289+
struct ahash_request *n;
290+
291+
req->base.err = err;
292+
293+
if (req != req0)
294+
list_add_tail(&req->base.list, &req0->base.list);
295+
296+
list_for_each_entry_safe(req, n, &state->head, base.list) {
297+
list_del_init(&req->base.list);
298+
299+
req->base.flags &= mask;
300+
req->base.complete = ahash_reqchain_done;
301+
req->base.data = state;
302+
state->cur = req;
303+
err = state->op(req);
304+
305+
if (err == -EINPROGRESS) {
306+
if (!list_empty(&state->head))
307+
err = -EBUSY;
308+
goto out;
309+
}
310+
311+
if (err == -EBUSY)
312+
goto out;
313+
314+
req->base.err = err;
315+
list_add_tail(&req->base.list, &req0->base.list);
316+
}
317+
318+
ahash_restore_req(req0);
319+
320+
out:
321+
return err;
322+
}
323+
324+
static void ahash_reqchain_done(void *data, int err)
325+
{
326+
struct ahash_save_req_state *state = data;
327+
crypto_completion_t compl = state->compl;
328+
329+
data = state->data;
330+
331+
if (err == -EINPROGRESS) {
332+
if (!list_empty(&state->head))
333+
return;
334+
goto notify;
335+
}
336+
337+
err = ahash_reqchain_finish(state, err, CRYPTO_TFM_REQ_MAY_BACKLOG);
338+
if (err == -EBUSY)
339+
return;
340+
341+
notify:
342+
compl(data, err);
343+
}
344+
345+
static int ahash_do_req_chain(struct ahash_request *req,
346+
int (*op)(struct ahash_request *req))
347+
{
348+
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
349+
struct ahash_save_req_state *state;
350+
struct ahash_save_req_state state0;
351+
int err;
352+
353+
if (!ahash_request_chained(req) || crypto_ahash_req_chain(tfm))
354+
return op(req);
355+
356+
state = &state0;
357+
358+
if (ahash_is_async(tfm)) {
359+
err = ahash_save_req(req, ahash_reqchain_done);
360+
if (err) {
361+
struct ahash_request *r2;
362+
363+
req->base.err = err;
364+
list_for_each_entry(r2, &req->base.list, base.list)
365+
r2->base.err = err;
366+
367+
return err;
368+
}
369+
370+
state = req->base.data;
371+
}
372+
373+
state->op = op;
374+
state->cur = req;
375+
INIT_LIST_HEAD(&state->head);
376+
list_splice_init(&req->base.list, &state->head);
377+
378+
err = op(req);
379+
if (err == -EBUSY || err == -EINPROGRESS)
380+
return -EBUSY;
381+
382+
return ahash_reqchain_finish(state, err, ~0);
383+
}
384+
276385
int crypto_ahash_init(struct ahash_request *req)
277386
{
278387
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
279388

280-
if (likely(tfm->using_shash))
281-
return crypto_shash_init(prepare_shash_desc(req, tfm));
389+
if (likely(tfm->using_shash)) {
390+
struct ahash_request *r2;
391+
int err;
392+
393+
err = crypto_shash_init(prepare_shash_desc(req, tfm));
394+
req->base.err = err;
395+
396+
list_for_each_entry(r2, &req->base.list, base.list) {
397+
struct shash_desc *desc;
398+
399+
desc = prepare_shash_desc(r2, tfm);
400+
r2->base.err = crypto_shash_init(desc);
401+
}
402+
403+
return err;
404+
}
405+
282406
if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
283407
return -ENOKEY;
284-
return crypto_ahash_alg(tfm)->init(req);
408+
409+
return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->init);
285410
}
286411
EXPORT_SYMBOL_GPL(crypto_ahash_init);
287412

288413
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
289414
{
415+
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
290416
struct ahash_save_req_state *state;
291417
gfp_t gfp;
292418
u32 flags;
293419

420+
if (!ahash_is_async(tfm))
421+
return 0;
422+
294423
flags = ahash_request_flags(req);
295424
gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
296425
state = kmalloc(sizeof(*state), gfp);
@@ -301,14 +430,20 @@ static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
301430
state->data = req->base.data;
302431
req->base.complete = cplt;
303432
req->base.data = state;
304-
state->req = req;
433+
state->req0 = req;
305434

306435
return 0;
307436
}
308437

309438
static void ahash_restore_req(struct ahash_request *req)
310439
{
311-
struct ahash_save_req_state *state = req->base.data;
440+
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
441+
struct ahash_save_req_state *state;
442+
443+
if (!ahash_is_async(tfm))
444+
return;
445+
446+
state = req->base.data;
312447

313448
req->base.complete = state->compl;
314449
req->base.data = state->data;
@@ -319,53 +454,112 @@ int crypto_ahash_update(struct ahash_request *req)
319454
{
320455
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
321456

322-
if (likely(tfm->using_shash))
323-
return shash_ahash_update(req, ahash_request_ctx(req));
457+
if (likely(tfm->using_shash)) {
458+
struct ahash_request *r2;
459+
int err;
460+
461+
err = shash_ahash_update(req, ahash_request_ctx(req));
462+
req->base.err = err;
463+
464+
list_for_each_entry(r2, &req->base.list, base.list) {
465+
struct shash_desc *desc;
324466

325-
return crypto_ahash_alg(tfm)->update(req);
467+
desc = ahash_request_ctx(r2);
468+
r2->base.err = shash_ahash_update(r2, desc);
469+
}
470+
471+
return err;
472+
}
473+
474+
return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->update);
326475
}
327476
EXPORT_SYMBOL_GPL(crypto_ahash_update);
328477

329478
int crypto_ahash_final(struct ahash_request *req)
330479
{
331480
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
332481

333-
if (likely(tfm->using_shash))
334-
return crypto_shash_final(ahash_request_ctx(req), req->result);
482+
if (likely(tfm->using_shash)) {
483+
struct ahash_request *r2;
484+
int err;
485+
486+
err = crypto_shash_final(ahash_request_ctx(req), req->result);
487+
req->base.err = err;
488+
489+
list_for_each_entry(r2, &req->base.list, base.list) {
490+
struct shash_desc *desc;
335491

336-
return crypto_ahash_alg(tfm)->final(req);
492+
desc = ahash_request_ctx(r2);
493+
r2->base.err = crypto_shash_final(desc, r2->result);
494+
}
495+
496+
return err;
497+
}
498+
499+
return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->final);
337500
}
338501
EXPORT_SYMBOL_GPL(crypto_ahash_final);
339502

340503
int crypto_ahash_finup(struct ahash_request *req)
341504
{
342505
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
343506

344-
if (likely(tfm->using_shash))
345-
return shash_ahash_finup(req, ahash_request_ctx(req));
507+
if (likely(tfm->using_shash)) {
508+
struct ahash_request *r2;
509+
int err;
510+
511+
err = shash_ahash_finup(req, ahash_request_ctx(req));
512+
req->base.err = err;
513+
514+
list_for_each_entry(r2, &req->base.list, base.list) {
515+
struct shash_desc *desc;
516+
517+
desc = ahash_request_ctx(r2);
518+
r2->base.err = shash_ahash_finup(r2, desc);
519+
}
520+
521+
return err;
522+
}
346523

347-
return crypto_ahash_alg(tfm)->finup(req);
524+
if (!crypto_ahash_alg(tfm)->finup)
525+
return ahash_def_finup(req);
526+
527+
return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->finup);
348528
}
349529
EXPORT_SYMBOL_GPL(crypto_ahash_finup);
350530

351531
int crypto_ahash_digest(struct ahash_request *req)
352532
{
353533
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
354534

355-
if (likely(tfm->using_shash))
356-
return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
535+
if (likely(tfm->using_shash)) {
536+
struct ahash_request *r2;
537+
int err;
538+
539+
err = shash_ahash_digest(req, prepare_shash_desc(req, tfm));
540+
req->base.err = err;
541+
542+
list_for_each_entry(r2, &req->base.list, base.list) {
543+
struct shash_desc *desc;
544+
545+
desc = prepare_shash_desc(r2, tfm);
546+
r2->base.err = shash_ahash_digest(r2, desc);
547+
}
548+
549+
return err;
550+
}
357551

358552
if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
359553
return -ENOKEY;
360554

361-
return crypto_ahash_alg(tfm)->digest(req);
555+
return ahash_do_req_chain(req, crypto_ahash_alg(tfm)->digest);
362556
}
363557
EXPORT_SYMBOL_GPL(crypto_ahash_digest);
364558

365559
static void ahash_def_finup_done2(void *data, int err)
366560
{
367561
struct ahash_save_req_state *state = data;
368-
struct ahash_request *areq = state->req;
562+
struct ahash_request *areq = state->req0;
369563

370564
if (err == -EINPROGRESS)
371565
return;
@@ -376,12 +570,15 @@ static void ahash_def_finup_done2(void *data, int err)
376570

377571
static int ahash_def_finup_finish1(struct ahash_request *req, int err)
378572
{
573+
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
574+
379575
if (err)
380576
goto out;
381577

382-
req->base.complete = ahash_def_finup_done2;
578+
if (ahash_is_async(tfm))
579+
req->base.complete = ahash_def_finup_done2;
383580

384-
err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(req);
581+
err = crypto_ahash_final(req);
385582
if (err == -EINPROGRESS || err == -EBUSY)
386583
return err;
387584

@@ -397,7 +594,7 @@ static void ahash_def_finup_done1(void *data, int err)
397594
struct ahash_request *areq;
398595

399596
state = *state0;
400-
areq = state.req;
597+
areq = state.req0;
401598
if (err == -EINPROGRESS)
402599
goto out;
403600

@@ -413,14 +610,13 @@ static void ahash_def_finup_done1(void *data, int err)
413610

414611
static int ahash_def_finup(struct ahash_request *req)
415612
{
416-
struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
417613
int err;
418614

419615
err = ahash_save_req(req, ahash_def_finup_done1);
420616
if (err)
421617
return err;
422618

423-
err = crypto_ahash_alg(tfm)->update(req);
619+
err = crypto_ahash_update(req);
424620
if (err == -EINPROGRESS || err == -EBUSY)
425621
return err;
426622

@@ -635,8 +831,6 @@ static int ahash_prepare_alg(struct ahash_alg *alg)
635831
base->cra_type = &crypto_ahash_type;
636832
base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;
637833

638-
if (!alg->finup)
639-
alg->finup = ahash_def_finup;
640834
if (!alg->setkey)
641835
alg->setkey = ahash_nosetkey;
642836

@@ -707,5 +901,20 @@ int ahash_register_instance(struct crypto_template *tmpl,
707901
}
708902
EXPORT_SYMBOL_GPL(ahash_register_instance);
709903

904+
void ahash_request_free(struct ahash_request *req)
905+
{
906+
struct ahash_request *tmp;
907+
struct ahash_request *r2;
908+
909+
if (unlikely(!req))
910+
return;
911+
912+
list_for_each_entry_safe(r2, tmp, &req->base.list, base.list)
913+
kfree_sensitive(r2);
914+
915+
kfree_sensitive(req);
916+
}
917+
EXPORT_SYMBOL_GPL(ahash_request_free);
918+
710919
MODULE_LICENSE("GPL");
711920
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");

0 commit comments

Comments
 (0)