@@ -17,9 +17,9 @@
  *
  */

-#include <crypto/algapi.h>
 #include <crypto/internal/hash.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
 #include <crypto/cryptd.h>
 #include <crypto/crypto_wq.h>
 #include <linux/atomic.h>
@@ -48,6 +48,11 @@ struct cryptd_instance_ctx {
 	struct cryptd_queue *queue;
 };

+struct skcipherd_instance_ctx {
+	struct crypto_skcipher_spawn spawn;
+	struct cryptd_queue *queue;
+};
+
 struct hashd_instance_ctx {
 	struct crypto_shash_spawn spawn;
 	struct cryptd_queue *queue;
@@ -67,6 +72,15 @@ struct cryptd_blkcipher_request_ctx {
 	crypto_completion_t complete;
 };

+struct cryptd_skcipher_ctx {
+	atomic_t refcnt;
+	struct crypto_skcipher *child;
+};
+
+struct cryptd_skcipher_request_ctx {
+	crypto_completion_t complete;
+};
+
 struct cryptd_hash_ctx {
 	atomic_t refcnt;
 	struct crypto_shash *child;
@@ -430,6 +444,216 @@ static int cryptd_create_blkcipher(struct crypto_template *tmpl,
 	return err;
 }

+static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
+				  const u8 *key, unsigned int keylen)
+{
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
+	struct crypto_skcipher *child = ctx->child;
+	int err;
+
+	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
+					 CRYPTO_TFM_REQ_MASK);
+	err = crypto_skcipher_setkey(child, key, keylen);
+	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
+					  CRYPTO_TFM_RES_MASK);
+	return err;
+}
+
+static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
+	int refcnt = atomic_read(&ctx->refcnt);
+
+	local_bh_disable();
+	rctx->complete(&req->base, err);
+	local_bh_enable();
+
+	if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
+		crypto_free_skcipher(tfm);
+}
+
+static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
+				    int err)
+{
+	struct skcipher_request *req = skcipher_request_cast(base);
+	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_skcipher *child = ctx->child;
+	SKCIPHER_REQUEST_ON_STACK(subreq, child);
+
+	if (unlikely(err == -EINPROGRESS))
+		goto out;
+
+	skcipher_request_set_tfm(subreq, child);
+	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
+				      NULL, NULL);
+	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
+				   req->iv);
+
+	err = crypto_skcipher_encrypt(subreq);
+	skcipher_request_zero(subreq);
+
+	req->base.complete = rctx->complete;
+
+out:
+	cryptd_skcipher_complete(req, err);
+}
+
+static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
+				    int err)
+{
+	struct skcipher_request *req = skcipher_request_cast(base);
+	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_skcipher *child = ctx->child;
+	SKCIPHER_REQUEST_ON_STACK(subreq, child);
+
+	if (unlikely(err == -EINPROGRESS))
+		goto out;
+
+	skcipher_request_set_tfm(subreq, child);
+	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
+				      NULL, NULL);
+	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
+				   req->iv);
+
+	err = crypto_skcipher_decrypt(subreq);
+	skcipher_request_zero(subreq);
+
+	req->base.complete = rctx->complete;
+
+out:
+	cryptd_skcipher_complete(req, err);
+}
+
+static int cryptd_skcipher_enqueue(struct skcipher_request *req,
+				   crypto_completion_t compl)
+{
+	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	struct cryptd_queue *queue;
+
+	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
+	rctx->complete = req->base.complete;
+	req->base.complete = compl;
+
+	return cryptd_enqueue_request(queue, &req->base);
+}
+
+static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
+{
+	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
+}
+
+static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
+{
+	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
+}
+
+static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
+{
+	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
+	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
+	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct crypto_skcipher *cipher;
+
+	cipher = crypto_spawn_skcipher(spawn);
+	if (IS_ERR(cipher))
+		return PTR_ERR(cipher);
+
+	ctx->child = cipher;
+	crypto_skcipher_set_reqsize(
+		tfm, sizeof(struct cryptd_skcipher_request_ctx));
+	return 0;
+}
+
+static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
+{
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+	crypto_free_skcipher(ctx->child);
+}
+
+static void cryptd_skcipher_free(struct skcipher_instance *inst)
+{
+	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);
+
+	crypto_drop_skcipher(&ctx->spawn);
+}
+
+static int cryptd_create_skcipher(struct crypto_template *tmpl,
+				  struct rtattr **tb,
+				  struct cryptd_queue *queue)
+{
+	struct skcipherd_instance_ctx *ctx;
+	struct skcipher_instance *inst;
+	struct skcipher_alg *alg;
+	const char *name;
+	u32 type;
+	u32 mask;
+	int err;
+
+	type = 0;
+	mask = CRYPTO_ALG_ASYNC;
+
+	cryptd_check_internal(tb, &type, &mask);
+
+	name = crypto_attr_alg_name(tb[1]);
+	if (IS_ERR(name))
+		return PTR_ERR(name);
+
+	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
+	if (!inst)
+		return -ENOMEM;
+
+	ctx = skcipher_instance_ctx(inst);
+	ctx->queue = queue;
+
+	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
+	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
+	if (err)
+		goto out_free_inst;
+
+	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
+	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
+	if (err)
+		goto out_drop_skcipher;
+
+	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC |
+				   (alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
+
+	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
+	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
+	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
+	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);
+
+	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);
+
+	inst->alg.init = cryptd_skcipher_init_tfm;
+	inst->alg.exit = cryptd_skcipher_exit_tfm;
+
+	inst->alg.setkey = cryptd_skcipher_setkey;
+	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
+	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;
+
+	inst->free = cryptd_skcipher_free;
+
+	err = skcipher_register_instance(tmpl, inst);
+	if (err) {
+out_drop_skcipher:
+		crypto_drop_skcipher(&ctx->spawn);
+out_free_inst:
+		kfree(inst);
+	}
+	return err;
+}
+
 static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
 {
 	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
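For reference, the on-stack sub-request used by the two completion handlers above comes from the SKCIPHER_REQUEST_ON_STACK() helper. A minimal sketch of that macro, assuming the contemporaneous include/crypto/skcipher.h definition (it is not part of this patch):

	#define SKCIPHER_REQUEST_ON_STACK(name, tfm) \
		char __##name##_desc[sizeof(struct skcipher_request) + \
			crypto_skcipher_reqsize(tfm)] CRYPTO_MINALIGN_ATTR; \
		struct skcipher_request *name = (void *)__##name##_desc

Because the sub-request lives on the worker's stack, skcipher_request_zero() is called after the child operation completes to wipe any sensitive per-request state before the stack frame is reused.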
@@ -893,7 +1117,11 @@ static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
 
 	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
 	case CRYPTO_ALG_TYPE_BLKCIPHER:
-		return cryptd_create_blkcipher(tmpl, tb, &queue);
+		if ((algt->type & CRYPTO_ALG_TYPE_MASK) ==
+		    CRYPTO_ALG_TYPE_BLKCIPHER)
+			return cryptd_create_blkcipher(tmpl, tb, &queue);
+
+		return cryptd_create_skcipher(tmpl, tb, &queue);
 	case CRYPTO_ALG_TYPE_DIGEST:
 		return cryptd_create_hash(tmpl, tb, &queue);
 	case CRYPTO_ALG_TYPE_AEAD:
@@ -983,6 +1211,58 @@ void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
 }
 EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);

+struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
+					      u32 type, u32 mask)
+{
+	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
+	struct cryptd_skcipher_ctx *ctx;
+	struct crypto_skcipher *tfm;
+
+	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
+		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
+		return ERR_PTR(-EINVAL);
+
+	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
+	if (IS_ERR(tfm))
+		return ERR_CAST(tfm);
+
+	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
+		crypto_free_skcipher(tfm);
+		return ERR_PTR(-EINVAL);
+	}
+
+	ctx = crypto_skcipher_ctx(tfm);
+	atomic_set(&ctx->refcnt, 1);
+
+	return container_of(tfm, struct cryptd_skcipher, base);
+}
+EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
+
+struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
+{
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
+
+	return ctx->child;
+}
+EXPORT_SYMBOL_GPL(cryptd_skcipher_child);
+
+bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
+{
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
+
+	return atomic_read(&ctx->refcnt) - 1;
+}
+EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
+
+void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
+{
+	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
+
+	if (atomic_dec_and_test(&ctx->refcnt))
+		crypto_free_skcipher(&tfm->base);
+}
+EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
+
 struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
 					u32 type, u32 mask)
 {
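A caller-side sketch of the helpers exported above, loosely modeled on how SIMD glue code consumes cryptd; the algorithm name "__aes-aesni" and the function example_setup() are illustrative assumptions, not part of this patch:

	/*
	 * Illustrative only: wrap an internal synchronous skcipher in
	 * cryptd so it can also be driven from contexts where the
	 * underlying implementation cannot run directly.
	 */
	static struct cryptd_skcipher *example_setup(void)
	{
		struct cryptd_skcipher *ctfm;

		ctfm = cryptd_alloc_skcipher("__aes-aesni",
					     CRYPTO_ALG_INTERNAL,
					     CRYPTO_ALG_INTERNAL);
		if (IS_ERR(ctfm))
			return ctfm;

		/*
		 * &ctfm->base is an async crypto_skcipher whose requests
		 * run from the cryptd workqueue; cryptd_skcipher_child()
		 * exposes the synchronous inner tfm for callers that can
		 * invoke the cipher directly (e.g. when SIMD is usable).
		 */
		return ctfm;
	}

Teardown pairs with cryptd_free_skcipher(), which drops the reference taken in cryptd_alloc_skcipher() and frees the tfm once no queued request still holds it.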