@@ -791,6 +791,127 @@ static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
 	return 0;
 }
 
+static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
+			      unsigned int assoclen, u8 *hash_subkey,
+			      u8 *iv, void *aes_ctx)
+{
+	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
+	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
+	struct gcm_context_data data AESNI_ALIGN_ATTR;
+	struct scatter_walk dst_sg_walk = {};
+	unsigned long left = req->cryptlen;
+	unsigned long len, srclen, dstlen;
+	struct scatter_walk assoc_sg_walk;
+	struct scatter_walk src_sg_walk;
+	struct scatterlist src_start[2];
+	struct scatterlist dst_start[2];
+	struct scatterlist *src_sg;
+	struct scatterlist *dst_sg;
+	u8 *src, *dst, *assoc;
+	u8 *assocmem = NULL;
+	u8 authTag[16];
+
+	if (!enc)
+		left -= auth_tag_len;
+
+	/* Linearize assoc, if not already linear */
+	if (req->src->length >= assoclen && req->src->length &&
+	    (!PageHighMem(sg_page(req->src)) ||
+	     req->src->offset + req->src->length < PAGE_SIZE)) {
+		scatterwalk_start(&assoc_sg_walk, req->src);
+		assoc = scatterwalk_map(&assoc_sg_walk);
+	} else {
+		/* assoc can be any length, so must be on heap */
+		assocmem = kmalloc(assoclen, GFP_ATOMIC);
+		if (unlikely(!assocmem))
+			return -ENOMEM;
+		assoc = assocmem;
+
+		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
+	}
+
+	src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
+	scatterwalk_start(&src_sg_walk, src_sg);
+	if (req->src != req->dst) {
+		dst_sg = scatterwalk_ffwd(dst_start, req->dst, req->assoclen);
+		scatterwalk_start(&dst_sg_walk, dst_sg);
+	}
+
+	kernel_fpu_begin();
+	aesni_gcm_init(aes_ctx, &data, iv,
+		       hash_subkey, assoc, assoclen);
+	if (req->src != req->dst) {
+		while (left) {
+			src = scatterwalk_map(&src_sg_walk);
+			dst = scatterwalk_map(&dst_sg_walk);
+			srclen = scatterwalk_clamp(&src_sg_walk, left);
+			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
+			len = min(srclen, dstlen);
+			if (len) {
+				if (enc)
+					aesni_gcm_enc_update(aes_ctx, &data,
+							     dst, src, len);
+				else
+					aesni_gcm_dec_update(aes_ctx, &data,
+							     dst, src, len);
+			}
+			left -= len;
+
+			scatterwalk_unmap(src);
+			scatterwalk_unmap(dst);
+			scatterwalk_advance(&src_sg_walk, len);
+			scatterwalk_advance(&dst_sg_walk, len);
+			scatterwalk_done(&src_sg_walk, 0, left);
+			scatterwalk_done(&dst_sg_walk, 1, left);
+		}
+	} else {
+		while (left) {
+			dst = src = scatterwalk_map(&src_sg_walk);
+			len = scatterwalk_clamp(&src_sg_walk, left);
+			if (len) {
+				if (enc)
+					aesni_gcm_enc_update(aes_ctx, &data,
+							     src, src, len);
+				else
+					aesni_gcm_dec_update(aes_ctx, &data,
+							     src, src, len);
+			}
+			left -= len;
+			scatterwalk_unmap(src);
+			scatterwalk_advance(&src_sg_walk, len);
+			scatterwalk_done(&src_sg_walk, 1, left);
+		}
+	}
+	aesni_gcm_finalize(aes_ctx, &data, authTag, auth_tag_len);
+	kernel_fpu_end();
+
+	if (!assocmem)
+		scatterwalk_unmap(assoc);
+	else
+		kfree(assocmem);
+
+	if (!enc) {
+		u8 authTagMsg[16];
+
+		/* Copy out original authTag */
+		scatterwalk_map_and_copy(authTagMsg, req->src,
+					 req->assoclen + req->cryptlen -
+					 auth_tag_len,
+					 auth_tag_len, 0);
+
+		/* Compare generated tag with passed in tag. */
+		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
+			-EBADMSG : 0;
+	}
+
+	/* Copy in the authTag */
+	scatterwalk_map_and_copy(authTag, req->dst,
+				 req->assoclen + req->cryptlen,
+				 auth_tag_len, 1);
+
+	return 0;
+}
+
 static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
 			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
 {
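The core of gcmaes_crypt_by_sg() above is the map/clamp/update/advance/done walk: each pass handles at most one physically contiguous piece of the scatterlist and feeds it to the chunk-capable asm update routines, so no bounce buffer is needed for the ciphertext. Below is a minimal userspace sketch of that loop shape, assuming nothing from the kernel: seg, walk, walk_map, walk_clamp, walk_advance, toy_update and run_walk are all invented stand-ins for scatterlist/scatter_walk and the aesni update calls, not real APIs.

#include <stdio.h>
#include <stddef.h>

/* Hypothetical stand-ins for struct scatterlist / struct scatter_walk;
 * these names only mimic the shape of the kernel interfaces. */
struct seg  { unsigned char *buf; size_t len; };
struct walk { struct seg *segs; size_t nsegs, idx, off; };

/* Like scatterwalk_map(): address of the current position. */
static unsigned char *walk_map(struct walk *w)
{
	return w->segs[w->idx].buf + w->off;
}

/* Like scatterwalk_clamp(): the largest contiguous run available right
 * now, capped by how much of the message is left overall. */
static size_t walk_clamp(struct walk *w, size_t left)
{
	size_t avail = w->segs[w->idx].len - w->off;

	return left < avail ? left : avail;
}

/* Like scatterwalk_advance() + scatterwalk_done() folded together:
 * consume len bytes and step to the next segment when exhausted. */
static void walk_advance(struct walk *w, size_t len)
{
	w->off += len;
	if (w->off == w->segs[w->idx].len && w->idx + 1 < w->nsegs) {
		w->idx++;
		w->off = 0;
	}
}

/* Chunk-at-a-time in-place transform, standing in for
 * aesni_gcm_enc_update()/aesni_gcm_dec_update(). XOR is its own
 * inverse, so running the walk twice round-trips the data. */
static void toy_update(unsigned char *dst, const unsigned char *src, size_t len)
{
	for (size_t i = 0; i < len; i++)
		dst[i] = src[i] ^ 0x5a;
}

/* Same shape as the req->src == req->dst loop in gcmaes_crypt_by_sg(). */
static void run_walk(struct seg *segs, size_t nsegs, size_t total)
{
	struct walk w = { segs, nsegs, 0, 0 };
	size_t left = total;

	while (left) {
		unsigned char *p = walk_map(&w);
		size_t len = walk_clamp(&w, left);

		toy_update(p, p, len);	/* dst == src: in-place case */
		left -= len;
		walk_advance(&w, len);
	}
}

int main(void)
{
	unsigned char a[] = "hello ", b[] = "world";
	struct seg segs[] = { { a, sizeof(a) }, { b, sizeof(b) } };

	run_walk(segs, 2, sizeof(a) + sizeof(b));	/* "encrypt" in place */
	run_walk(segs, 2, sizeof(a) + sizeof(b));	/* XOR again to undo */
	printf("%s%s\n", a, b);				/* prints: hello world */
	return 0;
}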
@@ -802,6 +923,12 @@ static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
 	struct scatter_walk dst_sg_walk = {};
 	struct gcm_context_data data AESNI_ALIGN_ATTR;
 
+	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
+	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
+	    req->cryptlen < AVX_GEN2_OPTSIZE) {
+		return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
+					  aes_ctx);
+	}
 	if (sg_is_last(req->src) &&
 	    (!PageHighMem(sg_page(req->src)) ||
 	     req->src->offset + req->src->length <= PAGE_SIZE) &&
@@ -868,6 +995,12 @@ static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
 	struct gcm_context_data data AESNI_ALIGN_ATTR;
 	int retval = 0;
 
+	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
+	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
+	    req->cryptlen < AVX_GEN2_OPTSIZE) {
+		return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
+					  aes_ctx);
+	}
 	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
 
 	if (sg_is_last(req->src) &&
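One detail worth noting in the decrypt path of gcmaes_crypt_by_sg(): the expected tag is copied out of the end of req->src and checked with crypto_memneq() rather than memcmp(), so the comparison takes the same time whether the tags differ in the first byte or the last. A sketch of the constant-time idea behind crypto_memneq() follows; the real kernel helper is more defensive against compiler optimizations, and tag_memneq here is an invented name, not the kernel function.

#include <stddef.h>

/* Accumulate all byte differences instead of returning at the first
 * mismatch, so the loop runs the same number of iterations whether the
 * tags differ in byte 0, in byte 15, or not at all. */
static int tag_memneq(const unsigned char *a, const unsigned char *b, size_t n)
{
	unsigned char diff = 0;

	while (n--)
		diff |= *a++ ^ *b++;

	return diff;	/* nonzero: tags differ, caller returns -EBADMSG */
}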