@@ -84,8 +84,6 @@ static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
 static inline unsigned int blkcipher_done_fast(struct blkcipher_walk *walk,
                                                unsigned int n)
 {
-	n = walk->nbytes - n;
-
 	if (walk->flags & BLKCIPHER_WALK_COPY) {
 		blkcipher_map_dst(walk);
 		memcpy(walk->dst.virt.addr, walk->page, n);
@@ -109,13 +107,15 @@ int blkcipher_walk_done(struct blkcipher_desc *desc,
 	unsigned int nbytes = 0;
 
 	if (likely(err >= 0)) {
-		unsigned int bsize = crypto_blkcipher_blocksize(tfm);
-		unsigned int n;
+		unsigned int n = walk->nbytes - err;
 
 		if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW)))
-			n = blkcipher_done_fast(walk, err);
-		else
-			n = blkcipher_done_slow(tfm, walk, bsize);
+			n = blkcipher_done_fast(walk, n);
+		else if (WARN_ON(err)) {
+			err = -EINVAL;
+			goto err;
+		} else
+			n = blkcipher_done_slow(tfm, walk, n);
 
 		nbytes = walk->total - n;
 		err = 0;
@@ -132,6 +132,7 @@ int blkcipher_walk_done(struct blkcipher_desc *desc,
 		return blkcipher_walk_next(desc, walk);
 	}
 
+err:
 	if (walk->iv != desc->info)
 		memcpy(desc->info, walk->iv, crypto_blkcipher_ivsize(tfm));
 	if (walk->buffer != walk->page)
@@ -225,12 +226,12 @@ static int blkcipher_walk_next(struct blkcipher_desc *desc,
 {
 	struct crypto_blkcipher *tfm = desc->tfm;
 	unsigned int alignmask = crypto_blkcipher_alignmask(tfm);
-	unsigned int bsize = crypto_blkcipher_blocksize(tfm);
+	unsigned int bsize;
 	unsigned int n;
 	int err;
 
 	n = walk->total;
-	if (unlikely(n < bsize)) {
+	if (unlikely(n < crypto_blkcipher_blocksize(tfm))) {
 		desc->flags |= CRYPTO_TFM_RES_BAD_BLOCK_LEN;
 		return blkcipher_walk_done(desc, walk, -EINVAL);
 	}
@@ -247,6 +248,7 @@ static int blkcipher_walk_next(struct blkcipher_desc *desc,
 		}
 	}
 
+	bsize = min(walk->blocksize, n);
 	n = scatterwalk_clamp(&walk->in, n);
 	n = scatterwalk_clamp(&walk->out, n);
 
@@ -277,7 +279,7 @@ static inline int blkcipher_copy_iv(struct blkcipher_walk *walk,
 				    struct crypto_blkcipher *tfm,
 				    unsigned int alignmask)
 {
-	unsigned bs = crypto_blkcipher_blocksize(tfm);
+	unsigned bs = walk->blocksize;
 	unsigned int ivsize = crypto_blkcipher_ivsize(tfm);
 	unsigned aligned_bs = ALIGN(bs, alignmask + 1);
 	unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) -
@@ -302,6 +304,7 @@ int blkcipher_walk_virt(struct blkcipher_desc *desc,
 			struct blkcipher_walk *walk)
 {
 	walk->flags &= ~BLKCIPHER_WALK_PHYS;
+	walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
 	return blkcipher_walk_first(desc, walk);
 }
 EXPORT_SYMBOL_GPL(blkcipher_walk_virt);
@@ -310,6 +313,7 @@ int blkcipher_walk_phys(struct blkcipher_desc *desc,
 			struct blkcipher_walk *walk)
 {
 	walk->flags |= BLKCIPHER_WALK_PHYS;
+	walk->blocksize = crypto_blkcipher_blocksize(desc->tfm);
 	return blkcipher_walk_first(desc, walk);
 }
 EXPORT_SYMBOL_GPL(blkcipher_walk_phys);
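Note: blkcipher_walk_virt() and blkcipher_walk_phys() now record the tfm's block size in walk->blocksize before starting the walk, so existing callers behave exactly as before. The walk->blocksize member and the declaration of the new entry point are presumably added by a companion header hunk (likely include/crypto/algapi.h) that is not part of this excerpt; the sketch below shows the assumed shape of that change and is not copied from the commit.

/* Assumed companion header change (not shown in this diff): */
struct blkcipher_walk {
	/* ... existing members ... */
	unsigned int blocksize;	/* walk granularity chosen by the entry point */
};

int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);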
@@ -342,6 +346,16 @@ static int blkcipher_walk_first(struct blkcipher_desc *desc,
 	return blkcipher_walk_next(desc, walk);
 }
 
+int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
+			      struct blkcipher_walk *walk,
+			      unsigned int blocksize)
+{
+	walk->flags &= ~BLKCIPHER_WALK_PHYS;
+	walk->blocksize = blocksize;
+	return blkcipher_walk_first(desc, walk);
+}
+EXPORT_SYMBOL_GPL(blkcipher_walk_virt_block);
+
 static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
 			    unsigned int keylen)
 {
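The new blkcipher_walk_virt_block() lets a mode that declares a block size of 1 (CTR-like modes) still receive walk chunks sized in multiples of the underlying cipher's block size. Below is a minimal caller sketch under that assumption; my_ctr_ctx, my_ctr_segment() and my_ctr_final() are hypothetical placeholders for illustration only, not part of this commit or of the crypto API.

#include <crypto/algapi.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

struct my_ctr_ctx {
	struct crypto_cipher *child;	/* underlying block cipher */
};

/* Placeholders for the real per-chunk and tail processing. */
static unsigned int my_ctr_segment(struct blkcipher_walk *walk,
				   struct crypto_cipher *child);
static void my_ctr_final(struct blkcipher_walk *walk,
			 struct crypto_cipher *child);

static int my_ctr_crypt(struct blkcipher_desc *desc,
			struct scatterlist *dst, struct scatterlist *src,
			unsigned int nbytes)
{
	struct my_ctr_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = crypto_cipher_blocksize(ctx->child);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	/* Ask the walker for chunks that are multiples of the underlying
	 * cipher's block size, even though this tfm advertises blocksize 1. */
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		/* my_ctr_segment() returns the number of bytes it left
		 * unprocessed in this chunk. */
		unsigned int left = my_ctr_segment(&walk, ctx->child);

		err = blkcipher_walk_done(desc, &walk, left);
	}

	if (walk.nbytes) {
		/* Trailing partial block: consume it completely and report
		 * zero bytes left; the slow path in blkcipher_walk_done()
		 * now WARNs if a leftover is reported for a copied block. */
		my_ctr_final(&walk, ctx->child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}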