@@ -11,9 +11,9 @@
 #include <asm/neon.h>
 #include <asm/unaligned.h>
 #include <crypto/aes.h>
-#include <crypto/algapi.h>
 #include <crypto/scatterwalk.h>
 #include <crypto/internal/aead.h>
+#include <crypto/internal/skcipher.h>
 #include <linux/module.h>
 
 #include "aes-ce-setkey.h"
@@ -149,12 +149,7 @@ static int ccm_encrypt(struct aead_request *req)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
-	struct blkcipher_desc desc = { .info = req->iv };
-	struct blkcipher_walk walk;
-	struct scatterlist srcbuf[2];
-	struct scatterlist dstbuf[2];
-	struct scatterlist *src;
-	struct scatterlist *dst;
+	struct skcipher_walk walk;
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen;
@@ -172,27 +167,19 @@ static int ccm_encrypt(struct aead_request *req)
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
-	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
-	dst = src;
-	if (req->src != req->dst)
-		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);
-
-	blkcipher_walk_init(&walk, dst, src, len);
-	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
-					     AES_BLOCK_SIZE);
+	err = skcipher_walk_aead(&walk, req, true);
 
 	while (walk.nbytes) {
 		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
 
-		if (walk.nbytes == len)
+		if (walk.nbytes == walk.total)
 			tail = 0;
 
 		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   walk.nbytes - tail, ctx->key_enc,
 				   num_rounds(ctx), mac, walk.iv);
 
-		len -= walk.nbytes - tail;
-		err = blkcipher_walk_done(&desc, &walk, tail);
+		err = skcipher_walk_done(&walk, tail);
 	}
 	if (!err)
 		ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
@@ -203,7 +190,7 @@ static int ccm_encrypt(struct aead_request *req)
 		return err;
 
 	/* copy authtag to end of dst */
-	scatterwalk_map_and_copy(mac, dst, req->cryptlen,
+	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
 				 crypto_aead_authsize(aead), 1);
 
 	return 0;
@@ -214,12 +201,7 @@ static int ccm_decrypt(struct aead_request *req)
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
 	unsigned int authsize = crypto_aead_authsize(aead);
-	struct blkcipher_desc desc = { .info = req->iv };
-	struct blkcipher_walk walk;
-	struct scatterlist srcbuf[2];
-	struct scatterlist dstbuf[2];
-	struct scatterlist *src;
-	struct scatterlist *dst;
+	struct skcipher_walk walk;
 	u8 __aligned(8) mac[AES_BLOCK_SIZE];
 	u8 buf[AES_BLOCK_SIZE];
 	u32 len = req->cryptlen - authsize;
@@ -237,27 +219,19 @@ static int ccm_decrypt(struct aead_request *req)
 	/* preserve the original iv for the final round */
 	memcpy(buf, req->iv, AES_BLOCK_SIZE);
 
-	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
-	dst = src;
-	if (req->src != req->dst)
-		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);
-
-	blkcipher_walk_init(&walk, dst, src, len);
-	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
-					     AES_BLOCK_SIZE);
+	err = skcipher_walk_aead(&walk, req, true);
 
 	while (walk.nbytes) {
 		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
 
-		if (walk.nbytes == len)
+		if (walk.nbytes == walk.total)
 			tail = 0;
 
 		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
 				   walk.nbytes - tail, ctx->key_enc,
 				   num_rounds(ctx), mac, walk.iv);
 
-		len -= walk.nbytes - tail;
-		err = blkcipher_walk_done(&desc, &walk, tail);
+		err = skcipher_walk_done(&walk, tail);
 	}
 	if (!err)
 		ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));
@@ -268,7 +242,8 @@ static int ccm_decrypt(struct aead_request *req)
 		return err;
 
 	/* compare calculated auth tag with the stored one */
-	scatterwalk_map_and_copy(buf, src, req->cryptlen - authsize,
+	scatterwalk_map_and_copy(buf, req->src,
+				 req->assoclen + req->cryptlen - authsize,
 				 authsize, 0);
 
 	if (crypto_memneq(mac, buf, authsize))
@@ -287,6 +262,7 @@ static struct aead_alg ccm_aes_alg = {
 		.cra_module	= THIS_MODULE,
 	},
 	.ivsize		= AES_BLOCK_SIZE,
+	.chunksize	= AES_BLOCK_SIZE,
 	.maxauthsize	= AES_BLOCK_SIZE,
 	.setkey		= ccm_setkey,
 	.setauthsize	= ccm_setauthsize,
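
For reference, the loop structure that both ccm_encrypt() and ccm_decrypt() adopt above can be read in isolation. The following is a minimal sketch, not part of the patch: the helper name walk_one_request() and the elided processing step are placeholders, and error paths are simplified; it only restates the skcipher walk pattern visible in the hunks (prime the walk from the AEAD request, handle the partial tail block only on the final chunk, and report leftover bytes to skcipher_walk_done()).

	#include <crypto/aes.h>
	#include <crypto/internal/aead.h>
	#include <crypto/internal/skcipher.h>

	/* Sketch of the walk loop used by the converted CCM routines. */
	static int walk_one_request(struct aead_request *req)
	{
		struct skcipher_walk walk;
		int err;

		/* Map the src/dst scatterlists of the AEAD request for walking. */
		err = skcipher_walk_aead(&walk, req, true);

		while (walk.nbytes) {
			/* Bytes beyond the last full AES block in this chunk. */
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			/* On the final chunk, the partial block is processed too. */
			if (walk.nbytes == walk.total)
				tail = 0;

			/*
			 * Process walk.nbytes - tail bytes from walk.src.virt.addr
			 * into walk.dst.virt.addr here (placeholder for the
			 * ce_aes_ccm_encrypt/decrypt call in the patch).
			 */

			/* Tell the walk how many bytes were left unprocessed. */
			err = skcipher_walk_done(&walk, tail);
		}
		return err;
	}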