#include <linux/cryptohash.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/byteorder.h>
#include <asm/i387.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
45
- asmlinkage void sha256_transform_ssse3 (const char * data , u32 * digest ,
46
- u64 rounds );
45
+ asmlinkage void sha256_transform_ssse3 (u32 * digest , const char * data ,
46
+ u64 rounds );
47
47
#ifdef CONFIG_AS_AVX
48
- asmlinkage void sha256_transform_avx (const char * data , u32 * digest ,
48
+ asmlinkage void sha256_transform_avx (u32 * digest , const char * data ,
49
49
u64 rounds );
50
50
#endif
51
51
#ifdef CONFIG_AS_AVX2
52
- asmlinkage void sha256_transform_rorx (const char * data , u32 * digest ,
53
- u64 rounds );
52
+ asmlinkage void sha256_transform_rorx (u32 * digest , const char * data ,
53
+ u64 rounds );
54
54
#endif
55
55
56
- static asmlinkage void (* sha256_transform_asm )(const char * , u32 * , u64 );
57
-
58
-
59
- static int sha256_ssse3_init (struct shash_desc * desc )
60
- {
61
- struct sha256_state * sctx = shash_desc_ctx (desc );
62
-
63
- sctx -> state [0 ] = SHA256_H0 ;
64
- sctx -> state [1 ] = SHA256_H1 ;
65
- sctx -> state [2 ] = SHA256_H2 ;
66
- sctx -> state [3 ] = SHA256_H3 ;
67
- sctx -> state [4 ] = SHA256_H4 ;
68
- sctx -> state [5 ] = SHA256_H5 ;
69
- sctx -> state [6 ] = SHA256_H6 ;
70
- sctx -> state [7 ] = SHA256_H7 ;
71
- sctx -> count = 0 ;
72
-
73
- return 0 ;
74
- }
75
-
76
- static int __sha256_ssse3_update (struct shash_desc * desc , const u8 * data ,
77
- unsigned int len , unsigned int partial )
78
- {
79
- struct sha256_state * sctx = shash_desc_ctx (desc );
80
- unsigned int done = 0 ;
81
-
82
- sctx -> count += len ;
83
-
84
- if (partial ) {
85
- done = SHA256_BLOCK_SIZE - partial ;
86
- memcpy (sctx -> buf + partial , data , done );
87
- sha256_transform_asm (sctx -> buf , sctx -> state , 1 );
88
- }
89
-
90
- if (len - done >= SHA256_BLOCK_SIZE ) {
91
- const unsigned int rounds = (len - done ) / SHA256_BLOCK_SIZE ;
92
-
93
- sha256_transform_asm (data + done , sctx -> state , (u64 ) rounds );
94
-
95
- done += rounds * SHA256_BLOCK_SIZE ;
96
- }
97
-
98
- memcpy (sctx -> buf , data + done , len - done );
99
-
100
- return 0 ;
101
- }
56
+ static void (* sha256_transform_asm )(u32 * , const char * , u64 );
102
57
103
58
static int sha256_ssse3_update (struct shash_desc * desc , const u8 * data ,
104
59
unsigned int len )
105
60
{
106
61
struct sha256_state * sctx = shash_desc_ctx (desc );
107
- unsigned int partial = sctx -> count % SHA256_BLOCK_SIZE ;
108
- int res ;
109
62
110
- /* Handle the fast case right here */
111
- if (partial + len < SHA256_BLOCK_SIZE ) {
112
- sctx -> count += len ;
113
- memcpy (sctx -> buf + partial , data , len );
63
+ if (!irq_fpu_usable () ||
64
+ (sctx -> count % SHA256_BLOCK_SIZE ) + len < SHA256_BLOCK_SIZE )
65
+ return crypto_sha256_update (desc , data , len );
114
66
115
- return 0 ;
116
- }
117
-
118
- if (!irq_fpu_usable ()) {
119
- res = crypto_sha256_update (desc , data , len );
120
- } else {
121
- kernel_fpu_begin ();
122
- res = __sha256_ssse3_update (desc , data , len , partial );
123
- kernel_fpu_end ();
124
- }
125
-
126
- return res ;
127
- }
67
+ /* make sure casting to sha256_block_fn() is safe */
68
+ BUILD_BUG_ON (offsetof(struct sha256_state , state ) != 0 );
128
69
129
-
130
- /* Add padding and return the message digest. */
131
- static int sha256_ssse3_final (struct shash_desc * desc , u8 * out )
132
- {
133
- struct sha256_state * sctx = shash_desc_ctx (desc );
134
- unsigned int i , index , padlen ;
135
- __be32 * dst = (__be32 * )out ;
136
- __be64 bits ;
137
- static const u8 padding [SHA256_BLOCK_SIZE ] = { 0x80 , };
138
-
139
- bits = cpu_to_be64 (sctx -> count << 3 );
140
-
141
- /* Pad out to 56 mod 64 and append length */
142
- index = sctx -> count % SHA256_BLOCK_SIZE ;
143
- padlen = (index < 56 ) ? (56 - index ) : ((SHA256_BLOCK_SIZE + 56 )- index );
144
-
145
- if (!irq_fpu_usable ()) {
146
- crypto_sha256_update (desc , padding , padlen );
147
- crypto_sha256_update (desc , (const u8 * )& bits , sizeof (bits ));
148
- } else {
149
- kernel_fpu_begin ();
150
- /* We need to fill a whole block for __sha256_ssse3_update() */
151
- if (padlen <= 56 ) {
152
- sctx -> count += padlen ;
153
- memcpy (sctx -> buf + index , padding , padlen );
154
- } else {
155
- __sha256_ssse3_update (desc , padding , padlen , index );
156
- }
157
- __sha256_ssse3_update (desc , (const u8 * )& bits ,
158
- sizeof (bits ), 56 );
159
- kernel_fpu_end ();
160
- }
161
-
162
- /* Store state in digest */
163
- for (i = 0 ; i < 8 ; i ++ )
164
- dst [i ] = cpu_to_be32 (sctx -> state [i ]);
165
-
166
- /* Wipe context */
167
- memset (sctx , 0 , sizeof (* sctx ));
70
+ kernel_fpu_begin ();
71
+ sha256_base_do_update (desc , data , len ,
72
+ (sha256_block_fn * )sha256_transform_asm );
73
+ kernel_fpu_end ();
168
74
169
75
return 0 ;
170
76
}
171
77
172
- static int sha256_ssse3_export (struct shash_desc * desc , void * out )
78
+ static int sha256_ssse3_finup (struct shash_desc * desc , const u8 * data ,
79
+ unsigned int len , u8 * out )
173
80
{
174
- struct sha256_state * sctx = shash_desc_ctx (desc );
81
+ if (!irq_fpu_usable ())
82
+ return crypto_sha256_finup (desc , data , len , out );
175
83
176
- memcpy (out , sctx , sizeof (* sctx ));
84
+ kernel_fpu_begin ();
85
+ if (len )
86
+ sha256_base_do_update (desc , data , len ,
87
+ (sha256_block_fn * )sha256_transform_asm );
88
+ sha256_base_do_finalize (desc , (sha256_block_fn * )sha256_transform_asm );
89
+ kernel_fpu_end ();
177
90
178
- return 0 ;
91
+ return sha256_base_finish ( desc , out ) ;
179
92
}
180
93
181
- static int sha256_ssse3_import (struct shash_desc * desc , const void * in )
182
- {
183
- struct sha256_state * sctx = shash_desc_ctx (desc );
184
-
185
- memcpy (sctx , in , sizeof (* sctx ));
186
-
187
- return 0 ;
188
- }
189
-
190
- static int sha224_ssse3_init (struct shash_desc * desc )
191
- {
192
- struct sha256_state * sctx = shash_desc_ctx (desc );
193
-
194
- sctx -> state [0 ] = SHA224_H0 ;
195
- sctx -> state [1 ] = SHA224_H1 ;
196
- sctx -> state [2 ] = SHA224_H2 ;
197
- sctx -> state [3 ] = SHA224_H3 ;
198
- sctx -> state [4 ] = SHA224_H4 ;
199
- sctx -> state [5 ] = SHA224_H5 ;
200
- sctx -> state [6 ] = SHA224_H6 ;
201
- sctx -> state [7 ] = SHA224_H7 ;
202
- sctx -> count = 0 ;
203
-
204
- return 0 ;
205
- }
206
-
207
- static int sha224_ssse3_final (struct shash_desc * desc , u8 * hash )
94
+ /* Add padding and return the message digest. */
95
+ static int sha256_ssse3_final (struct shash_desc * desc , u8 * out )
208
96
{
209
- u8 D [SHA256_DIGEST_SIZE ];
210
-
211
- sha256_ssse3_final (desc , D );
212
-
213
- memcpy (hash , D , SHA224_DIGEST_SIZE );
214
- memzero_explicit (D , SHA256_DIGEST_SIZE );
215
-
216
- return 0 ;
97
+ return sha256_ssse3_finup (desc , NULL , 0 , out );
217
98
}
218
99
219
100
static struct shash_alg algs [] = { {
220
101
.digestsize = SHA256_DIGEST_SIZE ,
221
- .init = sha256_ssse3_init ,
102
+ .init = sha256_base_init ,
222
103
.update = sha256_ssse3_update ,
223
104
.final = sha256_ssse3_final ,
224
- .export = sha256_ssse3_export ,
225
- .import = sha256_ssse3_import ,
105
+ .finup = sha256_ssse3_finup ,
226
106
.descsize = sizeof (struct sha256_state ),
227
- .statesize = sizeof (struct sha256_state ),
228
107
.base = {
229
108
.cra_name = "sha256" ,
230
109
.cra_driver_name = "sha256-ssse3" ,
@@ -235,13 +114,11 @@ static struct shash_alg algs[] = { {
235
114
}
236
115
}, {
237
116
.digestsize = SHA224_DIGEST_SIZE ,
238
- .init = sha224_ssse3_init ,
117
+ .init = sha224_base_init ,
239
118
.update = sha256_ssse3_update ,
240
- .final = sha224_ssse3_final ,
241
- .export = sha256_ssse3_export ,
242
- .import = sha256_ssse3_import ,
119
+ .final = sha256_ssse3_final ,
120
+ .finup = sha256_ssse3_finup ,
243
121
.descsize = sizeof (struct sha256_state ),
244
- .statesize = sizeof (struct sha256_state ),
245
122
.base = {
246
123
.cra_name = "sha224" ,
247
124
.cra_driver_name = "sha224-ssse3" ,
0 commit comments