@@ -209,6 +209,56 @@ let incrementalPatterns: [[Int]] = [
   [7, 9],
 ]
 
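+// Helpers that assemble a fixed-width integer from the bytes at a
+// (possibly unaligned) raw pointer, reading in little-endian order.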
+func loadUnalignedUInt64LE(
+  from p: UnsafeRawPointer
+) -> UInt64 {
+  return
+    UInt64(p.load(fromByteOffset: 0, as: UInt8.self)) |
+    (UInt64(p.load(fromByteOffset: 1, as: UInt8.self)) << 8) |
+    (UInt64(p.load(fromByteOffset: 2, as: UInt8.self)) << 16) |
+    (UInt64(p.load(fromByteOffset: 3, as: UInt8.self)) << 24) |
+    (UInt64(p.load(fromByteOffset: 4, as: UInt8.self)) << 32) |
+    (UInt64(p.load(fromByteOffset: 5, as: UInt8.self)) << 40) |
+    (UInt64(p.load(fromByteOffset: 6, as: UInt8.self)) << 48) |
+    (UInt64(p.load(fromByteOffset: 7, as: UInt8.self)) << 56)
+}
+
+func loadUnalignedUInt32LE(
+  from p: UnsafeRawPointer
+) -> UInt32 {
+  return
+    UInt32(p.load(fromByteOffset: 0, as: UInt8.self)) |
+    (UInt32(p.load(fromByteOffset: 1, as: UInt8.self)) << 8) |
+    (UInt32(p.load(fromByteOffset: 2, as: UInt8.self)) << 16) |
+    (UInt32(p.load(fromByteOffset: 3, as: UInt8.self)) << 24)
+}
+
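+// Word-sized variant: delegates to the 32- or 64-bit loader depending on
+// the target architecture's pointer width.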
+func loadUnalignedUIntLE(
+  from p: UnsafeRawPointer
+) -> UInt {
+#if arch(i386) || arch(arm)
+  return UInt(loadUnalignedUInt32LE(from: p))
+#elseif arch(x86_64) || arch(arm64) || arch(powerpc64) || arch(powerpc64le) || arch(s390x)
+  return UInt(loadUnalignedUInt64LE(from: p))
+#endif
+}
+
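+// Signed variants reinterpret the corresponding unsigned load via
+// init(bitPattern:).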
+% for data_type in ['Int', 'Int64', 'Int32']:
+func loadUnaligned${data_type}LE(
+  from p: UnsafeRawPointer
+) -> ${data_type} {
+  return ${data_type}(bitPattern: loadUnalignedU${data_type}LE(from: p))
+}
+% end
+
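+// Native-endian variants: init(littleEndian:) byte-swaps the loaded value
+// on big-endian targets and is a no-op on little-endian ones.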
+% for data_type in ['UInt', 'Int', 'UInt64', 'Int64', 'UInt32', 'Int32']:
+func loadUnaligned${data_type}(
+  from p: UnsafeRawPointer
+) -> ${data_type} {
+  return ${data_type}(littleEndian: loadUnaligned${data_type}LE(from: p))
+}
+% end
+
 % for (Self, tests) in [
 %   ('_SipHash13Context', 'sipHash13Tests'),
 %   ('_SipHash24Context', 'sipHash24Tests')
@@ -224,7 +274,7 @@ SipHashTests.test("${Self}/Oneshot").forEach(in: ${tests}) {
       key: test.key))
 }
 
-SipHashTests.test("${Self}/Incremental")
+SipHashTests.test("${Self}.append(UnsafeRawPointer)")
   .forEach(in: cartesianProduct(${tests}, incrementalPatterns)) {
   test_ in
   let (test, pattern) = test_
@@ -253,7 +303,34 @@ SipHashTests.test("${Self}/Incremental")
     context.finalizeAndReturnHash())
 }
 
-SipHashTests.test("${Self}/Incremental/AppendAfterFinalizing") {
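+// Check that append(<integer>) matches byte-by-byte hashing: feed the input
+// in word-sized chunks, then append the leftover bytes, and expect the same
+// hash as the oneshot reference output.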
+% for data_type in ['UInt', 'Int', 'UInt64', 'Int64', 'UInt32', 'Int32']:
+SipHashTests.test("${Self}.append(${data_type})").forEach(in: ${tests}) {
+  test in
+
+  var context = ${Self}(key: test.key)
+
+  let chunkSize = MemoryLayout<${data_type}>.size
+
+  var startIndex = 0
+  let endIndex = test.input.count - (test.input.count % chunkSize)
+  while startIndex != endIndex {
+    context.append(
+      loadUnaligned${data_type}(
+        from: Array(
+          test.input[startIndex..<(startIndex+chunkSize)])))
+    startIndex += chunkSize
+  }
+  context.append(
+    Array(test.input.suffix(from: endIndex)),
+    byteCount: test.input.count - endIndex)
+
+  expectEqual(
+    test.output,
+    context.finalizeAndReturnHash())
+}
+% end
+
+SipHashTests.test("${Self}/AppendAfterFinalizing") {
   var context = ${Self}(key: (0, 0))
   _ = context.finalizeAndReturnHash()
   expectCrashLater()