1 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2 ; Copyright(c) 2011-2016 Intel Corporation All rights reserved.
3 ;
4 ; Redistribution and use in source and binary forms, with or without
5 ; modification, are permitted provided that the following conditions
6 ; are met:
7 ; * Redistributions of source code must retain the above copyright
8 ; notice, this list of conditions and the following disclaimer.
9 ; * Redistributions in binary form must reproduce the above copyright
10 ; notice, this list of conditions and the following disclaimer in
11 ; the documentation and/or other materials provided with the
12 ; distribution.
13 ; * Neither the name of Intel Corporation nor the names of its
14 ; contributors may be used to endorse or promote products derived
15 ; from this software without specific prior written permission.
16 ;
17 ; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 ; "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 ; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 ; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 ; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 ; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 ; LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 ; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 ; THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 ; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
29
30 %include "sha512_mb_mgr_datastruct.asm"
31 %include "reg_sizes.asm"
32
33 [bits 64]
34 default rel
35 section .text
36
37 ;; code to compute quad SHA512 using AVX2
38 ;; use YMMs to tackle the larger digest size
39 ;; outer calling routine takes care of save and restore of XMM registers
40 ;; Logic designed/laid out by JDG
41
42 ;; Function clobbers: rax, rbx, rdx, rsi, r8-r12; ymm0-15
43 ;; Stack must be aligned to 32 bytes before call
44 ;; Windows clobbers: rax rbx rdx r8 r9 r10 r11 r12
45 ;; Windows preserves: rcx rsi rdi rbp r13 r14 r15
46 ;;
47 ;; Linux clobbers:    rax rbx rsi r8 r9 r10 r11 r12
48 ;; Linux preserves: rcx rdx rdi rbp r13 r14 r15
49 ;;
50 ;; clobbers ymm0-15
51
52 %define SHA512_DIGEST_WORD_SIZE 8
53 %define NUM_SHA512_DIGEST_WORDS 8
54 %define SHA512_DIGEST_ROW_SIZE 8*4
55 %define PTR_SZ 8
56 %define _data_ptr_sha512 _data_ptr
57
58 %ifidn __OUTPUT_FORMAT__, elf64
59 ; LINUX register definitions
60 %define arg1 rdi
61 %define arg2 rsi
62 %else
63 ; Windows register definitions
64 %define arg1 rcx
65 %define arg2 rdx
66 %endif
67
68 ; Common definitions
69 %define STATE arg1
70 %define INP_SIZE arg2
71
72 %define IDX rax
73 %define ROUND rbx
74 %define TBL r8
75
76 %define inp0 r9
77 %define inp1 r10
78 %define inp2 r11
79 %define inp3 r12
80
81 %define a ymm0
82 %define b ymm1
83 %define c ymm2
84 %define d ymm3
85 %define e ymm4
86 %define f ymm5
87 %define g ymm6
88 %define h ymm7
89
90 %define a0 ymm8
91 %define a1 ymm9
92 %define a2 ymm10
93
94 %define TT0 ymm14
95 %define TT1 ymm13
96 %define TT2 ymm12
97 %define TT3 ymm11
98 %define TT4 ymm10
99 %define TT5 ymm9
100
101 %define T1 ymm14
102 %define TMP ymm15
103
104 %define SZ4 4*SHA512_DIGEST_WORD_SIZE ; Size of one vector register
105 %define ROUNDS 80*SZ4
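;; Note (added for clarity): ROUND advances by SZ4 bytes (one 4-way row of the
;; K512_4_MB table below) per round, so ROUNDS = 80*SZ4 is both the byte size
;; of that table and the loop bound tested by "cmp ROUND, ROUNDS".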
106
107 ; Define stack usage
108
109 ;; Assume stack aligned to 32 bytes before call
110 ;; Therefore FRAMESZ mod 32 must be 32-8 = 24
111 struc stack_frame
112 .data resb 16*SZ4
113 .digest resb NUM_SHA512_DIGEST_WORDS*SZ4
114 .align resb 24
115 endstruc
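;; Worked check of the alignment note above: with SZ4 = 32 bytes the frame is
;;   .data   = 16*32 = 512 bytes
;;   .digest =  8*32 = 256 bytes
;;   .align  =          24 bytes
;;   total   =         792 bytes,  and 792 mod 32 = 24 = 32-8,
;; so after the 8-byte return address pushed by CALL, "sub rsp, stack_frame_size"
;; leaves rsp 32-byte aligned for the aligned vmovdqa stores used below.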
116
117 %define _DIGEST stack_frame.digest
118
119 %define VMOVPD vmovupd
120
121 ; operates on YMMs
122 ; transpose r0, r1, r2, r3, t0, t1
123 ; "transpose" data in {r0..r3} using temps {t0..t1}
124 ; Input looks like: {r0 r1 r2 r3}
125 ; r0 = {a7 a6 a5 a4 a3 a2 a1 a0}
126 ; r1 = {b7 b6 b5 b4 b3 b2 b1 b0}
127 ; r2 = {c7 c6 c5 c4 c3 c2 c1 c0}
128 ; r3 = {d7 d6 d5 d4 d3 d2 d1 d0}
129 ;
130 ; output looks like: {t0 r1 r0 r3}
131 ; t0 = {d1 d0 c1 c0 b1 b0 a1 a0}
132 ; r1 = {d3 d2 c3 c2 b3 b2 a3 a2}
133 ; r0 = {d5 d4 c5 c4 b5 b4 a5 a4}
134 ; r3 = {d7 d6 c7 c6 b7 b6 a7 a6}
135 ;
136 %macro TRANSPOSE 6
137 %define %%r0 %1
138 %define %%r1 %2
139 %define %%r2 %3
140 %define %%r3 %4
141 %define %%t0 %5
142 %define %%t1 %6
143 ; vshufps does not cross the mid-way boundary and hence is cheaper
144 vshufps %%t0, %%r0, %%r1, 0x44 ; t0 = {b5 b4 a5 a4 b1 b0 a1 a0}
145 vshufps %%r0, %%r0, %%r1, 0xEE ; r0 = {b7 b6 a7 a6 b3 b2 a3 a2}
146
147 vshufps %%t1, %%r2, %%r3, 0x44 ; t1 = {d5 d4 c5 c4 d1 d0 c1 c0}
148 vshufps %%r2, %%r2, %%r3, 0xEE ; r2 = {d7 d6 c7 c6 d3 d2 c3 c2}
149
150 vperm2f128 %%r1, %%r0, %%r2, 0x20; r1 = {d3 d2 c3 c2 b3 b2 a3 a2}
151
152 vperm2f128 %%r3, %%r0, %%r2, 0x31; r3 = {d7 d6 c7 c6 b7 b6 a7 a6}
153
154 vperm2f128 %%r0, %%t0, %%t1, 0x31; r0 = {d5 d4 c5 c4 b5 b4 a5 a4}
155
156 ; now ok to clobber t0
157 vperm2f128 %%t0, %%t0, %%t1, 0x20; t0 = {d1 d0 c1 c0 b1 b0 a1 a0}
158
159 %endmacro
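;; For reference, the macro above is a 4x4 transpose of 64-bit lanes; a scalar
;; sketch of the equivalent data movement (illustrative pseudocode only, these
;; names are not part of this code base):
;;   ; in[i][j]  = qword j of input register i (i = lane a..d)
;;   ; out[j][i] = qword i of output row j
;;   for (i = 0; i < 4; i++)
;;       for (j = 0; j < 4; j++)
;;           out[j][i] = in[i][j];
;; vshufps only shuffles within each 128-bit half, so the cross-half moves are
;; finished with vperm2f128.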
160
161
162 %macro ROTATE_ARGS 0
163 %xdefine TMP_ h
164 %xdefine h g
165 %xdefine g f
166 %xdefine f e
167 %xdefine e d
168 %xdefine d c
169 %xdefine c b
170 %xdefine b a
171 %xdefine a TMP_
172 %endm
173
174 ; PRORQ reg, imm, tmp
175 ; packed-rotate-right-double
176 ; does a rotate by doing two shifts and an or
177 %macro PRORQ 3
178 %define %%reg %1
179 %define %%imm %2
180 %define %%tmp %3
181 vpsllq %%tmp, %%reg, (64-(%%imm))
182 vpsrlq %%reg, %%reg, %%imm
183 vpor %%reg, %%reg, %%tmp
184 %endmacro
185
186 ; non-destructive
187 ; PRORQ_nd reg, imm, tmp, src
188 %macro PRORQ_nd 4
189 %define %%reg %1
190 %define %%imm %2
191 %define %%tmp %3
192 %define %%src %4
193 vpsllq %%tmp, %%src, (64-(%%imm))
194 vpsrlq %%reg, %%src, %%imm
195 vpor %%reg, %%reg, %%tmp
196 %endmacro
197
198 ; PRORQ dst/src, amt
199 %macro PRORQ 2
200 PRORQ %1, %2, TMP
201 %endmacro
202
203 ; PRORQ_nd dst, src, amt
204 %macro PRORQ_nd 3
205 PRORQ_nd %1, %3, TMP, %2
206 %endmacro
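;; AVX2 has no packed 64-bit rotate instruction (vprorq is AVX-512 only), so
;; the macros above synthesize it from two shifts and an OR. Per 64-bit lane:
;;   ror(x, imm) = (x >> imm) | (x << (64 - imm)),  0 < imm < 64
;; e.g. ror(0x8000000000000001, 1) = 0xC000000000000000.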
207
208
209
210 ;; arguments passed implicitly in preprocessor symbols i, a...h
211 %macro ROUND_00_15 2
212 %define %%T1 %1
213 %define %%i %2
214 PRORQ_nd a0, e, (18-14) ; sig1: a0 = (e >> 4)
215
216 vpxor a2, f, g ; ch: a2 = f^g
217 vpand a2, a2, e ; ch: a2 = (f^g)&e
218 vpxor a2, a2, g ; a2 = ch
219
220 PRORQ_nd a1, e, 41 ; sig1: a1 = (e >> 41)
221 vmovdqa [SZ4*(%%i&0xf) + rsp],%%T1
222 vpaddq %%T1,%%T1,[TBL + ROUND] ; T1 = W + K
223 vpxor a0, a0, e ; sig1: a0 = e ^ (e >> 4)
224 PRORQ a0, 14 ; sig1: a0 = (e >> 14) ^ (e >> 18)
225 vpaddq h, h, a2 ; h = h + ch
226 PRORQ_nd a2, a, (34-28) ; sig0: a2 = (a >> 6)
227 vpaddq h, h, %%T1 ; h = h + ch + W + K
228 vpxor a0, a0, a1 ; a0 = sigma1
229 vmovdqa %%T1, a ; maj: T1 = a
230 PRORQ_nd a1, a, 39 ; sig0: a1 = (a >> 39)
231 vpxor %%T1, %%T1, c ; maj: T1 = a^c
232 add ROUND, SZ4 ; ROUND++
233 vpand %%T1, %%T1, b ; maj: T1 = (a^c)&b
234 vpaddq h, h, a0
235
236 vpaddq d, d, h
237
238 vpxor a2, a2, a ; sig0: a2 = a ^ (a >> 6)
239 PRORQ a2, 28 ; sig0: a2 = (a >> 28) ^ (a >> 34)
240 vpxor a2, a2, a1 ; a2 = sig0
241 vpand a1, a, c ; maj: a1 = a&c
242 vpor a1, a1, %%T1 ; a1 = maj
243 vpaddq h, h, a1 ; h = h + ch + W + K + maj
244 vpaddq h, h, a2 ; h = h + ch + W + K + maj + sigma0
245 ROTATE_ARGS
246
247 %endm
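;; For reference, one SHA-512 round as specified in FIPS 180-4, computed above
;; on four lanes at once (one 64-bit word per lane):
;;   Ch(e,f,g)  = (e & f) ^ (~e & g)          ; built as ((f ^ g) & e) ^ g
;;   Maj(a,b,c) = (a & b) ^ (a & c) ^ (b & c) ; built as ((a ^ c) & b) | (a & c)
;;   Sigma1(e)  = ror(e,14) ^ ror(e,18) ^ ror(e,41)
;;   Sigma0(a)  = ror(a,28) ^ ror(a,34) ^ ror(a,39)
;;   T1 = h + Sigma1(e) + Ch(e,f,g) + K[t] + W[t]
;;   T2 = Sigma0(a) + Maj(a,b,c)
;;   h=g  g=f  f=e  e=d+T1  d=c  c=b  b=a  a=T1+T2   ; renaming done by ROTATE_ARGS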
248
249
250 ;; arguments passed implicitly in preprocessor symbols i, a...h
251 %macro ROUND_16_XX 2
252 %define %%T1 %1
253 %define %%i %2
254 vmovdqa %%T1, [SZ4*((%%i-15)&0xf) + rsp]
255 vmovdqa a1, [SZ4*((%%i-2)&0xf) + rsp]
256 vmovdqa a0, %%T1
257 PRORQ %%T1, 8-1
258 vmovdqa a2, a1
259 PRORQ a1, 61-19
260 vpxor %%T1, %%T1, a0
261 PRORQ %%T1, 1
262 vpxor a1, a1, a2
263 PRORQ a1, 19
264 vpsrlq a0, a0, 7
265 vpxor %%T1, %%T1, a0
266 vpsrlq a2, a2, 6
267 vpxor a1, a1, a2
268 vpaddq %%T1, %%T1, [SZ4*((%%i-16)&0xf) + rsp]
269 vpaddq a1, a1, [SZ4*((%%i-7)&0xf) + rsp]
270 vpaddq %%T1, %%T1, a1
271
272 ROUND_00_15 %%T1, %%i
273
274 %endm
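;; Message schedule reference (FIPS 180-4), evaluated per 64-bit lane:
;;   sigma0(x) = ror(x,1)  ^ ror(x,8)  ^ (x >> 7)
;;   sigma1(x) = ror(x,19) ^ ror(x,61) ^ (x >> 6)
;;   W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16]
;; The 16-entry circular buffer on the stack (indexed with "& 0xf") holds the
;; 16 most recent W values for all four lanes.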
275
276
277 ;; void sha512_mb_x4_avx2(SHA512_MB_ARGS_X4 *STATE, const int INP_SIZE)
278 ;; arg 1 : STATE : pointer to args structure (digest rows and lane data pointers)
279 ;; arg 2 : INP_SIZE : size of data in blocks (assumed >= 1)
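;; Layout assumed by this routine (as read from the code below; see
;; sha512_mb_mgr_datastruct.asm for the authoritative structure definition):
;;   - digest word i of lane l is read/written at
;;       STATE + i*SHA512_DIGEST_ROW_SIZE + l*SHA512_DIGEST_WORD_SIZE
;;   - the four lane data pointers live at STATE + _data_ptr_sha512 + l*PTR_SZ
;;     and are advanced by INP_SIZE*128 bytes before returning
;;   - each lane must supply INP_SIZE whole 128-byte blocks; padding and length
;;     encoding are handled by the caller (the multi-buffer manager).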
280 mk_global sha512_mb_x4_avx2, function, internal
281 align 32
282 sha512_mb_x4_avx2:
283 endbranch
284 ; general registers preserved in outer calling routine
285 ; outer calling routine saves all the XMM registers
286
287 sub rsp, stack_frame_size
288
289 ;; Load the pre-transposed incoming digest.
290 vmovdqu a, [STATE+ 0*SHA512_DIGEST_ROW_SIZE]
291 vmovdqu b, [STATE+ 1*SHA512_DIGEST_ROW_SIZE]
292 vmovdqu c, [STATE+ 2*SHA512_DIGEST_ROW_SIZE]
293 vmovdqu d, [STATE+ 3*SHA512_DIGEST_ROW_SIZE]
294 vmovdqu e, [STATE+ 4*SHA512_DIGEST_ROW_SIZE]
295 vmovdqu f, [STATE+ 5*SHA512_DIGEST_ROW_SIZE]
296 vmovdqu g, [STATE+ 6*SHA512_DIGEST_ROW_SIZE]
297 vmovdqu h, [STATE+ 7*SHA512_DIGEST_ROW_SIZE]
298
299
300 lea TBL,[K512_4_MB]
301
302 ;; load the address of each of the MAX_LANES (4) message lanes
303 ;; getting ready to transpose input onto stack
304 mov inp0,[STATE + _data_ptr_sha512 + 0*PTR_SZ]
305 mov inp1,[STATE + _data_ptr_sha512 + 1*PTR_SZ]
306 mov inp2,[STATE + _data_ptr_sha512 + 2*PTR_SZ]
307 mov inp3,[STATE + _data_ptr_sha512 + 3*PTR_SZ]
308
309 xor IDX, IDX
310 lloop:
311 xor ROUND, ROUND
312
313 ;; save old digest
314 vmovdqa [rsp + _DIGEST + 0*SZ4], a
315 vmovdqa [rsp + _DIGEST + 1*SZ4], b
316 vmovdqa [rsp + _DIGEST + 2*SZ4], c
317 vmovdqa [rsp + _DIGEST + 3*SZ4], d
318 vmovdqa [rsp + _DIGEST + 4*SZ4], e
319 vmovdqa [rsp + _DIGEST + 5*SZ4], f
320 vmovdqa [rsp + _DIGEST + 6*SZ4], g
321 vmovdqa [rsp + _DIGEST + 7*SZ4], h
322
323 %assign i 0
324 %rep 4
325 ;; load up the shuffler for little-endian to big-endian format
326 vmovdqa TMP, [PSHUFFLE_BYTE_FLIP_MASK]
327 VMOVPD TT2,[inp0+IDX+i*32]
328 VMOVPD TT1,[inp1+IDX+i*32]
329 VMOVPD TT4,[inp2+IDX+i*32]
330 VMOVPD TT3,[inp3+IDX+i*32]
331 TRANSPOSE TT2, TT1, TT4, TT3, TT0, TT5
332 vpshufb TT0, TT0, TMP
333 vpshufb TT1, TT1, TMP
334 vpshufb TT2, TT2, TMP
335 vpshufb TT3, TT3, TMP
336 ROUND_00_15 TT0,(i*4+0)
337 ROUND_00_15 TT1,(i*4+1)
338 ROUND_00_15 TT2,(i*4+2)
339 ROUND_00_15 TT3,(i*4+3)
340 %assign i (i+1)
341 %endrep
342 ;; Increment IDX by message block size == 4 (loop) * 32 (YMM width in bytes)
343 add IDX, 4 * 32
344
345 %assign i (i*4)
346
347 jmp Lrounds_16_xx
348 align 16
349 Lrounds_16_xx:
350 %rep 16
351 ROUND_16_XX T1, i
352 %assign i (i+1)
353 %endrep
354
355 cmp ROUND,ROUNDS
356 jb Lrounds_16_xx
357
358 ;; add old digest
359 vpaddq a, a, [rsp + _DIGEST + 0*SZ4]
360 vpaddq b, b, [rsp + _DIGEST + 1*SZ4]
361 vpaddq c, c, [rsp + _DIGEST + 2*SZ4]
362 vpaddq d, d, [rsp + _DIGEST + 3*SZ4]
363 vpaddq e, e, [rsp + _DIGEST + 4*SZ4]
364 vpaddq f, f, [rsp + _DIGEST + 5*SZ4]
365 vpaddq g, g, [rsp + _DIGEST + 6*SZ4]
366 vpaddq h, h, [rsp + _DIGEST + 7*SZ4]
367
368 sub INP_SIZE, 1 ;; consumed one message block
369 jne lloop
370
371 ; write back to memory (state object) the transposed digest
372 vmovdqu [STATE+ 0*SHA512_DIGEST_ROW_SIZE ],a
373 vmovdqu [STATE+ 1*SHA512_DIGEST_ROW_SIZE ],b
374 vmovdqu [STATE+ 2*SHA512_DIGEST_ROW_SIZE ],c
375 vmovdqu [STATE+ 3*SHA512_DIGEST_ROW_SIZE ],d
376 vmovdqu [STATE+ 4*SHA512_DIGEST_ROW_SIZE ],e
377 vmovdqu [STATE+ 5*SHA512_DIGEST_ROW_SIZE ],f
378 vmovdqu [STATE+ 6*SHA512_DIGEST_ROW_SIZE ],g
379 vmovdqu [STATE+ 7*SHA512_DIGEST_ROW_SIZE ],h
380
381 ;; update input data pointers
382 add inp0, IDX
383 mov [STATE + _data_ptr_sha512 + 0*PTR_SZ], inp0
384 add inp1, IDX
385 mov [STATE + _data_ptr_sha512 + 1*PTR_SZ], inp1
386 add inp2, IDX
387 mov [STATE + _data_ptr_sha512 + 2*PTR_SZ], inp2
388 add inp3, IDX
389 mov [STATE + _data_ptr_sha512 + 3*PTR_SZ], inp3
390
391 ;;;;;;;;;;;;;;;;
392 ;; Postamble
393
394 add rsp, stack_frame_size
395
396 ; outer calling routine restores XMM and other GP registers
397 ret
398
399 section .data
400 align 64
401 K512_4_MB:
402 dq 0x428a2f98d728ae22, 0x428a2f98d728ae22, 0x428a2f98d728ae22, 0x428a2f98d728ae22
403 dq 0x7137449123ef65cd, 0x7137449123ef65cd, 0x7137449123ef65cd, 0x7137449123ef65cd
404 dq 0xb5c0fbcfec4d3b2f, 0xb5c0fbcfec4d3b2f, 0xb5c0fbcfec4d3b2f, 0xb5c0fbcfec4d3b2f
405 dq 0xe9b5dba58189dbbc, 0xe9b5dba58189dbbc, 0xe9b5dba58189dbbc, 0xe9b5dba58189dbbc
406 dq 0x3956c25bf348b538, 0x3956c25bf348b538, 0x3956c25bf348b538, 0x3956c25bf348b538
407 dq 0x59f111f1b605d019, 0x59f111f1b605d019, 0x59f111f1b605d019, 0x59f111f1b605d019
408 dq 0x923f82a4af194f9b, 0x923f82a4af194f9b, 0x923f82a4af194f9b, 0x923f82a4af194f9b
409 dq 0xab1c5ed5da6d8118, 0xab1c5ed5da6d8118, 0xab1c5ed5da6d8118, 0xab1c5ed5da6d8118
410 dq 0xd807aa98a3030242, 0xd807aa98a3030242, 0xd807aa98a3030242, 0xd807aa98a3030242
411 dq 0x12835b0145706fbe, 0x12835b0145706fbe, 0x12835b0145706fbe, 0x12835b0145706fbe
412 dq 0x243185be4ee4b28c, 0x243185be4ee4b28c, 0x243185be4ee4b28c, 0x243185be4ee4b28c
413 dq 0x550c7dc3d5ffb4e2, 0x550c7dc3d5ffb4e2, 0x550c7dc3d5ffb4e2, 0x550c7dc3d5ffb4e2
414 dq 0x72be5d74f27b896f, 0x72be5d74f27b896f, 0x72be5d74f27b896f, 0x72be5d74f27b896f
415 dq 0x80deb1fe3b1696b1, 0x80deb1fe3b1696b1, 0x80deb1fe3b1696b1, 0x80deb1fe3b1696b1
416 dq 0x9bdc06a725c71235, 0x9bdc06a725c71235, 0x9bdc06a725c71235, 0x9bdc06a725c71235
417 dq 0xc19bf174cf692694, 0xc19bf174cf692694, 0xc19bf174cf692694, 0xc19bf174cf692694
418 dq 0xe49b69c19ef14ad2, 0xe49b69c19ef14ad2, 0xe49b69c19ef14ad2, 0xe49b69c19ef14ad2
419 dq 0xefbe4786384f25e3, 0xefbe4786384f25e3, 0xefbe4786384f25e3, 0xefbe4786384f25e3
420 dq 0x0fc19dc68b8cd5b5, 0x0fc19dc68b8cd5b5, 0x0fc19dc68b8cd5b5, 0x0fc19dc68b8cd5b5
421 dq 0x240ca1cc77ac9c65, 0x240ca1cc77ac9c65, 0x240ca1cc77ac9c65, 0x240ca1cc77ac9c65
422 dq 0x2de92c6f592b0275, 0x2de92c6f592b0275, 0x2de92c6f592b0275, 0x2de92c6f592b0275
423 dq 0x4a7484aa6ea6e483, 0x4a7484aa6ea6e483, 0x4a7484aa6ea6e483, 0x4a7484aa6ea6e483
424 dq 0x5cb0a9dcbd41fbd4, 0x5cb0a9dcbd41fbd4, 0x5cb0a9dcbd41fbd4, 0x5cb0a9dcbd41fbd4
425 dq 0x76f988da831153b5, 0x76f988da831153b5, 0x76f988da831153b5, 0x76f988da831153b5
426 dq 0x983e5152ee66dfab, 0x983e5152ee66dfab, 0x983e5152ee66dfab, 0x983e5152ee66dfab
427 dq 0xa831c66d2db43210, 0xa831c66d2db43210, 0xa831c66d2db43210, 0xa831c66d2db43210
428 dq 0xb00327c898fb213f, 0xb00327c898fb213f, 0xb00327c898fb213f, 0xb00327c898fb213f
429 dq 0xbf597fc7beef0ee4, 0xbf597fc7beef0ee4, 0xbf597fc7beef0ee4, 0xbf597fc7beef0ee4
430 dq 0xc6e00bf33da88fc2, 0xc6e00bf33da88fc2, 0xc6e00bf33da88fc2, 0xc6e00bf33da88fc2
431 dq 0xd5a79147930aa725, 0xd5a79147930aa725, 0xd5a79147930aa725, 0xd5a79147930aa725
432 dq 0x06ca6351e003826f, 0x06ca6351e003826f, 0x06ca6351e003826f, 0x06ca6351e003826f
433 dq 0x142929670a0e6e70, 0x142929670a0e6e70, 0x142929670a0e6e70, 0x142929670a0e6e70
434 dq 0x27b70a8546d22ffc, 0x27b70a8546d22ffc, 0x27b70a8546d22ffc, 0x27b70a8546d22ffc
435 dq 0x2e1b21385c26c926, 0x2e1b21385c26c926, 0x2e1b21385c26c926, 0x2e1b21385c26c926
436 dq 0x4d2c6dfc5ac42aed, 0x4d2c6dfc5ac42aed, 0x4d2c6dfc5ac42aed, 0x4d2c6dfc5ac42aed
437 dq 0x53380d139d95b3df, 0x53380d139d95b3df, 0x53380d139d95b3df, 0x53380d139d95b3df
438 dq 0x650a73548baf63de, 0x650a73548baf63de, 0x650a73548baf63de, 0x650a73548baf63de
439 dq 0x766a0abb3c77b2a8, 0x766a0abb3c77b2a8, 0x766a0abb3c77b2a8, 0x766a0abb3c77b2a8
440 dq 0x81c2c92e47edaee6, 0x81c2c92e47edaee6, 0x81c2c92e47edaee6, 0x81c2c92e47edaee6
441 dq 0x92722c851482353b, 0x92722c851482353b, 0x92722c851482353b, 0x92722c851482353b
442 dq 0xa2bfe8a14cf10364, 0xa2bfe8a14cf10364, 0xa2bfe8a14cf10364, 0xa2bfe8a14cf10364
443 dq 0xa81a664bbc423001, 0xa81a664bbc423001, 0xa81a664bbc423001, 0xa81a664bbc423001
444 dq 0xc24b8b70d0f89791, 0xc24b8b70d0f89791, 0xc24b8b70d0f89791, 0xc24b8b70d0f89791
445 dq 0xc76c51a30654be30, 0xc76c51a30654be30, 0xc76c51a30654be30, 0xc76c51a30654be30
446 dq 0xd192e819d6ef5218, 0xd192e819d6ef5218, 0xd192e819d6ef5218, 0xd192e819d6ef5218
447 dq 0xd69906245565a910, 0xd69906245565a910, 0xd69906245565a910, 0xd69906245565a910
448 dq 0xf40e35855771202a, 0xf40e35855771202a, 0xf40e35855771202a, 0xf40e35855771202a
449 dq 0x106aa07032bbd1b8, 0x106aa07032bbd1b8, 0x106aa07032bbd1b8, 0x106aa07032bbd1b8
450 dq 0x19a4c116b8d2d0c8, 0x19a4c116b8d2d0c8, 0x19a4c116b8d2d0c8, 0x19a4c116b8d2d0c8
451 dq 0x1e376c085141ab53, 0x1e376c085141ab53, 0x1e376c085141ab53, 0x1e376c085141ab53
452 dq 0x2748774cdf8eeb99, 0x2748774cdf8eeb99, 0x2748774cdf8eeb99, 0x2748774cdf8eeb99
453 dq 0x34b0bcb5e19b48a8, 0x34b0bcb5e19b48a8, 0x34b0bcb5e19b48a8, 0x34b0bcb5e19b48a8
454 dq 0x391c0cb3c5c95a63, 0x391c0cb3c5c95a63, 0x391c0cb3c5c95a63, 0x391c0cb3c5c95a63
455 dq 0x4ed8aa4ae3418acb, 0x4ed8aa4ae3418acb, 0x4ed8aa4ae3418acb, 0x4ed8aa4ae3418acb
456 dq 0x5b9cca4f7763e373, 0x5b9cca4f7763e373, 0x5b9cca4f7763e373, 0x5b9cca4f7763e373
457 dq 0x682e6ff3d6b2b8a3, 0x682e6ff3d6b2b8a3, 0x682e6ff3d6b2b8a3, 0x682e6ff3d6b2b8a3
458 dq 0x748f82ee5defb2fc, 0x748f82ee5defb2fc, 0x748f82ee5defb2fc, 0x748f82ee5defb2fc
459 dq 0x78a5636f43172f60, 0x78a5636f43172f60, 0x78a5636f43172f60, 0x78a5636f43172f60
460 dq 0x84c87814a1f0ab72, 0x84c87814a1f0ab72, 0x84c87814a1f0ab72, 0x84c87814a1f0ab72
461 dq 0x8cc702081a6439ec, 0x8cc702081a6439ec, 0x8cc702081a6439ec, 0x8cc702081a6439ec
462 dq 0x90befffa23631e28, 0x90befffa23631e28, 0x90befffa23631e28, 0x90befffa23631e28
463 dq 0xa4506cebde82bde9, 0xa4506cebde82bde9, 0xa4506cebde82bde9, 0xa4506cebde82bde9
464 dq 0xbef9a3f7b2c67915, 0xbef9a3f7b2c67915, 0xbef9a3f7b2c67915, 0xbef9a3f7b2c67915
465 dq 0xc67178f2e372532b, 0xc67178f2e372532b, 0xc67178f2e372532b, 0xc67178f2e372532b
466 dq 0xca273eceea26619c, 0xca273eceea26619c, 0xca273eceea26619c, 0xca273eceea26619c
467 dq 0xd186b8c721c0c207, 0xd186b8c721c0c207, 0xd186b8c721c0c207, 0xd186b8c721c0c207
468 dq 0xeada7dd6cde0eb1e, 0xeada7dd6cde0eb1e, 0xeada7dd6cde0eb1e, 0xeada7dd6cde0eb1e
469 dq 0xf57d4f7fee6ed178, 0xf57d4f7fee6ed178, 0xf57d4f7fee6ed178, 0xf57d4f7fee6ed178
470 dq 0x06f067aa72176fba, 0x06f067aa72176fba, 0x06f067aa72176fba, 0x06f067aa72176fba
471 dq 0x0a637dc5a2c898a6, 0x0a637dc5a2c898a6, 0x0a637dc5a2c898a6, 0x0a637dc5a2c898a6
472 dq 0x113f9804bef90dae, 0x113f9804bef90dae, 0x113f9804bef90dae, 0x113f9804bef90dae
473 dq 0x1b710b35131c471b, 0x1b710b35131c471b, 0x1b710b35131c471b, 0x1b710b35131c471b
474 dq 0x28db77f523047d84, 0x28db77f523047d84, 0x28db77f523047d84, 0x28db77f523047d84
475 dq 0x32caab7b40c72493, 0x32caab7b40c72493, 0x32caab7b40c72493, 0x32caab7b40c72493
476 dq 0x3c9ebe0a15c9bebc, 0x3c9ebe0a15c9bebc, 0x3c9ebe0a15c9bebc, 0x3c9ebe0a15c9bebc
477 dq 0x431d67c49c100d4c, 0x431d67c49c100d4c, 0x431d67c49c100d4c, 0x431d67c49c100d4c
478 dq 0x4cc5d4becb3e42b6, 0x4cc5d4becb3e42b6, 0x4cc5d4becb3e42b6, 0x4cc5d4becb3e42b6
479 dq 0x597f299cfc657e2a, 0x597f299cfc657e2a, 0x597f299cfc657e2a, 0x597f299cfc657e2a
480 dq 0x5fcb6fab3ad6faec, 0x5fcb6fab3ad6faec, 0x5fcb6fab3ad6faec, 0x5fcb6fab3ad6faec
481 dq 0x6c44198c4a475817, 0x6c44198c4a475817, 0x6c44198c4a475817, 0x6c44198c4a475817
482
483 align 32
484 PSHUFFLE_BYTE_FLIP_MASK: dq 0x0001020304050607, 0x08090a0b0c0d0e0f
485 dq 0x1011121314151617, 0x18191a1b1c1d1e1f
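;; Applied with vpshufb, the mask above reverses the byte order within each
;; 64-bit lane (e.g. 0x0123456789abcdef -> 0xefcdab8967452301), converting the
;; little-endian loads of the message into the big-endian word order SHA-512
;; is defined over.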
486
487