# WARNING: do not edit!
# Generated from openssl/crypto/aes/asm/aesni-x86.pl
#
# Copyright 2009-2020 The OpenSSL Project Authors. All Rights Reserved.
#
# Licensed under the OpenSSL license (the "License"). You may not use
# this file except in compliance with the License. You can obtain a copy
# in the file LICENSE in the source distribution or at
# https://www.openssl.org/source/license.html

.text
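
# void aesni_encrypt(const unsigned char *inp, unsigned char *out,
#                    const AES_KEY *key);
# Single-block encryption; cdecl arguments are read off the stack and
# 240(key) is the round count in the AES_KEY layout.  The .byte runs
# throughout this file are hand-encoded AES-NI instructions, emitted as
# raw bytes so the file assembles even with pre-AES-NI assemblers:
# 102,15,56,220,209 is aesenc %xmm1,%xmm2 and 102,15,56,221,209 is
# aesenclast %xmm1,%xmm2.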
.globl aesni_encrypt
.type aesni_encrypt,@function
.align 16
aesni_encrypt:
.L_aesni_encrypt_begin:
	movl 4(%esp),%eax
	movl 12(%esp),%edx
	movups (%eax),%xmm2
	movl 240(%edx),%ecx
	movl 8(%esp),%eax
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L000enc1_loop_1:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L000enc1_loop_1
.byte 102,15,56,221,209
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	movups %xmm2,(%eax)
	pxor %xmm2,%xmm2
	ret
.size aesni_encrypt,.-.L_aesni_encrypt_begin
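
# void aesni_decrypt(const unsigned char *inp, unsigned char *out,
#                    const AES_KEY *key);
# Single-block counterpart of aesni_encrypt; 102,15,56,222,xx encodes
# aesdec and 102,15,56,223,xx encodes aesdeclast.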
.globl aesni_decrypt
.type aesni_decrypt,@function
.align 16
aesni_decrypt:
.L_aesni_decrypt_begin:
	movl 4(%esp),%eax
	movl 12(%esp),%edx
	movups (%eax),%xmm2
	movl 240(%edx),%ecx
	movl 8(%esp),%eax
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L001dec1_loop_2:
.byte 102,15,56,222,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L001dec1_loop_2
.byte 102,15,56,223,209
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	movups %xmm2,(%eax)
	pxor %xmm2,%xmm2
	ret
.size aesni_decrypt,.-.L_aesni_decrypt_begin
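
# The local _aesni_{en,de}crypt{2,3,4,6} helpers below run 2..6 blocks
# (%xmm2..%xmm7) through the key schedule in parallel to hide the
# aesenc/aesdec latency.  On entry %edx holds the key schedule and %ecx
# the round count; both are clobbered.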
.type _aesni_encrypt2,@function
.align 16
_aesni_encrypt2:
	movups (%edx),%xmm0
	shll $4,%ecx
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	movups 32(%edx),%xmm0
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
	addl $16,%ecx
.L002enc2_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L002enc2_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,221,208
.byte 102,15,56,221,216
	ret
.size _aesni_encrypt2,.-_aesni_encrypt2
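
# Two-block decryption counterpart of _aesni_encrypt2.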
.type _aesni_decrypt2,@function
.align 16
_aesni_decrypt2:
	movups (%edx),%xmm0
	shll $4,%ecx
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	movups 32(%edx),%xmm0
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
	addl $16,%ecx
.L003dec2_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L003dec2_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,223,208
.byte 102,15,56,223,216
	ret
.size _aesni_decrypt2,.-_aesni_decrypt2
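
# Three blocks (%xmm2..%xmm4) in flight per round.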
.type _aesni_encrypt3,@function
.align 16
_aesni_encrypt3:
	movups (%edx),%xmm0
	shll $4,%ecx
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	pxor %xmm0,%xmm4
	movups 32(%edx),%xmm0
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
	addl $16,%ecx
.L004enc3_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L004enc3_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 102,15,56,221,224
	ret
.size _aesni_encrypt3,.-_aesni_encrypt3
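
# Three-block decryption counterpart of _aesni_encrypt3.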
.type _aesni_decrypt3,@function
.align 16
_aesni_decrypt3:
	movups (%edx),%xmm0
	shll $4,%ecx
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	pxor %xmm0,%xmm4
	movups 32(%edx),%xmm0
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
	addl $16,%ecx
.L005dec3_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L005dec3_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 102,15,56,223,224
	ret
.size _aesni_decrypt3,.-_aesni_decrypt3
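
# Four blocks (%xmm2..%xmm5) in flight per round.  The .byte 15,31,64,0
# below encodes a 4-byte nop (nopl 0(%eax)) used purely as padding.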
.type _aesni_encrypt4,@function
.align 16
_aesni_encrypt4:
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	shll $4,%ecx
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	pxor %xmm0,%xmm4
	pxor %xmm0,%xmm5
	movups 32(%edx),%xmm0
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
.byte 15,31,64,0
	addl $16,%ecx
.L006enc4_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
.byte 102,15,56,220,232
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L006enc4_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 102,15,56,221,224
.byte 102,15,56,221,232
	ret
.size _aesni_encrypt4,.-_aesni_encrypt4
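
# Four-block decryption counterpart of _aesni_encrypt4.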
.type _aesni_decrypt4,@function
.align 16
_aesni_decrypt4:
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	shll $4,%ecx
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	pxor %xmm0,%xmm4
	pxor %xmm0,%xmm5
	movups 32(%edx),%xmm0
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
.byte 15,31,64,0
	addl $16,%ecx
.L007dec4_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L007dec4_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 102,15,56,223,224
.byte 102,15,56,223,232
	ret
.size _aesni_decrypt4,.-_aesni_decrypt4
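
# Six blocks (%xmm2..%xmm7) in flight per round.  Besides the plain
# entry point there is .L_aesni_encrypt6_enter, used by the CTR and XTS
# code below, which enters with the first two round keys already
# applied.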
.type _aesni_encrypt6,@function
.align 16
_aesni_encrypt6:
	movups (%edx),%xmm0
	shll $4,%ecx
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	pxor %xmm0,%xmm4
.byte 102,15,56,220,209
	pxor %xmm0,%xmm5
	pxor %xmm0,%xmm6
.byte 102,15,56,220,217
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
.byte 102,15,56,220,225
	pxor %xmm0,%xmm7
	movups (%edx,%ecx,1),%xmm0
	addl $16,%ecx
	jmp .L008_aesni_encrypt6_inner
.align 16
.L009enc6_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.L008_aesni_encrypt6_inner:
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
.L_aesni_encrypt6_enter:
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
.byte 102,15,56,220,232
.byte 102,15,56,220,240
.byte 102,15,56,220,248
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L009enc6_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 102,15,56,221,224
.byte 102,15,56,221,232
.byte 102,15,56,221,240
.byte 102,15,56,221,248
	ret
.size _aesni_encrypt6,.-_aesni_encrypt6
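
# Six-block decryption counterpart, with the matching
# .L_aesni_decrypt6_enter entry point.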
.type _aesni_decrypt6,@function
.align 16
_aesni_decrypt6:
	movups (%edx),%xmm0
	shll $4,%ecx
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm2
	pxor %xmm0,%xmm3
	pxor %xmm0,%xmm4
.byte 102,15,56,222,209
	pxor %xmm0,%xmm5
	pxor %xmm0,%xmm6
.byte 102,15,56,222,217
	leal 32(%edx,%ecx,1),%edx
	negl %ecx
.byte 102,15,56,222,225
	pxor %xmm0,%xmm7
	movups (%edx,%ecx,1),%xmm0
	addl $16,%ecx
	jmp .L010_aesni_decrypt6_inner
.align 16
.L011dec6_loop:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.L010_aesni_decrypt6_inner:
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.L_aesni_decrypt6_enter:
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L011dec6_loop
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 102,15,56,223,224
.byte 102,15,56,223,232
.byte 102,15,56,223,240
.byte 102,15,56,223,248
	ret
.size _aesni_decrypt6,.-_aesni_decrypt6
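
# void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
#                        size_t length, const AES_KEY *key, int enc);
# ECB mode: length is truncated to a multiple of 16 bytes, enc selects
# the encrypt (non-zero) or decrypt (zero) path.  The main loops handle
# six blocks per iteration, with 1/2/3/4-block tail cases.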
.globl aesni_ecb_encrypt
.type aesni_ecb_encrypt,@function
.align 16
aesni_ecb_encrypt:
.L_aesni_ecb_encrypt_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%esi
	movl 24(%esp),%edi
	movl 28(%esp),%eax
	movl 32(%esp),%edx
	movl 36(%esp),%ebx
	andl $-16,%eax
	jz .L012ecb_ret
	movl 240(%edx),%ecx
	testl %ebx,%ebx
	jz .L013ecb_decrypt
	movl %edx,%ebp
	movl %ecx,%ebx
	cmpl $96,%eax
	jb .L014ecb_enc_tail
	movdqu (%esi),%xmm2
	movdqu 16(%esi),%xmm3
	movdqu 32(%esi),%xmm4
	movdqu 48(%esi),%xmm5
	movdqu 64(%esi),%xmm6
	movdqu 80(%esi),%xmm7
	leal 96(%esi),%esi
	subl $96,%eax
	jmp .L015ecb_enc_loop6_enter
.align 16
.L016ecb_enc_loop6:
	movups %xmm2,(%edi)
	movdqu (%esi),%xmm2
	movups %xmm3,16(%edi)
	movdqu 16(%esi),%xmm3
	movups %xmm4,32(%edi)
	movdqu 32(%esi),%xmm4
	movups %xmm5,48(%edi)
	movdqu 48(%esi),%xmm5
	movups %xmm6,64(%edi)
	movdqu 64(%esi),%xmm6
	movups %xmm7,80(%edi)
	leal 96(%edi),%edi
	movdqu 80(%esi),%xmm7
	leal 96(%esi),%esi
.L015ecb_enc_loop6_enter:
	call _aesni_encrypt6
	movl %ebp,%edx
	movl %ebx,%ecx
	subl $96,%eax
	jnc .L016ecb_enc_loop6
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	movups %xmm6,64(%edi)
	movups %xmm7,80(%edi)
	leal 96(%edi),%edi
	addl $96,%eax
	jz .L012ecb_ret
.L014ecb_enc_tail:
	movups (%esi),%xmm2
	cmpl $32,%eax
	jb .L017ecb_enc_one
	movups 16(%esi),%xmm3
	je .L018ecb_enc_two
	movups 32(%esi),%xmm4
	cmpl $64,%eax
	jb .L019ecb_enc_three
	movups 48(%esi),%xmm5
	je .L020ecb_enc_four
	movups 64(%esi),%xmm6
	xorps %xmm7,%xmm7
	call _aesni_encrypt6
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	movups %xmm6,64(%edi)
	jmp .L012ecb_ret
.align 16
.L017ecb_enc_one:
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L021enc1_loop_3:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L021enc1_loop_3
.byte 102,15,56,221,209
	movups %xmm2,(%edi)
	jmp .L012ecb_ret
.align 16
.L018ecb_enc_two:
	call _aesni_encrypt2
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	jmp .L012ecb_ret
.align 16
.L019ecb_enc_three:
	call _aesni_encrypt3
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	jmp .L012ecb_ret
.align 16
.L020ecb_enc_four:
	call _aesni_encrypt4
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	jmp .L012ecb_ret
.align 16
.L013ecb_decrypt:
	movl %edx,%ebp
	movl %ecx,%ebx
	cmpl $96,%eax
	jb .L022ecb_dec_tail
	movdqu (%esi),%xmm2
	movdqu 16(%esi),%xmm3
	movdqu 32(%esi),%xmm4
	movdqu 48(%esi),%xmm5
	movdqu 64(%esi),%xmm6
	movdqu 80(%esi),%xmm7
	leal 96(%esi),%esi
	subl $96,%eax
	jmp .L023ecb_dec_loop6_enter
.align 16
.L024ecb_dec_loop6:
	movups %xmm2,(%edi)
	movdqu (%esi),%xmm2
	movups %xmm3,16(%edi)
	movdqu 16(%esi),%xmm3
	movups %xmm4,32(%edi)
	movdqu 32(%esi),%xmm4
	movups %xmm5,48(%edi)
	movdqu 48(%esi),%xmm5
	movups %xmm6,64(%edi)
	movdqu 64(%esi),%xmm6
	movups %xmm7,80(%edi)
	leal 96(%edi),%edi
	movdqu 80(%esi),%xmm7
	leal 96(%esi),%esi
.L023ecb_dec_loop6_enter:
	call _aesni_decrypt6
	movl %ebp,%edx
	movl %ebx,%ecx
	subl $96,%eax
	jnc .L024ecb_dec_loop6
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	movups %xmm6,64(%edi)
	movups %xmm7,80(%edi)
	leal 96(%edi),%edi
	addl $96,%eax
	jz .L012ecb_ret
.L022ecb_dec_tail:
	movups (%esi),%xmm2
	cmpl $32,%eax
	jb .L025ecb_dec_one
	movups 16(%esi),%xmm3
	je .L026ecb_dec_two
	movups 32(%esi),%xmm4
	cmpl $64,%eax
	jb .L027ecb_dec_three
	movups 48(%esi),%xmm5
	je .L028ecb_dec_four
	movups 64(%esi),%xmm6
	xorps %xmm7,%xmm7
	call _aesni_decrypt6
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	movups %xmm6,64(%edi)
	jmp .L012ecb_ret
.align 16
.L025ecb_dec_one:
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L029dec1_loop_4:
.byte 102,15,56,222,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L029dec1_loop_4
.byte 102,15,56,223,209
	movups %xmm2,(%edi)
	jmp .L012ecb_ret
.align 16
.L026ecb_dec_two:
	call _aesni_decrypt2
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	jmp .L012ecb_ret
.align 16
.L027ecb_dec_three:
	call _aesni_decrypt3
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	jmp .L012ecb_ret
.align 16
.L028ecb_dec_four:
	call _aesni_decrypt4
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
.L012ecb_ret:
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	pxor %xmm2,%xmm2
	pxor %xmm3,%xmm3
	pxor %xmm4,%xmm4
	pxor %xmm5,%xmm5
	pxor %xmm6,%xmm6
	pxor %xmm7,%xmm7
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
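
# void aesni_ccm64_encrypt_blocks(const unsigned char *in,
#                                 unsigned char *out, size_t blocks,
#                                 const AES_KEY *key,
#                                 const unsigned char ivec[16],
#                                 unsigned char cmac[16]);
# CCM with a 64-bit block counter: %xmm7 carries the counter block,
# %xmm3 the running CMAC, and each iteration interleaves the CTR and
# CBC-MAC AES invocations two at a time.  The four immediates stored at
# (%esp) (0x0c0d0e0f, 0x08090a0b, 0x04050607, 0x00010203) build the
# big-endian byte-swap mask consumed by pshufb (.byte 102,15,56,0,xx).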
.globl aesni_ccm64_encrypt_blocks
.type aesni_ccm64_encrypt_blocks,@function
.align 16
aesni_ccm64_encrypt_blocks:
.L_aesni_ccm64_encrypt_blocks_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%esi
	movl 24(%esp),%edi
	movl 28(%esp),%eax
	movl 32(%esp),%edx
	movl 36(%esp),%ebx
	movl 40(%esp),%ecx
	movl %esp,%ebp
	subl $60,%esp
	andl $-16,%esp
	movl %ebp,48(%esp)
	movdqu (%ebx),%xmm7
	movdqu (%ecx),%xmm3
	movl 240(%edx),%ecx
	movl $202182159,(%esp)
	movl $134810123,4(%esp)
	movl $67438087,8(%esp)
	movl $66051,12(%esp)
	movl $1,%ebx
	xorl %ebp,%ebp
	movl %ebx,16(%esp)
	movl %ebp,20(%esp)
	movl %ebp,24(%esp)
	movl %ebp,28(%esp)
	shll $4,%ecx
	movl $16,%ebx
	leal (%edx),%ebp
	movdqa (%esp),%xmm5
	movdqa %xmm7,%xmm2
	leal 32(%edx,%ecx,1),%edx
	subl %ecx,%ebx
.byte 102,15,56,0,253
.L030ccm64_enc_outer:
	movups (%ebp),%xmm0
	movl %ebx,%ecx
	movups (%esi),%xmm6
	xorps %xmm0,%xmm2
	movups 16(%ebp),%xmm1
	xorps %xmm6,%xmm0
	xorps %xmm0,%xmm3
	movups 32(%ebp),%xmm0
.L031ccm64_enc2_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L031ccm64_enc2_loop
.byte 102,15,56,220,209
.byte 102,15,56,220,217
	paddq 16(%esp),%xmm7
	decl %eax
.byte 102,15,56,221,208
.byte 102,15,56,221,216
	leal 16(%esi),%esi
	xorps %xmm2,%xmm6
	movdqa %xmm7,%xmm2
	movups %xmm6,(%edi)
.byte 102,15,56,0,213
	leal 16(%edi),%edi
	jnz .L030ccm64_enc_outer
	movl 48(%esp),%esp
	movl 40(%esp),%edi
	movups %xmm3,(%edi)
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	pxor %xmm2,%xmm2
	pxor %xmm3,%xmm3
	pxor %xmm4,%xmm4
	pxor %xmm5,%xmm5
	pxor %xmm6,%xmm6
	pxor %xmm7,%xmm7
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size aesni_ccm64_encrypt_blocks,.-.L_aesni_ccm64_encrypt_blocks_begin
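
# void aesni_ccm64_decrypt_blocks(const unsigned char *in,
#                                 unsigned char *out, size_t blocks,
#                                 const AES_KEY *key,
#                                 const unsigned char ivec[16],
#                                 unsigned char cmac[16]);
# Decryption counterpart: plaintext enters the CMAC only after it has
# been recovered, so the MAC update for one block is paired with the
# counter encryption for the next block; the first counter block and
# the final MAC update are done standalone.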
.globl aesni_ccm64_decrypt_blocks
.type aesni_ccm64_decrypt_blocks,@function
.align 16
aesni_ccm64_decrypt_blocks:
.L_aesni_ccm64_decrypt_blocks_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%esi
	movl 24(%esp),%edi
	movl 28(%esp),%eax
	movl 32(%esp),%edx
	movl 36(%esp),%ebx
	movl 40(%esp),%ecx
	movl %esp,%ebp
	subl $60,%esp
	andl $-16,%esp
	movl %ebp,48(%esp)
	movdqu (%ebx),%xmm7
	movdqu (%ecx),%xmm3
	movl 240(%edx),%ecx
	movl $202182159,(%esp)
	movl $134810123,4(%esp)
	movl $67438087,8(%esp)
	movl $66051,12(%esp)
	movl $1,%ebx
	xorl %ebp,%ebp
	movl %ebx,16(%esp)
	movl %ebp,20(%esp)
	movl %ebp,24(%esp)
	movl %ebp,28(%esp)
	movdqa (%esp),%xmm5
	movdqa %xmm7,%xmm2
	movl %edx,%ebp
	movl %ecx,%ebx
.byte 102,15,56,0,253
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L032enc1_loop_5:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L032enc1_loop_5
.byte 102,15,56,221,209
	shll $4,%ebx
	movl $16,%ecx
	movups (%esi),%xmm6
	paddq 16(%esp),%xmm7
	leal 16(%esi),%esi
	subl %ebx,%ecx
	leal 32(%ebp,%ebx,1),%edx
	movl %ecx,%ebx
	jmp .L033ccm64_dec_outer
.align 16
.L033ccm64_dec_outer:
	xorps %xmm2,%xmm6
	movdqa %xmm7,%xmm2
	movups %xmm6,(%edi)
	leal 16(%edi),%edi
.byte 102,15,56,0,213
	subl $1,%eax
	jz .L034ccm64_dec_break
	movups (%ebp),%xmm0
	movl %ebx,%ecx
	movups 16(%ebp),%xmm1
	xorps %xmm0,%xmm6
	xorps %xmm0,%xmm2
	xorps %xmm6,%xmm3
	movups 32(%ebp),%xmm0
.L035ccm64_dec2_loop:
.byte 102,15,56,220,209
.byte 102,15,56,220,217
	movups (%edx,%ecx,1),%xmm1
	addl $32,%ecx
.byte 102,15,56,220,208
.byte 102,15,56,220,216
	movups -16(%edx,%ecx,1),%xmm0
	jnz .L035ccm64_dec2_loop
	movups (%esi),%xmm6
	paddq 16(%esp),%xmm7
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,221,208
.byte 102,15,56,221,216
	leal 16(%esi),%esi
	jmp .L033ccm64_dec_outer
.align 16
.L034ccm64_dec_break:
	movl 240(%ebp),%ecx
	movl %ebp,%edx
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	xorps %xmm0,%xmm6
	leal 32(%edx),%edx
	xorps %xmm6,%xmm3
.L036enc1_loop_6:
.byte 102,15,56,220,217
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L036enc1_loop_6
.byte 102,15,56,221,217
	movl 48(%esp),%esp
	movl 40(%esp),%edi
	movups %xmm3,(%edi)
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	pxor %xmm2,%xmm2
	pxor %xmm3,%xmm3
	pxor %xmm4,%xmm4
	pxor %xmm5,%xmm5
	pxor %xmm6,%xmm6
	pxor %xmm7,%xmm7
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size aesni_ccm64_decrypt_blocks,.-.L_aesni_ccm64_decrypt_blocks_begin
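
# void aesni_ctr32_encrypt_blocks(const unsigned char *in,
#                                 unsigned char *out, size_t blocks,
#                                 const AES_KEY *key,
#                                 const unsigned char ivec[16]);
# CTR mode with a 32-bit big-endian counter in the last word of ivec.
# Six counter blocks are kept in flight per iteration; pextrd/pinsrd
# and pshufb (.byte 102,15,58,22,xx / 102,15,58,34,xx / 102,15,56,0,xx)
# splice and byte-swap the counter words.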
.globl aesni_ctr32_encrypt_blocks
.type aesni_ctr32_encrypt_blocks,@function
.align 16
aesni_ctr32_encrypt_blocks:
.L_aesni_ctr32_encrypt_blocks_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 20(%esp),%esi
	movl 24(%esp),%edi
	movl 28(%esp),%eax
	movl 32(%esp),%edx
	movl 36(%esp),%ebx
	movl %esp,%ebp
	subl $88,%esp
	andl $-16,%esp
	movl %ebp,80(%esp)
	cmpl $1,%eax
	je .L037ctr32_one_shortcut
	movdqu (%ebx),%xmm7
	movl $202182159,(%esp)
	movl $134810123,4(%esp)
	movl $67438087,8(%esp)
	movl $66051,12(%esp)
	movl $6,%ecx
	xorl %ebp,%ebp
	movl %ecx,16(%esp)
	movl %ecx,20(%esp)
	movl %ecx,24(%esp)
	movl %ebp,28(%esp)
.byte 102,15,58,22,251,3
.byte 102,15,58,34,253,3
	movl 240(%edx),%ecx
	bswap %ebx
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	movdqa (%esp),%xmm2
.byte 102,15,58,34,195,0
	leal 3(%ebx),%ebp
.byte 102,15,58,34,205,0
	incl %ebx
.byte 102,15,58,34,195,1
	incl %ebp
.byte 102,15,58,34,205,1
	incl %ebx
.byte 102,15,58,34,195,2
	incl %ebp
.byte 102,15,58,34,205,2
	movdqa %xmm0,48(%esp)
.byte 102,15,56,0,194
	movdqu (%edx),%xmm6
	movdqa %xmm1,64(%esp)
.byte 102,15,56,0,202
	pshufd $192,%xmm0,%xmm2
	pshufd $128,%xmm0,%xmm3
	cmpl $6,%eax
	jb .L038ctr32_tail
	pxor %xmm6,%xmm7
	shll $4,%ecx
	movl $16,%ebx
	movdqa %xmm7,32(%esp)
	movl %edx,%ebp
	subl %ecx,%ebx
	leal 32(%edx,%ecx,1),%edx
	subl $6,%eax
	jmp .L039ctr32_loop6
.align 16
.L039ctr32_loop6:
	pshufd $64,%xmm0,%xmm4
	movdqa 32(%esp),%xmm0
	pshufd $192,%xmm1,%xmm5
	pxor %xmm0,%xmm2
	pshufd $128,%xmm1,%xmm6
	pxor %xmm0,%xmm3
	pshufd $64,%xmm1,%xmm7
	movups 16(%ebp),%xmm1
	pxor %xmm0,%xmm4
	pxor %xmm0,%xmm5
.byte 102,15,56,220,209
	pxor %xmm0,%xmm6
	pxor %xmm0,%xmm7
.byte 102,15,56,220,217
	movups 32(%ebp),%xmm0
	movl %ebx,%ecx
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
	call .L_aesni_encrypt6_enter
	movups (%esi),%xmm1
	movups 16(%esi),%xmm0
	xorps %xmm1,%xmm2
	movups 32(%esi),%xmm1
	xorps %xmm0,%xmm3
	movups %xmm2,(%edi)
	movdqa 16(%esp),%xmm0
	xorps %xmm1,%xmm4
	movdqa 64(%esp),%xmm1
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	paddd %xmm0,%xmm1
	paddd 48(%esp),%xmm0
	movdqa (%esp),%xmm2
	movups 48(%esi),%xmm3
	movups 64(%esi),%xmm4
	xorps %xmm3,%xmm5
	movups 80(%esi),%xmm3
	leal 96(%esi),%esi
	movdqa %xmm0,48(%esp)
.byte 102,15,56,0,194
	xorps %xmm4,%xmm6
	movups %xmm5,48(%edi)
	xorps %xmm3,%xmm7
	movdqa %xmm1,64(%esp)
.byte 102,15,56,0,202
	movups %xmm6,64(%edi)
	pshufd $192,%xmm0,%xmm2
	movups %xmm7,80(%edi)
	leal 96(%edi),%edi
	pshufd $128,%xmm0,%xmm3
	subl $6,%eax
	jnc .L039ctr32_loop6
	addl $6,%eax
	jz .L040ctr32_ret
	movdqu (%ebp),%xmm7
	movl %ebp,%edx
	pxor 32(%esp),%xmm7
	movl 240(%ebp),%ecx
.L038ctr32_tail:
	por %xmm7,%xmm2
	cmpl $2,%eax
	jb .L041ctr32_one
	pshufd $64,%xmm0,%xmm4
	por %xmm7,%xmm3
	je .L042ctr32_two
	pshufd $192,%xmm1,%xmm5
	por %xmm7,%xmm4
	cmpl $4,%eax
	jb .L043ctr32_three
	pshufd $128,%xmm1,%xmm6
	por %xmm7,%xmm5
	je .L044ctr32_four
	por %xmm7,%xmm6
	call _aesni_encrypt6
	movups (%esi),%xmm1
	movups 16(%esi),%xmm0
	xorps %xmm1,%xmm2
	movups 32(%esi),%xmm1
	xorps %xmm0,%xmm3
	movups 48(%esi),%xmm0
	xorps %xmm1,%xmm4
	movups 64(%esi),%xmm1
	xorps %xmm0,%xmm5
	movups %xmm2,(%edi)
	xorps %xmm1,%xmm6
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	movups %xmm6,64(%edi)
	jmp .L040ctr32_ret
.align 16
.L037ctr32_one_shortcut:
	movups (%ebx),%xmm2
	movl 240(%edx),%ecx
.L041ctr32_one:
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L045enc1_loop_7:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L045enc1_loop_7
.byte 102,15,56,221,209
	movups (%esi),%xmm6
	xorps %xmm2,%xmm6
	movups %xmm6,(%edi)
	jmp .L040ctr32_ret
.align 16
.L042ctr32_two:
	call _aesni_encrypt2
	movups (%esi),%xmm5
	movups 16(%esi),%xmm6
	xorps %xmm5,%xmm2
	xorps %xmm6,%xmm3
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	jmp .L040ctr32_ret
.align 16
.L043ctr32_three:
	call _aesni_encrypt3
	movups (%esi),%xmm5
	movups 16(%esi),%xmm6
	xorps %xmm5,%xmm2
	movups 32(%esi),%xmm7
	xorps %xmm6,%xmm3
	movups %xmm2,(%edi)
	xorps %xmm7,%xmm4
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	jmp .L040ctr32_ret
.align 16
.L044ctr32_four:
	call _aesni_encrypt4
	movups (%esi),%xmm6
	movups 16(%esi),%xmm7
	movups 32(%esi),%xmm1
	xorps %xmm6,%xmm2
	movups 48(%esi),%xmm0
	xorps %xmm7,%xmm3
	movups %xmm2,(%edi)
	xorps %xmm1,%xmm4
	movups %xmm3,16(%edi)
	xorps %xmm0,%xmm5
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
.L040ctr32_ret:
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	pxor %xmm2,%xmm2
	pxor %xmm3,%xmm3
	pxor %xmm4,%xmm4
	movdqa %xmm0,32(%esp)
	pxor %xmm5,%xmm5
	movdqa %xmm0,48(%esp)
	pxor %xmm6,%xmm6
	movdqa %xmm0,64(%esp)
	pxor %xmm7,%xmm7
	movl 80(%esp),%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size aesni_ctr32_encrypt_blocks,.-.L_aesni_ctr32_encrypt_blocks_begin
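
# void aesni_xts_encrypt(const unsigned char *inp, unsigned char *out,
#                        size_t len, const AES_KEY *key1,
#                        const AES_KEY *key2, const unsigned char iv[16]);
# IEEE P1619 XTS: the IV is first encrypted under key2 to form the
# initial tweak, then data is processed under key1 six blocks at a
# time.  Tweaks advance by doubling in GF(2^128); the 135 (0x87) stored
# at 96(%esp) is the reduction constant, and the pcmpgtd/pshufd/pand
# sequence propagates the carry.  .L057xts_enc_steal implements
# ciphertext stealing for a partial final block.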
.globl aesni_xts_encrypt
.type aesni_xts_encrypt,@function
.align 16
aesni_xts_encrypt:
.L_aesni_xts_encrypt_begin:
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi
	movl 36(%esp),%edx
	movl 40(%esp),%esi
	movl 240(%edx),%ecx
	movups (%esi),%xmm2
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L046enc1_loop_8:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L046enc1_loop_8
.byte 102,15,56,221,209
	movl 20(%esp),%esi
	movl 24(%esp),%edi
	movl 28(%esp),%eax
	movl 32(%esp),%edx
	movl %esp,%ebp
	subl $120,%esp
	movl 240(%edx),%ecx
	andl $-16,%esp
	movl $135,96(%esp)
	movl $0,100(%esp)
	movl $1,104(%esp)
	movl $0,108(%esp)
	movl %eax,112(%esp)
	movl %ebp,116(%esp)
	movdqa %xmm2,%xmm1
	pxor %xmm0,%xmm0
	movdqa 96(%esp),%xmm3
	pcmpgtd %xmm1,%xmm0
	andl $-16,%eax
	movl %edx,%ebp
	movl %ecx,%ebx
	subl $96,%eax
	jc .L047xts_enc_short
	shll $4,%ecx
	movl $16,%ebx
	subl %ecx,%ebx
	leal 32(%edx,%ecx,1),%edx
	jmp .L048xts_enc_loop6
.align 16
.L048xts_enc_loop6:
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	movdqa %xmm1,(%esp)
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	movdqa %xmm1,16(%esp)
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	movdqa %xmm1,32(%esp)
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	movdqa %xmm1,48(%esp)
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	pshufd $19,%xmm0,%xmm7
	movdqa %xmm1,64(%esp)
	paddq %xmm1,%xmm1
	movups (%ebp),%xmm0
	pand %xmm3,%xmm7
	movups (%esi),%xmm2
	pxor %xmm1,%xmm7
	movl %ebx,%ecx
	movdqu 16(%esi),%xmm3
	xorps %xmm0,%xmm2
	movdqu 32(%esi),%xmm4
	pxor %xmm0,%xmm3
	movdqu 48(%esi),%xmm5
	pxor %xmm0,%xmm4
	movdqu 64(%esi),%xmm6
	pxor %xmm0,%xmm5
	movdqu 80(%esi),%xmm1
	pxor %xmm0,%xmm6
	leal 96(%esi),%esi
	pxor (%esp),%xmm2
	movdqa %xmm7,80(%esp)
	pxor %xmm1,%xmm7
	movups 16(%ebp),%xmm1
	pxor 16(%esp),%xmm3
	pxor 32(%esp),%xmm4
.byte 102,15,56,220,209
	pxor 48(%esp),%xmm5
	pxor 64(%esp),%xmm6
.byte 102,15,56,220,217
	pxor %xmm0,%xmm7
	movups 32(%ebp),%xmm0
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
	call .L_aesni_encrypt6_enter
	movdqa 80(%esp),%xmm1
	pxor %xmm0,%xmm0
	xorps (%esp),%xmm2
	pcmpgtd %xmm1,%xmm0
	xorps 16(%esp),%xmm3
	movups %xmm2,(%edi)
	xorps 32(%esp),%xmm4
	movups %xmm3,16(%edi)
	xorps 48(%esp),%xmm5
	movups %xmm4,32(%edi)
	xorps 64(%esp),%xmm6
	movups %xmm5,48(%edi)
	xorps %xmm1,%xmm7
	movups %xmm6,64(%edi)
	pshufd $19,%xmm0,%xmm2
	movups %xmm7,80(%edi)
	leal 96(%edi),%edi
	movdqa 96(%esp),%xmm3
	pxor %xmm0,%xmm0
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	subl $96,%eax
	jnc .L048xts_enc_loop6
	movl 240(%ebp),%ecx
	movl %ebp,%edx
	movl %ecx,%ebx
.L047xts_enc_short:
	addl $96,%eax
	jz .L049xts_enc_done6x
	movdqa %xmm1,%xmm5
	cmpl $32,%eax
	jb .L050xts_enc_one
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	je .L051xts_enc_two
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	movdqa %xmm1,%xmm6
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	cmpl $64,%eax
	jb .L052xts_enc_three
	pshufd $19,%xmm0,%xmm2
	pxor %xmm0,%xmm0
	movdqa %xmm1,%xmm7
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm2
	pcmpgtd %xmm1,%xmm0
	pxor %xmm2,%xmm1
	movdqa %xmm5,(%esp)
	movdqa %xmm6,16(%esp)
	je .L053xts_enc_four
	movdqa %xmm7,32(%esp)
	pshufd $19,%xmm0,%xmm7
	movdqa %xmm1,48(%esp)
	paddq %xmm1,%xmm1
	pand %xmm3,%xmm7
	pxor %xmm1,%xmm7
	movdqu (%esi),%xmm2
	movdqu 16(%esi),%xmm3
	movdqu 32(%esi),%xmm4
	pxor (%esp),%xmm2
	movdqu 48(%esi),%xmm5
	pxor 16(%esp),%xmm3
	movdqu 64(%esi),%xmm6
	pxor 32(%esp),%xmm4
	leal 80(%esi),%esi
	pxor 48(%esp),%xmm5
	movdqa %xmm7,64(%esp)
	pxor %xmm7,%xmm6
	call _aesni_encrypt6
	movaps 64(%esp),%xmm1
	xorps (%esp),%xmm2
	xorps 16(%esp),%xmm3
	xorps 32(%esp),%xmm4
	movups %xmm2,(%edi)
	xorps 48(%esp),%xmm5
	movups %xmm3,16(%edi)
	xorps %xmm1,%xmm6
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	movups %xmm6,64(%edi)
	leal 80(%edi),%edi
	jmp .L054xts_enc_done
.align 16
.L050xts_enc_one:
	movups (%esi),%xmm2
	leal 16(%esi),%esi
	xorps %xmm5,%xmm2
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L055enc1_loop_9:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L055enc1_loop_9
.byte 102,15,56,221,209
	xorps %xmm5,%xmm2
	movups %xmm2,(%edi)
	leal 16(%edi),%edi
	movdqa %xmm5,%xmm1
	jmp .L054xts_enc_done
.align 16
.L051xts_enc_two:
	movaps %xmm1,%xmm6
	movups (%esi),%xmm2
	movups 16(%esi),%xmm3
	leal 32(%esi),%esi
	xorps %xmm5,%xmm2
	xorps %xmm6,%xmm3
	call _aesni_encrypt2
	xorps %xmm5,%xmm2
	xorps %xmm6,%xmm3
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	leal 32(%edi),%edi
	movdqa %xmm6,%xmm1
	jmp .L054xts_enc_done
.align 16
.L052xts_enc_three:
	movaps %xmm1,%xmm7
	movups (%esi),%xmm2
	movups 16(%esi),%xmm3
	movups 32(%esi),%xmm4
	leal 48(%esi),%esi
	xorps %xmm5,%xmm2
	xorps %xmm6,%xmm3
	xorps %xmm7,%xmm4
	call _aesni_encrypt3
	xorps %xmm5,%xmm2
	xorps %xmm6,%xmm3
	xorps %xmm7,%xmm4
	movups %xmm2,(%edi)
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	leal 48(%edi),%edi
	movdqa %xmm7,%xmm1
	jmp .L054xts_enc_done
.align 16
.L053xts_enc_four:
	movaps %xmm1,%xmm6
	movups (%esi),%xmm2
	movups 16(%esi),%xmm3
	movups 32(%esi),%xmm4
	xorps (%esp),%xmm2
	movups 48(%esi),%xmm5
	leal 64(%esi),%esi
	xorps 16(%esp),%xmm3
	xorps %xmm7,%xmm4
	xorps %xmm6,%xmm5
	call _aesni_encrypt4
	xorps (%esp),%xmm2
	xorps 16(%esp),%xmm3
	xorps %xmm7,%xmm4
	movups %xmm2,(%edi)
	xorps %xmm6,%xmm5
	movups %xmm3,16(%edi)
	movups %xmm4,32(%edi)
	movups %xmm5,48(%edi)
	leal 64(%edi),%edi
	movdqa %xmm6,%xmm1
	jmp .L054xts_enc_done
.align 16
.L049xts_enc_done6x:
	movl 112(%esp),%eax
	andl $15,%eax
	jz .L056xts_enc_ret
	movdqa %xmm1,%xmm5
	movl %eax,112(%esp)
	jmp .L057xts_enc_steal
.align 16
.L054xts_enc_done:
	movl 112(%esp),%eax
	pxor %xmm0,%xmm0
	andl $15,%eax
	jz .L056xts_enc_ret
	pcmpgtd %xmm1,%xmm0
	movl %eax,112(%esp)
	pshufd $19,%xmm0,%xmm5
	paddq %xmm1,%xmm1
	pand 96(%esp),%xmm5
	pxor %xmm1,%xmm5
.L057xts_enc_steal:
	movzbl (%esi),%ecx
	movzbl -16(%edi),%edx
	leal 1(%esi),%esi
	movb %cl,-16(%edi)
	movb %dl,(%edi)
	leal 1(%edi),%edi
	subl $1,%eax
	jnz .L057xts_enc_steal
	subl 112(%esp),%edi
	movl %ebp,%edx
	movl %ebx,%ecx
	movups -16(%edi),%xmm2
	xorps %xmm5,%xmm2
	movups (%edx),%xmm0
	movups 16(%edx),%xmm1
	leal 32(%edx),%edx
	xorps %xmm0,%xmm2
.L058enc1_loop_10:
.byte 102,15,56,220,209
	decl %ecx
	movups (%edx),%xmm1
	leal 16(%edx),%edx
	jnz .L058enc1_loop_10
.byte 102,15,56,221,209
	xorps %xmm5,%xmm2
	movups %xmm2,-16(%edi)
.L056xts_enc_ret:
	pxor %xmm0,%xmm0
	pxor %xmm1,%xmm1
	pxor %xmm2,%xmm2
	movdqa %xmm0,(%esp)
	pxor %xmm3,%xmm3
	movdqa %xmm0,16(%esp)
	pxor %xmm4,%xmm4
	movdqa %xmm0,32(%esp)
	pxor %xmm5,%xmm5
	movdqa %xmm0,48(%esp)
	pxor %xmm6,%xmm6
	movdqa %xmm0,64(%esp)
	pxor %xmm7,%xmm7
	movdqa %xmm0,80(%esp)
	movl 116(%esp),%esp
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	ret
.size aesni_xts_encrypt,.-.L_aesni_xts_encrypt_begin
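
# void aesni_xts_decrypt(const unsigned char *inp, unsigned char *out,
#                        size_t len, const AES_KEY *key1,
#                        const AES_KEY *key2, const unsigned char iv[16]);
# XTS decryption counterpart; when len is not block-aligned it holds
# back 16 bytes (the setnz %bl / subl %ebx,%eax dance) so the last full
# block and the stolen tail can be processed in the right order.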
1412 | .globl aesni_xts_decrypt\r | |
1413 | .type aesni_xts_decrypt,@function\r | |
1414 | .align 16\r | |
1415 | aesni_xts_decrypt:\r | |
1416 | .L_aesni_xts_decrypt_begin:\r | |
1417 | pushl %ebp\r | |
1418 | pushl %ebx\r | |
1419 | pushl %esi\r | |
1420 | pushl %edi\r | |
1421 | movl 36(%esp),%edx\r | |
1422 | movl 40(%esp),%esi\r | |
1423 | movl 240(%edx),%ecx\r | |
1424 | movups (%esi),%xmm2\r | |
1425 | movups (%edx),%xmm0\r | |
1426 | movups 16(%edx),%xmm1\r | |
1427 | leal 32(%edx),%edx\r | |
1428 | xorps %xmm0,%xmm2\r | |
1429 | .L059enc1_loop_11:\r | |
1430 | .byte 102,15,56,220,209\r | |
1431 | decl %ecx\r | |
1432 | movups (%edx),%xmm1\r | |
1433 | leal 16(%edx),%edx\r | |
1434 | jnz .L059enc1_loop_11\r | |
1435 | .byte 102,15,56,221,209\r | |
1436 | movl 20(%esp),%esi\r | |
1437 | movl 24(%esp),%edi\r | |
1438 | movl 28(%esp),%eax\r | |
1439 | movl 32(%esp),%edx\r | |
1440 | movl %esp,%ebp\r | |
1441 | subl $120,%esp\r | |
1442 | andl $-16,%esp\r | |
1443 | xorl %ebx,%ebx\r | |
1444 | testl $15,%eax\r | |
1445 | setnz %bl\r | |
1446 | shll $4,%ebx\r | |
1447 | subl %ebx,%eax\r | |
1448 | movl $135,96(%esp)\r | |
1449 | movl $0,100(%esp)\r | |
1450 | movl $1,104(%esp)\r | |
1451 | movl $0,108(%esp)\r | |
1452 | movl %eax,112(%esp)\r | |
1453 | movl %ebp,116(%esp)\r | |
1454 | movl 240(%edx),%ecx\r | |
1455 | movl %edx,%ebp\r | |
1456 | movl %ecx,%ebx\r | |
1457 | movdqa %xmm2,%xmm1\r | |
1458 | pxor %xmm0,%xmm0\r | |
1459 | movdqa 96(%esp),%xmm3\r | |
1460 | pcmpgtd %xmm1,%xmm0\r | |
1461 | andl $-16,%eax\r | |
1462 | subl $96,%eax\r | |
1463 | jc .L060xts_dec_short\r | |
1464 | shll $4,%ecx\r | |
1465 | movl $16,%ebx\r | |
1466 | subl %ecx,%ebx\r | |
1467 | leal 32(%edx,%ecx,1),%edx\r | |
1468 | jmp .L061xts_dec_loop6\r | |
1469 | .align 16\r | |
1470 | .L061xts_dec_loop6:\r | |
1471 | pshufd $19,%xmm0,%xmm2\r | |
1472 | pxor %xmm0,%xmm0\r | |
1473 | movdqa %xmm1,(%esp)\r | |
1474 | paddq %xmm1,%xmm1\r | |
1475 | pand %xmm3,%xmm2\r | |
1476 | pcmpgtd %xmm1,%xmm0\r | |
1477 | pxor %xmm2,%xmm1\r | |
1478 | pshufd $19,%xmm0,%xmm2\r | |
1479 | pxor %xmm0,%xmm0\r | |
1480 | movdqa %xmm1,16(%esp)\r | |
1481 | paddq %xmm1,%xmm1\r | |
1482 | pand %xmm3,%xmm2\r | |
1483 | pcmpgtd %xmm1,%xmm0\r | |
1484 | pxor %xmm2,%xmm1\r | |
1485 | pshufd $19,%xmm0,%xmm2\r | |
1486 | pxor %xmm0,%xmm0\r | |
1487 | movdqa %xmm1,32(%esp)\r | |
1488 | paddq %xmm1,%xmm1\r | |
1489 | pand %xmm3,%xmm2\r | |
1490 | pcmpgtd %xmm1,%xmm0\r | |
1491 | pxor %xmm2,%xmm1\r | |
1492 | pshufd $19,%xmm0,%xmm2\r | |
1493 | pxor %xmm0,%xmm0\r | |
1494 | movdqa %xmm1,48(%esp)\r | |
1495 | paddq %xmm1,%xmm1\r | |
1496 | pand %xmm3,%xmm2\r | |
1497 | pcmpgtd %xmm1,%xmm0\r | |
1498 | pxor %xmm2,%xmm1\r | |
1499 | pshufd $19,%xmm0,%xmm7\r | |
1500 | movdqa %xmm1,64(%esp)\r | |
1501 | paddq %xmm1,%xmm1\r | |
1502 | movups (%ebp),%xmm0\r | |
1503 | pand %xmm3,%xmm7\r | |
1504 | movups (%esi),%xmm2\r | |
1505 | pxor %xmm1,%xmm7\r | |
1506 | movl %ebx,%ecx\r | |
1507 | movdqu 16(%esi),%xmm3\r | |
1508 | xorps %xmm0,%xmm2\r | |
1509 | movdqu 32(%esi),%xmm4\r | |
1510 | pxor %xmm0,%xmm3\r | |
1511 | movdqu 48(%esi),%xmm5\r | |
1512 | pxor %xmm0,%xmm4\r | |
1513 | movdqu 64(%esi),%xmm6\r | |
1514 | pxor %xmm0,%xmm5\r | |
1515 | movdqu 80(%esi),%xmm1\r | |
1516 | pxor %xmm0,%xmm6\r | |
1517 | leal 96(%esi),%esi\r | |
1518 | pxor (%esp),%xmm2\r | |
1519 | movdqa %xmm7,80(%esp)\r | |
1520 | pxor %xmm1,%xmm7\r | |
1521 | movups 16(%ebp),%xmm1\r | |
1522 | pxor 16(%esp),%xmm3\r | |
1523 | pxor 32(%esp),%xmm4\r | |
1524 | .byte 102,15,56,222,209\r | |
1525 | pxor 48(%esp),%xmm5\r | |
1526 | pxor 64(%esp),%xmm6\r | |
1527 | .byte 102,15,56,222,217\r | |
1528 | pxor %xmm0,%xmm7\r | |
1529 | movups 32(%ebp),%xmm0\r | |
1530 | .byte 102,15,56,222,225\r | |
1531 | .byte 102,15,56,222,233\r | |
1532 | .byte 102,15,56,222,241\r | |
1533 | .byte 102,15,56,222,249\r | |
1534 | call .L_aesni_decrypt6_enter\r | |
1535 | movdqa 80(%esp),%xmm1\r | |
1536 | pxor %xmm0,%xmm0\r | |
1537 | xorps (%esp),%xmm2\r | |
1538 | pcmpgtd %xmm1,%xmm0\r | |
1539 | xorps 16(%esp),%xmm3\r | |
1540 | movups %xmm2,(%edi)\r | |
1541 | xorps 32(%esp),%xmm4\r | |
1542 | movups %xmm3,16(%edi)\r | |
1543 | xorps 48(%esp),%xmm5\r | |
1544 | movups %xmm4,32(%edi)\r | |
1545 | xorps 64(%esp),%xmm6\r | |
1546 | movups %xmm5,48(%edi)\r | |
1547 | xorps %xmm1,%xmm7\r | |
1548 | movups %xmm6,64(%edi)\r | |
1549 | pshufd $19,%xmm0,%xmm2\r | |
1550 | movups %xmm7,80(%edi)\r | |
1551 | leal 96(%edi),%edi\r | |
1552 | movdqa 96(%esp),%xmm3\r | |
1553 | pxor %xmm0,%xmm0\r | |
1554 | paddq %xmm1,%xmm1\r | |
1555 | pand %xmm3,%xmm2\r | |
1556 | pcmpgtd %xmm1,%xmm0\r | |
1557 | pxor %xmm2,%xmm1\r | |
1558 | subl $96,%eax\r | |
1559 | jnc .L061xts_dec_loop6\r | |
1560 | movl 240(%ebp),%ecx\r | |
1561 | movl %ebp,%edx\r | |
1562 | movl %ecx,%ebx\r | |
1563 | .L060xts_dec_short:\r | |
1564 | addl $96,%eax\r | |
1565 | jz .L062xts_dec_done6x\r | |
1566 | movdqa %xmm1,%xmm5\r | |
1567 | cmpl $32,%eax\r | |
1568 | jb .L063xts_dec_one\r | |
1569 | pshufd $19,%xmm0,%xmm2\r | |
1570 | pxor %xmm0,%xmm0\r | |
1571 | paddq %xmm1,%xmm1\r | |
1572 | pand %xmm3,%xmm2\r | |
1573 | pcmpgtd %xmm1,%xmm0\r | |
1574 | pxor %xmm2,%xmm1\r | |
1575 | je .L064xts_dec_two\r | |
1576 | pshufd $19,%xmm0,%xmm2\r | |
1577 | pxor %xmm0,%xmm0\r | |
1578 | movdqa %xmm1,%xmm6\r | |
1579 | paddq %xmm1,%xmm1\r | |
1580 | pand %xmm3,%xmm2\r | |
1581 | pcmpgtd %xmm1,%xmm0\r | |
1582 | pxor %xmm2,%xmm1\r | |
1583 | cmpl $64,%eax\r | |
1584 | jb .L065xts_dec_three\r | |
1585 | pshufd $19,%xmm0,%xmm2\r | |
1586 | pxor %xmm0,%xmm0\r | |
1587 | movdqa %xmm1,%xmm7\r | |
1588 | paddq %xmm1,%xmm1\r | |
1589 | pand %xmm3,%xmm2\r | |
1590 | pcmpgtd %xmm1,%xmm0\r | |
1591 | pxor %xmm2,%xmm1\r | |
1592 | movdqa %xmm5,(%esp)\r | |
1593 | movdqa %xmm6,16(%esp)\r | |
1594 | je .L066xts_dec_four\r | |
1595 | movdqa %xmm7,32(%esp)\r | |
1596 | pshufd $19,%xmm0,%xmm7\r | |
1597 | movdqa %xmm1,48(%esp)\r | |
1598 | paddq %xmm1,%xmm1\r | |
1599 | pand %xmm3,%xmm7\r | |
1600 | pxor %xmm1,%xmm7\r | |
1601 | movdqu (%esi),%xmm2\r | |
1602 | movdqu 16(%esi),%xmm3\r | |
1603 | movdqu 32(%esi),%xmm4\r | |
1604 | pxor (%esp),%xmm2\r | |
1605 | movdqu 48(%esi),%xmm5\r | |
1606 | pxor 16(%esp),%xmm3\r | |
1607 | movdqu 64(%esi),%xmm6\r | |
1608 | pxor 32(%esp),%xmm4\r | |
1609 | leal 80(%esi),%esi\r | |
1610 | pxor 48(%esp),%xmm5\r | |
1611 | movdqa %xmm7,64(%esp)\r | |
1612 | pxor %xmm7,%xmm6\r | |
1613 | call _aesni_decrypt6\r | |
1614 | movaps 64(%esp),%xmm1\r | |
1615 | xorps (%esp),%xmm2\r | |
1616 | xorps 16(%esp),%xmm3\r | |
1617 | xorps 32(%esp),%xmm4\r | |
1618 | movups %xmm2,(%edi)\r | |
1619 | xorps 48(%esp),%xmm5\r | |
1620 | movups %xmm3,16(%edi)\r | |
1621 | xorps %xmm1,%xmm6\r | |
1622 | movups %xmm4,32(%edi)\r | |
1623 | movups %xmm5,48(%edi)\r | |
1624 | movups %xmm6,64(%edi)\r | |
1625 | leal 80(%edi),%edi\r | |
1626 | jmp .L067xts_dec_done\r | |
1627 | .align 16\r | |
1628 | .L063xts_dec_one:\r | |
1629 | movups (%esi),%xmm2\r | |
1630 | leal 16(%esi),%esi\r | |
1631 | xorps %xmm5,%xmm2\r | |
1632 | movups (%edx),%xmm0\r | |
1633 | movups 16(%edx),%xmm1\r | |
1634 | leal 32(%edx),%edx\r | |
1635 | xorps %xmm0,%xmm2\r | |
1636 | .L068dec1_loop_12:\r | |
1637 | .byte 102,15,56,222,209\r | |
1638 | decl %ecx\r | |
1639 | movups (%edx),%xmm1\r | |
1640 | leal 16(%edx),%edx\r | |
1641 | jnz .L068dec1_loop_12\r | |
1642 | .byte 102,15,56,223,209\r | |
1643 | xorps %xmm5,%xmm2\r | |
1644 | movups %xmm2,(%edi)\r | |
1645 | leal 16(%edi),%edi\r | |
1646 | movdqa %xmm5,%xmm1\r | |
1647 | jmp .L067xts_dec_done\r | |
1648 | .align 16\r | |
1649 | .L064xts_dec_two:\r | |
1650 | movaps %xmm1,%xmm6\r | |
1651 | movups (%esi),%xmm2\r | |
1652 | movups 16(%esi),%xmm3\r | |
1653 | leal 32(%esi),%esi\r | |
1654 | xorps %xmm5,%xmm2\r | |
1655 | xorps %xmm6,%xmm3\r | |
1656 | call _aesni_decrypt2\r | |
1657 | xorps %xmm5,%xmm2\r | |
1658 | xorps %xmm6,%xmm3\r | |
1659 | movups %xmm2,(%edi)\r | |
1660 | movups %xmm3,16(%edi)\r | |
1661 | leal 32(%edi),%edi\r | |
1662 | movdqa %xmm6,%xmm1\r | |
1663 | jmp .L067xts_dec_done\r | |
1664 | .align 16\r | |
1665 | .L065xts_dec_three:\r | |
1666 | movaps %xmm1,%xmm7\r | |
1667 | movups (%esi),%xmm2\r | |
1668 | movups 16(%esi),%xmm3\r | |
1669 | movups 32(%esi),%xmm4\r | |
1670 | leal 48(%esi),%esi\r | |
1671 | xorps %xmm5,%xmm2\r | |
1672 | xorps %xmm6,%xmm3\r | |
1673 | xorps %xmm7,%xmm4\r | |
1674 | call _aesni_decrypt3\r | |
1675 | xorps %xmm5,%xmm2\r | |
1676 | xorps %xmm6,%xmm3\r | |
1677 | xorps %xmm7,%xmm4\r | |
1678 | movups %xmm2,(%edi)\r | |
1679 | movups %xmm3,16(%edi)\r | |
1680 | movups %xmm4,32(%edi)\r | |
1681 | leal 48(%edi),%edi\r | |
1682 | movdqa %xmm7,%xmm1\r | |
1683 | jmp .L067xts_dec_done\r | |
1684 | .align 16\r | |
1685 | .L066xts_dec_four:\r | |
1686 | movaps %xmm1,%xmm6\r | |
1687 | movups (%esi),%xmm2\r | |
1688 | movups 16(%esi),%xmm3\r | |
1689 | movups 32(%esi),%xmm4\r | |
1690 | xorps (%esp),%xmm2\r | |
1691 | movups 48(%esi),%xmm5\r | |
1692 | leal 64(%esi),%esi\r | |
1693 | xorps 16(%esp),%xmm3\r | |
1694 | xorps %xmm7,%xmm4\r | |
1695 | xorps %xmm6,%xmm5\r | |
1696 | call _aesni_decrypt4\r | |
1697 | xorps (%esp),%xmm2\r | |
1698 | xorps 16(%esp),%xmm3\r | |
1699 | xorps %xmm7,%xmm4\r | |
1700 | movups %xmm2,(%edi)\r | |
1701 | xorps %xmm6,%xmm5\r | |
1702 | movups %xmm3,16(%edi)\r | |
1703 | movups %xmm4,32(%edi)\r | |
1704 | movups %xmm5,48(%edi)\r | |
1705 | leal 64(%edi),%edi\r | |
1706 | movdqa %xmm6,%xmm1\r | |
1707 | jmp .L067xts_dec_done\r | |
1708 | .align 16\r | |
1709 | .L062xts_dec_done6x:\r | |
1710 | movl 112(%esp),%eax\r | |
1711 | andl $15,%eax\r | |
1712 | jz .L069xts_dec_ret\r | |
1713 | movl %eax,112(%esp)\r | |
1714 | jmp .L070xts_dec_only_one_more\r | |
1715 | .align 16\r | |
1716 | .L067xts_dec_done:\r | |
1717 | movl 112(%esp),%eax\r | |
1718 | pxor %xmm0,%xmm0\r | |
1719 | andl $15,%eax\r | |
1720 | jz .L069xts_dec_ret\r | |
1721 | pcmpgtd %xmm1,%xmm0\r | |
1722 | movl %eax,112(%esp)\r | |
1723 | pshufd $19,%xmm0,%xmm2\r | |
1724 | pxor %xmm0,%xmm0\r | |
1725 | movdqa 96(%esp),%xmm3\r | |
1726 | paddq %xmm1,%xmm1\r | |
1727 | pand %xmm3,%xmm2\r | |
1728 | pcmpgtd %xmm1,%xmm0\r | |
1729 | pxor %xmm2,%xmm1\r | |
1730 | .L070xts_dec_only_one_more:\r | |
1731 | pshufd $19,%xmm0,%xmm5\r | |
1732 | movdqa %xmm1,%xmm6\r | |
1733 | paddq %xmm1,%xmm1\r | |
1734 | pand %xmm3,%xmm5\r | |
1735 | pxor %xmm1,%xmm5\r | |
1736 | movl %ebp,%edx\r | |
1737 | movl %ebx,%ecx\r | |
1738 | movups (%esi),%xmm2\r | |
1739 | xorps %xmm5,%xmm2\r | |
1740 | movups (%edx),%xmm0\r | |
1741 | movups 16(%edx),%xmm1\r | |
1742 | leal 32(%edx),%edx\r | |
1743 | xorps %xmm0,%xmm2\r | |
1744 | .L071dec1_loop_13:\r | |
1745 | .byte 102,15,56,222,209\r | |
1746 | decl %ecx\r | |
1747 | movups (%edx),%xmm1\r | |
1748 | leal 16(%edx),%edx\r | |
1749 | jnz .L071dec1_loop_13\r | |
1750 | .byte 102,15,56,223,209\r | |
1751 | xorps %xmm5,%xmm2\r | |
1752 | movups %xmm2,(%edi)\r | |
.L072xts_dec_steal:
movzbl 16(%esi),%ecx
movzbl (%edi),%edx
leal 1(%esi),%esi
movb %cl,(%edi)
movb %dl,16(%edi)
leal 1(%edi),%edi
subl $1,%eax
jnz .L072xts_dec_steal
subl 112(%esp),%edi
movl %ebp,%edx
movl %ebx,%ecx
movups (%edi),%xmm2
xorps %xmm6,%xmm2
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
.L073dec1_loop_14:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L073dec1_loop_14
.byte 102,15,56,223,209
xorps %xmm6,%xmm2
movups %xmm2,(%edi)
.L069xts_dec_ret:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
movdqa %xmm0,(%esp)
pxor %xmm3,%xmm3
movdqa %xmm0,16(%esp)
pxor %xmm4,%xmm4
movdqa %xmm0,32(%esp)
pxor %xmm5,%xmm5
movdqa %xmm0,48(%esp)
pxor %xmm6,%xmm6
movdqa %xmm0,64(%esp)
pxor %xmm7,%xmm7
movdqa %xmm0,80(%esp)
movl 116(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size aesni_xts_decrypt,.-.L_aesni_xts_decrypt_begin
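# aesni_ocb_encrypt: OCB-mode bulk encryption.  Judging from the stack loads
# below, the arguments are (in, out, blocks, key, start_block_num, offset_i,
# L_ table, checksum); %xmm0 carries the running offset and %xmm1 the
# plaintext checksum.  The .byte 102,15,56,220/221,... sequences are
# hand-encoded aesenc/aesenclast.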
.globl aesni_ocb_encrypt
.type aesni_ocb_encrypt,@function
.align 16
aesni_ocb_encrypt:
.L_aesni_ocb_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 40(%esp),%ecx
movl 48(%esp),%ebx
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movdqu (%ecx),%xmm0
movl 36(%esp),%ebp
movdqu (%ebx),%xmm1
movl 44(%esp),%ebx
movl %esp,%ecx
subl $132,%esp
andl $-16,%esp
subl %esi,%edi
shll $4,%eax
leal -96(%esi,%eax,1),%eax
movl %edi,120(%esp)
movl %eax,124(%esp)
movl %ecx,128(%esp)
movl 240(%edx),%ecx
testl $1,%ebp
jnz .L074odd
bsfl %ebp,%eax
addl $1,%ebp
shll $4,%eax
movdqu (%ebx,%eax,1),%xmm7
movl %edx,%eax
movdqu (%esi),%xmm2
leal 16(%esi),%esi
pxor %xmm0,%xmm7
pxor %xmm2,%xmm1
pxor %xmm7,%xmm2
movdqa %xmm1,%xmm6
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
.L075enc1_loop_15:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L075enc1_loop_15
.byte 102,15,56,221,209
xorps %xmm7,%xmm2
movdqa %xmm7,%xmm0
movdqa %xmm6,%xmm1
movups %xmm2,-16(%edi,%esi,1)
movl 240(%eax),%ecx
movl %eax,%edx
movl 124(%esp),%eax
.L074odd:
shll $4,%ecx
movl $16,%edi
subl %ecx,%edi
movl %edx,112(%esp)
leal 32(%edx,%ecx,1),%edx
movl %edi,116(%esp)
cmpl %eax,%esi
ja .L076short
jmp .L077grandloop
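# Grand loop: six blocks per iteration.  Each block i uses offset
# Offset_{i-1} xor L_[ntz(i)]; bsfl computes ntz(i) and shll $4 scales it
# into the 16-byte-per-entry L_ table.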
.align 32
.L077grandloop:
leal 1(%ebp),%ecx
leal 3(%ebp),%eax
leal 5(%ebp),%edi
addl $6,%ebp
bsfl %ecx,%ecx
bsfl %eax,%eax
bsfl %edi,%edi
shll $4,%ecx
shll $4,%eax
shll $4,%edi
movdqu (%ebx),%xmm2
movdqu (%ebx,%ecx,1),%xmm3
movl 116(%esp),%ecx
movdqa %xmm2,%xmm4
movdqu (%ebx,%eax,1),%xmm5
movdqa %xmm2,%xmm6
movdqu (%ebx,%edi,1),%xmm7
pxor %xmm0,%xmm2
pxor %xmm2,%xmm3
movdqa %xmm2,(%esp)
pxor %xmm3,%xmm4
movdqa %xmm3,16(%esp)
pxor %xmm4,%xmm5
movdqa %xmm4,32(%esp)
pxor %xmm5,%xmm6
movdqa %xmm5,48(%esp)
pxor %xmm6,%xmm7
movdqa %xmm6,64(%esp)
movdqa %xmm7,80(%esp)
movups -48(%edx,%ecx,1),%xmm0
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
pxor %xmm2,%xmm1
pxor %xmm0,%xmm2
pxor %xmm3,%xmm1
pxor %xmm0,%xmm3
pxor %xmm4,%xmm1
pxor %xmm0,%xmm4
pxor %xmm5,%xmm1
pxor %xmm0,%xmm5
pxor %xmm6,%xmm1
pxor %xmm0,%xmm6
pxor %xmm7,%xmm1
pxor %xmm0,%xmm7
movdqa %xmm1,96(%esp)
movups -32(%edx,%ecx,1),%xmm1
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
pxor 80(%esp),%xmm7
movups -16(%edx,%ecx,1),%xmm0
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
movl 120(%esp),%edi
movl 124(%esp),%eax
call .L_aesni_encrypt6_enter
movdqa 80(%esp),%xmm0
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
pxor %xmm0,%xmm7
movdqa 96(%esp),%xmm1
movdqu %xmm2,-96(%edi,%esi,1)
movdqu %xmm3,-80(%edi,%esi,1)
movdqu %xmm4,-64(%edi,%esi,1)
movdqu %xmm5,-48(%edi,%esi,1)
movdqu %xmm6,-32(%edi,%esi,1)
movdqu %xmm7,-16(%edi,%esi,1)
cmpl %eax,%esi
jb .L077grandloop
.L076short:
addl $96,%eax
subl %esi,%eax
jz .L078done
cmpl $32,%eax
jb .L079one
je .L080two
cmpl $64,%eax
jb .L081three
je .L082four
leal 1(%ebp),%ecx
leal 3(%ebp),%eax
bsfl %ecx,%ecx
bsfl %eax,%eax
shll $4,%ecx
shll $4,%eax
movdqu (%ebx),%xmm2
movdqu (%ebx,%ecx,1),%xmm3
movl 116(%esp),%ecx
movdqa %xmm2,%xmm4
movdqu (%ebx,%eax,1),%xmm5
movdqa %xmm2,%xmm6
pxor %xmm0,%xmm2
pxor %xmm2,%xmm3
movdqa %xmm2,(%esp)
pxor %xmm3,%xmm4
movdqa %xmm3,16(%esp)
pxor %xmm4,%xmm5
movdqa %xmm4,32(%esp)
pxor %xmm5,%xmm6
movdqa %xmm5,48(%esp)
pxor %xmm6,%xmm7
movdqa %xmm6,64(%esp)
movups -48(%edx,%ecx,1),%xmm0
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
pxor %xmm7,%xmm7
pxor %xmm2,%xmm1
pxor %xmm0,%xmm2
pxor %xmm3,%xmm1
pxor %xmm0,%xmm3
pxor %xmm4,%xmm1
pxor %xmm0,%xmm4
pxor %xmm5,%xmm1
pxor %xmm0,%xmm5
pxor %xmm6,%xmm1
pxor %xmm0,%xmm6
movdqa %xmm1,96(%esp)
movups -32(%edx,%ecx,1),%xmm1
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
movups -16(%edx,%ecx,1),%xmm0
.byte 102,15,56,220,209
.byte 102,15,56,220,217
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
.byte 102,15,56,220,249
movl 120(%esp),%edi
call .L_aesni_encrypt6_enter
movdqa 64(%esp),%xmm0
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor %xmm0,%xmm6
movdqa 96(%esp),%xmm1
movdqu %xmm2,(%edi,%esi,1)
movdqu %xmm3,16(%edi,%esi,1)
movdqu %xmm4,32(%edi,%esi,1)
movdqu %xmm5,48(%edi,%esi,1)
movdqu %xmm6,64(%edi,%esi,1)
jmp .L078done
.align 16
.L079one:
movdqu (%ebx),%xmm7
movl 112(%esp),%edx
movdqu (%esi),%xmm2
movl 240(%edx),%ecx
pxor %xmm0,%xmm7
pxor %xmm2,%xmm1
pxor %xmm7,%xmm2
movdqa %xmm1,%xmm6
movl 120(%esp),%edi
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
.L083enc1_loop_16:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L083enc1_loop_16
.byte 102,15,56,221,209
xorps %xmm7,%xmm2
movdqa %xmm7,%xmm0
movdqa %xmm6,%xmm1
movups %xmm2,(%edi,%esi,1)
jmp .L078done
.align 16
.L080two:
leal 1(%ebp),%ecx
movl 112(%esp),%edx
bsfl %ecx,%ecx
shll $4,%ecx
movdqu (%ebx),%xmm6
movdqu (%ebx,%ecx,1),%xmm7
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movl 240(%edx),%ecx
pxor %xmm0,%xmm6
pxor %xmm6,%xmm7
pxor %xmm2,%xmm1
pxor %xmm6,%xmm2
pxor %xmm3,%xmm1
pxor %xmm7,%xmm3
movdqa %xmm1,%xmm5
movl 120(%esp),%edi
call _aesni_encrypt2
xorps %xmm6,%xmm2
xorps %xmm7,%xmm3
movdqa %xmm7,%xmm0
movdqa %xmm5,%xmm1
movups %xmm2,(%edi,%esi,1)
movups %xmm3,16(%edi,%esi,1)
jmp .L078done
.align 16
.L081three:
leal 1(%ebp),%ecx
movl 112(%esp),%edx
bsfl %ecx,%ecx
shll $4,%ecx
movdqu (%ebx),%xmm5
movdqu (%ebx,%ecx,1),%xmm6
movdqa %xmm5,%xmm7
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movl 240(%edx),%ecx
pxor %xmm0,%xmm5
pxor %xmm5,%xmm6
pxor %xmm6,%xmm7
pxor %xmm2,%xmm1
pxor %xmm5,%xmm2
pxor %xmm3,%xmm1
pxor %xmm6,%xmm3
pxor %xmm4,%xmm1
pxor %xmm7,%xmm4
movdqa %xmm1,96(%esp)
movl 120(%esp),%edi
call _aesni_encrypt3
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
movdqa %xmm7,%xmm0
movdqa 96(%esp),%xmm1
movups %xmm2,(%edi,%esi,1)
movups %xmm3,16(%edi,%esi,1)
movups %xmm4,32(%edi,%esi,1)
jmp .L078done
.align 16
.L082four:
leal 1(%ebp),%ecx
leal 3(%ebp),%eax
bsfl %ecx,%ecx
bsfl %eax,%eax
movl 112(%esp),%edx
shll $4,%ecx
shll $4,%eax
movdqu (%ebx),%xmm4
movdqu (%ebx,%ecx,1),%xmm5
movdqa %xmm4,%xmm6
movdqu (%ebx,%eax,1),%xmm7
pxor %xmm0,%xmm4
movdqu (%esi),%xmm2
pxor %xmm4,%xmm5
movdqu 16(%esi),%xmm3
pxor %xmm5,%xmm6
movdqa %xmm4,(%esp)
pxor %xmm6,%xmm7
movdqa %xmm5,16(%esp)
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movl 240(%edx),%ecx
pxor %xmm2,%xmm1
pxor (%esp),%xmm2
pxor %xmm3,%xmm1
pxor 16(%esp),%xmm3
pxor %xmm4,%xmm1
pxor %xmm6,%xmm4
pxor %xmm5,%xmm1
pxor %xmm7,%xmm5
movdqa %xmm1,96(%esp)
movl 120(%esp),%edi
call _aesni_encrypt4
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps %xmm6,%xmm4
movups %xmm2,(%edi,%esi,1)
xorps %xmm7,%xmm5
movups %xmm3,16(%edi,%esi,1)
movdqa %xmm7,%xmm0
movups %xmm4,32(%edi,%esi,1)
movdqa 96(%esp),%xmm1
movups %xmm5,48(%edi,%esi,1)
.L078done:
movl 128(%esp),%edx
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
movdqa %xmm2,(%esp)
pxor %xmm4,%xmm4
movdqa %xmm2,16(%esp)
pxor %xmm5,%xmm5
movdqa %xmm2,32(%esp)
pxor %xmm6,%xmm6
movdqa %xmm2,48(%esp)
pxor %xmm7,%xmm7
movdqa %xmm2,64(%esp)
movdqa %xmm2,80(%esp)
movdqa %xmm2,96(%esp)
leal (%edx),%esp
movl 40(%esp),%ecx
movl 48(%esp),%ebx
movdqu %xmm0,(%ecx)
pxor %xmm0,%xmm0
movdqu %xmm1,(%ebx)
pxor %xmm1,%xmm1
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size aesni_ocb_encrypt,.-.L_aesni_ocb_encrypt_begin
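# aesni_ocb_decrypt: mirrors aesni_ocb_encrypt, but the rounds are
# aesdec/aesdeclast (.byte 102,15,56,222/223,...) and the checksum is
# accumulated over the recovered plaintext rather than the input.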
.globl aesni_ocb_decrypt
.type aesni_ocb_decrypt,@function
.align 16
aesni_ocb_decrypt:
.L_aesni_ocb_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 40(%esp),%ecx
movl 48(%esp),%ebx
movl 20(%esp),%esi
movl 24(%esp),%edi
movl 28(%esp),%eax
movl 32(%esp),%edx
movdqu (%ecx),%xmm0
movl 36(%esp),%ebp
movdqu (%ebx),%xmm1
movl 44(%esp),%ebx
movl %esp,%ecx
subl $132,%esp
andl $-16,%esp
subl %esi,%edi
shll $4,%eax
leal -96(%esi,%eax,1),%eax
movl %edi,120(%esp)
movl %eax,124(%esp)
movl %ecx,128(%esp)
movl 240(%edx),%ecx
testl $1,%ebp
jnz .L084odd
bsfl %ebp,%eax
addl $1,%ebp
shll $4,%eax
movdqu (%ebx,%eax,1),%xmm7
movl %edx,%eax
movdqu (%esi),%xmm2
leal 16(%esi),%esi
pxor %xmm0,%xmm7
pxor %xmm7,%xmm2
movdqa %xmm1,%xmm6
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
.L085dec1_loop_17:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L085dec1_loop_17
.byte 102,15,56,223,209
xorps %xmm7,%xmm2
movaps %xmm6,%xmm1
movdqa %xmm7,%xmm0
xorps %xmm2,%xmm1
movups %xmm2,-16(%edi,%esi,1)
movl 240(%eax),%ecx
movl %eax,%edx
movl 124(%esp),%eax
.L084odd:
shll $4,%ecx
movl $16,%edi
subl %ecx,%edi
movl %edx,112(%esp)
leal 32(%edx,%ecx,1),%edx
movl %edi,116(%esp)
cmpl %eax,%esi
ja .L086short
jmp .L087grandloop
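# Decrypt grand loop: offset handling is identical to the encrypt side; the
# checksum xors happen after .L_aesni_decrypt6_enter returns, on plaintext.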
.align 32
.L087grandloop:
leal 1(%ebp),%ecx
leal 3(%ebp),%eax
leal 5(%ebp),%edi
addl $6,%ebp
bsfl %ecx,%ecx
bsfl %eax,%eax
bsfl %edi,%edi
shll $4,%ecx
shll $4,%eax
shll $4,%edi
movdqu (%ebx),%xmm2
movdqu (%ebx,%ecx,1),%xmm3
movl 116(%esp),%ecx
movdqa %xmm2,%xmm4
movdqu (%ebx,%eax,1),%xmm5
movdqa %xmm2,%xmm6
movdqu (%ebx,%edi,1),%xmm7
pxor %xmm0,%xmm2
pxor %xmm2,%xmm3
movdqa %xmm2,(%esp)
pxor %xmm3,%xmm4
movdqa %xmm3,16(%esp)
pxor %xmm4,%xmm5
movdqa %xmm4,32(%esp)
pxor %xmm5,%xmm6
movdqa %xmm5,48(%esp)
pxor %xmm6,%xmm7
movdqa %xmm6,64(%esp)
movdqa %xmm7,80(%esp)
movups -48(%edx,%ecx,1),%xmm0
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
leal 96(%esi),%esi
movdqa %xmm1,96(%esp)
pxor %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
pxor %xmm0,%xmm5
pxor %xmm0,%xmm6
pxor %xmm0,%xmm7
movups -32(%edx,%ecx,1),%xmm1
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
pxor 80(%esp),%xmm7
movups -16(%edx,%ecx,1),%xmm0
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
movl 120(%esp),%edi
movl 124(%esp),%eax
call .L_aesni_decrypt6_enter
movdqa 80(%esp),%xmm0
pxor (%esp),%xmm2
movdqa 96(%esp),%xmm1
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
pxor %xmm0,%xmm7
pxor %xmm2,%xmm1
movdqu %xmm2,-96(%edi,%esi,1)
pxor %xmm3,%xmm1
movdqu %xmm3,-80(%edi,%esi,1)
pxor %xmm4,%xmm1
movdqu %xmm4,-64(%edi,%esi,1)
pxor %xmm5,%xmm1
movdqu %xmm5,-48(%edi,%esi,1)
pxor %xmm6,%xmm1
movdqu %xmm6,-32(%edi,%esi,1)
pxor %xmm7,%xmm1
movdqu %xmm7,-16(%edi,%esi,1)
cmpl %eax,%esi
jb .L087grandloop
.L086short:
addl $96,%eax
subl %esi,%eax
jz .L088done
cmpl $32,%eax
jb .L089one
je .L090two
cmpl $64,%eax
jb .L091three
je .L092four
leal 1(%ebp),%ecx
leal 3(%ebp),%eax
bsfl %ecx,%ecx
bsfl %eax,%eax
shll $4,%ecx
shll $4,%eax
movdqu (%ebx),%xmm2
movdqu (%ebx,%ecx,1),%xmm3
movl 116(%esp),%ecx
movdqa %xmm2,%xmm4
movdqu (%ebx,%eax,1),%xmm5
movdqa %xmm2,%xmm6
pxor %xmm0,%xmm2
pxor %xmm2,%xmm3
movdqa %xmm2,(%esp)
pxor %xmm3,%xmm4
movdqa %xmm3,16(%esp)
pxor %xmm4,%xmm5
movdqa %xmm4,32(%esp)
pxor %xmm5,%xmm6
movdqa %xmm5,48(%esp)
pxor %xmm6,%xmm7
movdqa %xmm6,64(%esp)
movups -48(%edx,%ecx,1),%xmm0
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
pxor %xmm7,%xmm7
movdqa %xmm1,96(%esp)
pxor %xmm0,%xmm2
pxor %xmm0,%xmm3
pxor %xmm0,%xmm4
pxor %xmm0,%xmm5
pxor %xmm0,%xmm6
movups -32(%edx,%ecx,1),%xmm1
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor 64(%esp),%xmm6
movups -16(%edx,%ecx,1),%xmm0
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
movl 120(%esp),%edi
call .L_aesni_decrypt6_enter
movdqa 64(%esp),%xmm0
pxor (%esp),%xmm2
movdqa 96(%esp),%xmm1
pxor 16(%esp),%xmm3
pxor 32(%esp),%xmm4
pxor 48(%esp),%xmm5
pxor %xmm0,%xmm6
pxor %xmm2,%xmm1
movdqu %xmm2,(%edi,%esi,1)
pxor %xmm3,%xmm1
movdqu %xmm3,16(%edi,%esi,1)
pxor %xmm4,%xmm1
movdqu %xmm4,32(%edi,%esi,1)
pxor %xmm5,%xmm1
movdqu %xmm5,48(%edi,%esi,1)
pxor %xmm6,%xmm1
movdqu %xmm6,64(%edi,%esi,1)
jmp .L088done
.align 16
.L089one:
movdqu (%ebx),%xmm7
movl 112(%esp),%edx
movdqu (%esi),%xmm2
movl 240(%edx),%ecx
pxor %xmm0,%xmm7
pxor %xmm7,%xmm2
movdqa %xmm1,%xmm6
movl 120(%esp),%edi
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
.L093dec1_loop_18:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L093dec1_loop_18
.byte 102,15,56,223,209
xorps %xmm7,%xmm2
movaps %xmm6,%xmm1
movdqa %xmm7,%xmm0
xorps %xmm2,%xmm1
movups %xmm2,(%edi,%esi,1)
jmp .L088done
.align 16
.L090two:
leal 1(%ebp),%ecx
movl 112(%esp),%edx
bsfl %ecx,%ecx
shll $4,%ecx
movdqu (%ebx),%xmm6
movdqu (%ebx,%ecx,1),%xmm7
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movl 240(%edx),%ecx
movdqa %xmm1,%xmm5
pxor %xmm0,%xmm6
pxor %xmm6,%xmm7
pxor %xmm6,%xmm2
pxor %xmm7,%xmm3
movl 120(%esp),%edi
call _aesni_decrypt2
xorps %xmm6,%xmm2
xorps %xmm7,%xmm3
movdqa %xmm7,%xmm0
xorps %xmm2,%xmm5
movups %xmm2,(%edi,%esi,1)
xorps %xmm3,%xmm5
movups %xmm3,16(%edi,%esi,1)
movaps %xmm5,%xmm1
jmp .L088done
.align 16
.L091three:
leal 1(%ebp),%ecx
movl 112(%esp),%edx
bsfl %ecx,%ecx
shll $4,%ecx
movdqu (%ebx),%xmm5
movdqu (%ebx,%ecx,1),%xmm6
movdqa %xmm5,%xmm7
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movl 240(%edx),%ecx
movdqa %xmm1,96(%esp)
pxor %xmm0,%xmm5
pxor %xmm5,%xmm6
pxor %xmm6,%xmm7
pxor %xmm5,%xmm2
pxor %xmm6,%xmm3
pxor %xmm7,%xmm4
movl 120(%esp),%edi
call _aesni_decrypt3
movdqa 96(%esp),%xmm1
xorps %xmm5,%xmm2
xorps %xmm6,%xmm3
xorps %xmm7,%xmm4
movups %xmm2,(%edi,%esi,1)
pxor %xmm2,%xmm1
movdqa %xmm7,%xmm0
movups %xmm3,16(%edi,%esi,1)
pxor %xmm3,%xmm1
movups %xmm4,32(%edi,%esi,1)
pxor %xmm4,%xmm1
jmp .L088done
.align 16
.L092four:
leal 1(%ebp),%ecx
leal 3(%ebp),%eax
bsfl %ecx,%ecx
bsfl %eax,%eax
movl 112(%esp),%edx
shll $4,%ecx
shll $4,%eax
movdqu (%ebx),%xmm4
movdqu (%ebx,%ecx,1),%xmm5
movdqa %xmm4,%xmm6
movdqu (%ebx,%eax,1),%xmm7
pxor %xmm0,%xmm4
movdqu (%esi),%xmm2
pxor %xmm4,%xmm5
movdqu 16(%esi),%xmm3
pxor %xmm5,%xmm6
movdqa %xmm4,(%esp)
pxor %xmm6,%xmm7
movdqa %xmm5,16(%esp)
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movl 240(%edx),%ecx
movdqa %xmm1,96(%esp)
pxor (%esp),%xmm2
pxor 16(%esp),%xmm3
pxor %xmm6,%xmm4
pxor %xmm7,%xmm5
movl 120(%esp),%edi
call _aesni_decrypt4
movdqa 96(%esp),%xmm1
xorps (%esp),%xmm2
xorps 16(%esp),%xmm3
xorps %xmm6,%xmm4
movups %xmm2,(%edi,%esi,1)
pxor %xmm2,%xmm1
xorps %xmm7,%xmm5
movups %xmm3,16(%edi,%esi,1)
pxor %xmm3,%xmm1
movdqa %xmm7,%xmm0
movups %xmm4,32(%edi,%esi,1)
pxor %xmm4,%xmm1
movups %xmm5,48(%edi,%esi,1)
pxor %xmm5,%xmm1
.L088done:
movl 128(%esp),%edx
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
movdqa %xmm2,(%esp)
pxor %xmm4,%xmm4
movdqa %xmm2,16(%esp)
pxor %xmm5,%xmm5
movdqa %xmm2,32(%esp)
pxor %xmm6,%xmm6
movdqa %xmm2,48(%esp)
pxor %xmm7,%xmm7
movdqa %xmm2,64(%esp)
movdqa %xmm2,80(%esp)
movdqa %xmm2,96(%esp)
leal (%edx),%esp
movl 40(%esp),%ecx
movl 48(%esp),%ebx
movdqu %xmm0,(%ecx)
pxor %xmm0,%xmm0
movdqu %xmm1,(%ebx)
pxor %xmm1,%xmm1
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size aesni_ocb_decrypt,.-.L_aesni_ocb_decrypt_begin
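# aesni_cbc_encrypt(in, out, length, key, ivec, enc): CBC encryption is
# inherently serial, so it runs one block at a time; the decrypt path below
# is interleaved six blocks deep.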
.globl aesni_cbc_encrypt
.type aesni_cbc_encrypt,@function
.align 16
aesni_cbc_encrypt:
.L_aesni_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
movl %esp,%ebx
movl 24(%esp),%edi
subl $24,%ebx
movl 28(%esp),%eax
andl $-16,%ebx
movl 32(%esp),%edx
movl 36(%esp),%ebp
testl %eax,%eax
jz .L094cbc_abort
cmpl $0,40(%esp)
xchgl %esp,%ebx
movups (%ebp),%xmm7
movl 240(%edx),%ecx
movl %edx,%ebp
movl %ebx,16(%esp)
movl %ecx,%ebx
je .L095cbc_decrypt
movaps %xmm7,%xmm2
cmpl $16,%eax
jb .L096cbc_enc_tail
subl $16,%eax
jmp .L097cbc_enc_loop
.align 16
.L097cbc_enc_loop:
movups (%esi),%xmm7
leal 16(%esi),%esi
movups (%edx),%xmm0
movups 16(%edx),%xmm1
xorps %xmm0,%xmm7
leal 32(%edx),%edx
xorps %xmm7,%xmm2
.L098enc1_loop_19:
.byte 102,15,56,220,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L098enc1_loop_19
.byte 102,15,56,221,209
movl %ebx,%ecx
movl %ebp,%edx
movups %xmm2,(%edi)
leal 16(%edi),%edi
subl $16,%eax
jnc .L097cbc_enc_loop
addl $16,%eax
jnz .L096cbc_enc_tail
movaps %xmm2,%xmm7
pxor %xmm2,%xmm2
jmp .L099cbc_ret
.L096cbc_enc_tail:
movl %eax,%ecx
.long 2767451785
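# 0xA4F3F689 encodes "mov %esi,%esi; rep movsb": copy the remaining input
# bytes into the output block.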
movl $16,%ecx
subl %eax,%ecx
xorl %eax,%eax
.long 2868115081
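# 0xAAF3F689 encodes "mov %esi,%esi; rep stosb": zero-pad the block to 16
# bytes before encrypting it in place.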
leal -16(%edi),%edi
movl %ebx,%ecx
movl %edi,%esi
movl %ebp,%edx
jmp .L097cbc_enc_loop
.align 16
.L095cbc_decrypt:
cmpl $80,%eax
jbe .L100cbc_dec_tail
movaps %xmm7,(%esp)
subl $80,%eax
jmp .L101cbc_dec_loop6_enter
.align 16
.L102cbc_dec_loop6:
movaps %xmm0,(%esp)
movups %xmm7,(%edi)
leal 16(%edi),%edi
.L101cbc_dec_loop6_enter:
movdqu (%esi),%xmm2
movdqu 16(%esi),%xmm3
movdqu 32(%esi),%xmm4
movdqu 48(%esi),%xmm5
movdqu 64(%esi),%xmm6
movdqu 80(%esi),%xmm7
call _aesni_decrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps (%esp),%xmm2
xorps %xmm1,%xmm3
movups 32(%esi),%xmm1
xorps %xmm0,%xmm4
movups 48(%esi),%xmm0
xorps %xmm1,%xmm5
movups 64(%esi),%xmm1
xorps %xmm0,%xmm6
movups 80(%esi),%xmm0
xorps %xmm1,%xmm7
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
leal 96(%esi),%esi
movups %xmm4,32(%edi)
movl %ebx,%ecx
movups %xmm5,48(%edi)
movl %ebp,%edx
movups %xmm6,64(%edi)
leal 80(%edi),%edi
subl $96,%eax
ja .L102cbc_dec_loop6
movaps %xmm7,%xmm2
movaps %xmm0,%xmm7
addl $80,%eax
jle .L103cbc_dec_clear_tail_collected
movups %xmm2,(%edi)
leal 16(%edi),%edi
.L100cbc_dec_tail:
movups (%esi),%xmm2
movaps %xmm2,%xmm6
cmpl $16,%eax
jbe .L104cbc_dec_one
movups 16(%esi),%xmm3
movaps %xmm3,%xmm5
cmpl $32,%eax
jbe .L105cbc_dec_two
movups 32(%esi),%xmm4
cmpl $48,%eax
jbe .L106cbc_dec_three
movups 48(%esi),%xmm5
cmpl $64,%eax
jbe .L107cbc_dec_four
movups 64(%esi),%xmm6
movaps %xmm7,(%esp)
movups (%esi),%xmm2
xorps %xmm7,%xmm7
call _aesni_decrypt6
movups (%esi),%xmm1
movups 16(%esi),%xmm0
xorps (%esp),%xmm2
xorps %xmm1,%xmm3
movups 32(%esi),%xmm1
xorps %xmm0,%xmm4
movups 48(%esi),%xmm0
xorps %xmm1,%xmm5
movups 64(%esi),%xmm7
xorps %xmm0,%xmm6
movups %xmm2,(%edi)
movups %xmm3,16(%edi)
pxor %xmm3,%xmm3
movups %xmm4,32(%edi)
pxor %xmm4,%xmm4
movups %xmm5,48(%edi)
pxor %xmm5,%xmm5
leal 64(%edi),%edi
movaps %xmm6,%xmm2
pxor %xmm6,%xmm6
subl $80,%eax
jmp .L108cbc_dec_tail_collected
.align 16
.L104cbc_dec_one:
movups (%edx),%xmm0
movups 16(%edx),%xmm1
leal 32(%edx),%edx
xorps %xmm0,%xmm2
.L109dec1_loop_20:
.byte 102,15,56,222,209
decl %ecx
movups (%edx),%xmm1
leal 16(%edx),%edx
jnz .L109dec1_loop_20
.byte 102,15,56,223,209
xorps %xmm7,%xmm2
movaps %xmm6,%xmm7
subl $16,%eax
jmp .L108cbc_dec_tail_collected
.align 16
.L105cbc_dec_two:
call _aesni_decrypt2
xorps %xmm7,%xmm2
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
movaps %xmm3,%xmm2
pxor %xmm3,%xmm3
leal 16(%edi),%edi
movaps %xmm5,%xmm7
subl $32,%eax
jmp .L108cbc_dec_tail_collected
.align 16
.L106cbc_dec_three:
call _aesni_decrypt3
xorps %xmm7,%xmm2
xorps %xmm6,%xmm3
xorps %xmm5,%xmm4
movups %xmm2,(%edi)
movaps %xmm4,%xmm2
pxor %xmm4,%xmm4
movups %xmm3,16(%edi)
pxor %xmm3,%xmm3
leal 32(%edi),%edi
movups 32(%esi),%xmm7
subl $48,%eax
jmp .L108cbc_dec_tail_collected
.align 16
.L107cbc_dec_four:
call _aesni_decrypt4
movups 16(%esi),%xmm1
movups 32(%esi),%xmm0
xorps %xmm7,%xmm2
movups 48(%esi),%xmm7
xorps %xmm6,%xmm3
movups %xmm2,(%edi)
xorps %xmm1,%xmm4
movups %xmm3,16(%edi)
pxor %xmm3,%xmm3
xorps %xmm0,%xmm5
movups %xmm4,32(%edi)
pxor %xmm4,%xmm4
leal 48(%edi),%edi
movaps %xmm5,%xmm2
pxor %xmm5,%xmm5
subl $64,%eax
jmp .L108cbc_dec_tail_collected
.align 16
.L103cbc_dec_clear_tail_collected:
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
.L108cbc_dec_tail_collected:
andl $15,%eax
jnz .L110cbc_dec_tail_partial
movups %xmm2,(%edi)
pxor %xmm0,%xmm0
jmp .L099cbc_ret
.align 16
.L110cbc_dec_tail_partial:
movaps %xmm2,(%esp)
pxor %xmm0,%xmm0
movl $16,%ecx
movl %esp,%esi
subl %eax,%ecx
.long 2767451785
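# 0xA4F3F689 again ("mov %esi,%esi; rep movsb"): copy the partial last
# block from the stack scratch slot to the output.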
movdqa %xmm2,(%esp)
.L099cbc_ret:
movl 16(%esp),%esp
movl 36(%esp),%ebp
pxor %xmm2,%xmm2
pxor %xmm1,%xmm1
movups %xmm7,(%ebp)
pxor %xmm7,%xmm7
.L094cbc_abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
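# _aesni_set_encrypt_key: shared key-expansion body (%eax = user key,
# %ecx = key bits, %edx = schedule).  The OPENSSL_ia32cap_P test appears to
# steer AVX-capable CPUs to the pshufb-based "_alt" schedules further down;
# the .byte 102,15,58,223,... sequences are hand-encoded aeskeygenassist.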
.type _aesni_set_encrypt_key,@function
.align 16
_aesni_set_encrypt_key:
pushl %ebp
pushl %ebx
testl %eax,%eax
jz .L111bad_pointer
testl %edx,%edx
jz .L111bad_pointer
call .L112pic
.L112pic:
popl %ebx
leal .Lkey_const-.L112pic(%ebx),%ebx
leal OPENSSL_ia32cap_P,%ebp
movups (%eax),%xmm0
xorps %xmm4,%xmm4
movl 4(%ebp),%ebp
leal 16(%edx),%edx
andl $268437504,%ebp
cmpl $256,%ecx
je .L11314rounds
cmpl $192,%ecx
je .L11412rounds
cmpl $128,%ecx
jne .L115bad_keybits
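# AES-128, 10 rounds: each .byte 102,15,58,223,200,RCON below is
# aeskeygenassist $RCON,%xmm0,%xmm1, folded into the previous round key by
# the shufps/xorps ladder at .L118key_128_cold.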
.align 16
.L11610rounds:
cmpl $268435456,%ebp
je .L11710rounds_alt
movl $9,%ecx
movups %xmm0,-16(%edx)
.byte 102,15,58,223,200,1
call .L118key_128_cold
.byte 102,15,58,223,200,2
call .L119key_128
.byte 102,15,58,223,200,4
call .L119key_128
.byte 102,15,58,223,200,8
call .L119key_128
.byte 102,15,58,223,200,16
call .L119key_128
.byte 102,15,58,223,200,32
call .L119key_128
.byte 102,15,58,223,200,64
call .L119key_128
.byte 102,15,58,223,200,128
call .L119key_128
.byte 102,15,58,223,200,27
call .L119key_128
.byte 102,15,58,223,200,54
call .L119key_128
movups %xmm0,(%edx)
movl %ecx,80(%edx)
jmp .L120good_key
.align 16
.L119key_128:
movups %xmm0,(%edx)
leal 16(%edx),%edx
.L118key_128_cold:
shufps $16,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $255,%xmm1,%xmm1
xorps %xmm1,%xmm0
ret
.align 16
.L11710rounds_alt:
movdqa (%ebx),%xmm5
movl $8,%ecx
movdqa 32(%ebx),%xmm4
movdqa %xmm0,%xmm2
movdqu %xmm0,-16(%edx)
.L121loop_key128:
.byte 102,15,56,0,197
.byte 102,15,56,221,196
pslld $1,%xmm4
leal 16(%edx),%edx
movdqa %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm3,%xmm2
pxor %xmm2,%xmm0
movdqu %xmm0,-16(%edx)
movdqa %xmm0,%xmm2
decl %ecx
jnz .L121loop_key128
movdqa 48(%ebx),%xmm4
.byte 102,15,56,0,197
.byte 102,15,56,221,196
pslld $1,%xmm4
movdqa %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm3,%xmm2
pxor %xmm2,%xmm0
movdqu %xmm0,(%edx)
movdqa %xmm0,%xmm2
.byte 102,15,56,0,197
.byte 102,15,56,221,196
movdqa %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm2,%xmm3
pslldq $4,%xmm2
pxor %xmm3,%xmm2
pxor %xmm2,%xmm0
movdqu %xmm0,16(%edx)
movl $9,%ecx
movl %ecx,96(%edx)
jmp .L120good_key
.align 16
.L11412rounds:
movq 16(%eax),%xmm2
cmpl $268435456,%ebp
je .L12212rounds_alt
movl $11,%ecx
movups %xmm0,-16(%edx)
.byte 102,15,58,223,202,1
call .L123key_192a_cold
.byte 102,15,58,223,202,2
call .L124key_192b
.byte 102,15,58,223,202,4
call .L125key_192a
.byte 102,15,58,223,202,8
call .L124key_192b
.byte 102,15,58,223,202,16
call .L125key_192a
.byte 102,15,58,223,202,32
call .L124key_192b
.byte 102,15,58,223,202,64
call .L125key_192a
.byte 102,15,58,223,202,128
call .L124key_192b
movups %xmm0,(%edx)
movl %ecx,48(%edx)
jmp .L120good_key
.align 16
.L125key_192a:
movups %xmm0,(%edx)
leal 16(%edx),%edx
.align 16
.L123key_192a_cold:
movaps %xmm2,%xmm5
.L126key_192b_warm:
shufps $16,%xmm0,%xmm4
movdqa %xmm2,%xmm3
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
pslldq $4,%xmm3
xorps %xmm4,%xmm0
pshufd $85,%xmm1,%xmm1
pxor %xmm3,%xmm2
pxor %xmm1,%xmm0
pshufd $255,%xmm0,%xmm3
pxor %xmm3,%xmm2
ret
.align 16
.L124key_192b:
movaps %xmm0,%xmm3
shufps $68,%xmm0,%xmm5
movups %xmm5,(%edx)
shufps $78,%xmm2,%xmm3
movups %xmm3,16(%edx)
leal 32(%edx),%edx
jmp .L126key_192b_warm
.align 16
.L12212rounds_alt:
movdqa 16(%ebx),%xmm5
movdqa 32(%ebx),%xmm4
movl $8,%ecx
movdqu %xmm0,-16(%edx)
.L127loop_key192:
movq %xmm2,(%edx)
movdqa %xmm2,%xmm1
.byte 102,15,56,0,213
.byte 102,15,56,221,212
pslld $1,%xmm4
leal 24(%edx),%edx
movdqa %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm3,%xmm0
pshufd $255,%xmm0,%xmm3
pxor %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm1,%xmm3
pxor %xmm2,%xmm0
pxor %xmm3,%xmm2
movdqu %xmm0,-16(%edx)
decl %ecx
jnz .L127loop_key192
movl $11,%ecx
movl %ecx,32(%edx)
jmp .L120good_key
.align 16
.L11314rounds:
movups 16(%eax),%xmm2
leal 16(%edx),%edx
cmpl $268435456,%ebp
je .L12814rounds_alt
movl $13,%ecx
movups %xmm0,-32(%edx)
movups %xmm2,-16(%edx)
.byte 102,15,58,223,202,1
call .L129key_256a_cold
.byte 102,15,58,223,200,1
call .L130key_256b
.byte 102,15,58,223,202,2
call .L131key_256a
.byte 102,15,58,223,200,2
call .L130key_256b
.byte 102,15,58,223,202,4
call .L131key_256a
.byte 102,15,58,223,200,4
call .L130key_256b
.byte 102,15,58,223,202,8
call .L131key_256a
.byte 102,15,58,223,200,8
call .L130key_256b
.byte 102,15,58,223,202,16
call .L131key_256a
.byte 102,15,58,223,200,16
call .L130key_256b
.byte 102,15,58,223,202,32
call .L131key_256a
.byte 102,15,58,223,200,32
call .L130key_256b
.byte 102,15,58,223,202,64
call .L131key_256a
movups %xmm0,(%edx)
movl %ecx,16(%edx)
xorl %eax,%eax
jmp .L120good_key
.align 16
.L131key_256a:
movups %xmm2,(%edx)
leal 16(%edx),%edx
.L129key_256a_cold:
shufps $16,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $255,%xmm1,%xmm1
xorps %xmm1,%xmm0
ret
.align 16
.L130key_256b:
movups %xmm0,(%edx)
leal 16(%edx),%edx
shufps $16,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps $140,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps $170,%xmm1,%xmm1
xorps %xmm1,%xmm2
ret
.align 16
.L12814rounds_alt:
movdqa (%ebx),%xmm5
movdqa 32(%ebx),%xmm4
movl $7,%ecx
movdqu %xmm0,-32(%edx)
movdqa %xmm2,%xmm1
movdqu %xmm2,-16(%edx)
.L132loop_key256:
.byte 102,15,56,0,213
.byte 102,15,56,221,212
movdqa %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm0,%xmm3
pslldq $4,%xmm0
pxor %xmm3,%xmm0
pslld $1,%xmm4
pxor %xmm2,%xmm0
movdqu %xmm0,(%edx)
decl %ecx
jz .L133done_key256
pshufd $255,%xmm0,%xmm2
pxor %xmm3,%xmm3
.byte 102,15,56,221,211
movdqa %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm1,%xmm3
pslldq $4,%xmm1
pxor %xmm3,%xmm1
pxor %xmm1,%xmm2
movdqu %xmm2,16(%edx)
leal 32(%edx),%edx
movdqa %xmm2,%xmm1
jmp .L132loop_key256
.L133done_key256:
movl $13,%ecx
movl %ecx,16(%edx)
.L120good_key:
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
xorl %eax,%eax
popl %ebx
popl %ebp
ret
.align 4
.L111bad_pointer:
movl $-1,%eax
popl %ebx
popl %ebp
ret
.align 4
.L115bad_keybits:
pxor %xmm0,%xmm0
movl $-2,%eax
popl %ebx
popl %ebp
ret
.size _aesni_set_encrypt_key,.-_aesni_set_encrypt_key
.globl aesni_set_encrypt_key
.type aesni_set_encrypt_key,@function
.align 16
aesni_set_encrypt_key:
.L_aesni_set_encrypt_key_begin:
movl 4(%esp),%eax
movl 8(%esp),%ecx
movl 12(%esp),%edx
call _aesni_set_encrypt_key
ret
.size aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
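# aesni_set_decrypt_key: build the encryption schedule, then convert it for
# the equivalent inverse cipher by swapping the outer round keys and running
# aesimc (.byte 102,15,56,219,...) over the inner ones.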
.globl aesni_set_decrypt_key
.type aesni_set_decrypt_key,@function
.align 16
aesni_set_decrypt_key:
.L_aesni_set_decrypt_key_begin:
movl 4(%esp),%eax
movl 8(%esp),%ecx
movl 12(%esp),%edx
call _aesni_set_encrypt_key
movl 12(%esp),%edx
shll $4,%ecx
testl %eax,%eax
jnz .L134dec_key_ret
leal 16(%edx,%ecx,1),%eax
movups (%edx),%xmm0
movups (%eax),%xmm1
movups %xmm0,(%eax)
movups %xmm1,(%edx)
leal 16(%edx),%edx
leal -16(%eax),%eax
.L135dec_key_inverse:
movups (%edx),%xmm0
movups (%eax),%xmm1
.byte 102,15,56,219,192
.byte 102,15,56,219,201
leal 16(%edx),%edx
leal -16(%eax),%eax
movups %xmm0,16(%eax)
movups %xmm1,-16(%edx)
cmpl %edx,%eax
ja .L135dec_key_inverse
movups (%edx),%xmm0
.byte 102,15,56,219,192
movups %xmm0,(%edx)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
xorl %eax,%eax
.L134dec_key_ret:
ret
.size aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
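# .Lkey_const: shuffle masks and round-constant vectors (1 and 27) consumed
# by the "_alt" key-expansion paths above.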
.align 64
.Lkey_const:
.long 202313229,202313229,202313229,202313229
.long 67569157,67569157,67569157,67569157
.long 1,1,1,1
.long 27,27,27,27
.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
.byte 83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte 115,108,46,111,114,103,62,0
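# The .byte data above spells the credit string
# "AES for Intel AES-NI, CRYPTOGAMS by <appro@openssl.org>".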
.comm OPENSSL_ia32cap_P,16,4