#include <machine/asm.h>
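/*
 * Stitched AES-NI CBC encryption + SHA-1 for x86-64 (OpenSSL/CRYPTOGAMS
 * "aesni-cbc-sha1" module).  Register roles, as far as this excerpt
 * shows: %r15 - AES key schedule, %r12 - input pointer, %r13 - output
 * minus input displacement, %r10 - SHA-1 message block pointer,
 * %r11 - K_XX_XX constant table, %r8 - IV buffer; %xmm11 carries the
 * running CBC state and %xmm13 the first (whitening) round key.
 */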
.globl aesni_cbc_sha1_enc
.type aesni_cbc_sha1_enc,@function
movq OPENSSL_ia32cap_P@GOTPCREL(%rip),%r10	# OPENSSL_ia32cap_P capability vector
jc aesni_cbc_sha1_enc_avx	# AVX path if the preceding capability test (omitted here) set CF
jmp aesni_cbc_sha1_enc_ssse3	# otherwise the SSSE3 path
.size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc
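/*
 * SSSE3 variant: SHA-1 message scheduling (pshufb/palignr, emitted as
 * .byte sequences for compatibility with older assemblers) is
 * interleaved with the AES-NI rounds of the CBC encryption.
 */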
.type aesni_cbc_sha1_enc_ssse3,@function
aesni_cbc_sha1_enc_ssse3:
leaq K_XX_XX(%rip),%r11	# SHA-1 round constants / byte-swap mask
movups 16(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 32(%r15),%xmm15
.byte 102,15,58,15,224,8	# palignr $8,%xmm0,%xmm4
movdqa %xmm9,48(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 48(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 64(%r15),%xmm15
movdqa 0(%r11),%xmm10
.byte 102,15,58,15,233,8	# palignr $8,%xmm1,%xmm5
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 80(%r15),%xmm14
movdqa %xmm10,0(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 96(%r15),%xmm15
movdqa 16(%r11),%xmm8
.byte 102,15,58,15,242,8	# palignr $8,%xmm2,%xmm6
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 112(%r15),%xmm14
movdqa %xmm8,16(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 128(%r15),%xmm15
movdqa 16(%r11),%xmm9
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 144(%r15),%xmm14
.byte 102,15,58,15,251,8	# palignr $8,%xmm3,%xmm7
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 160(%r15),%xmm15
movdqa %xmm9,32(%rsp)
movups 176(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 192(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 208(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 224(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
.byte 102,69,15,56,221,223	# aesenclast %xmm15,%xmm11
movups 16(%r15),%xmm14
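/*
 * aesenclast above completes one 16-byte CBC block; the round-key
 * loads restart at 16(%r15) for the next block while the SHA-1
 * message schedule continues.
 */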
movdqa 16(%r11),%xmm10
.byte 102,68,15,58,15,206,8	# palignr $8,%xmm6,%xmm9
movups 16(%r12),%xmm12	# next plaintext block
movups %xmm11,0(%r13,%r12,1)	# store ciphertext block
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 32(%r15),%xmm15
movdqa %xmm10,48(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 48(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 64(%r15),%xmm15
.byte 102,68,15,58,15,215,8	# palignr $8,%xmm7,%xmm10
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 80(%r15),%xmm14
.byte 102,68,15,58,15,192,8	# palignr $8,%xmm0,%xmm8
movdqa 32(%r11),%xmm10
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 96(%r15),%xmm15
movdqa %xmm9,16(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 112(%r15),%xmm14
.byte 102,68,15,58,15,201,8	# palignr $8,%xmm1,%xmm9
movdqa %xmm10,32(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 128(%r15),%xmm15
.byte 102,68,15,58,15,210,8	# palignr $8,%xmm2,%xmm10
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 144(%r15),%xmm14
movdqa %xmm8,48(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 160(%r15),%xmm15
.byte 102,68,15,58,15,195,8	# palignr $8,%xmm3,%xmm8
movups 176(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 192(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 208(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 224(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
.byte 102,69,15,56,221,223	# aesenclast %xmm15,%xmm11
movups 16(%r15),%xmm14
movups 32(%r12),%xmm12	# next plaintext block
movups %xmm11,16(%r13,%r12,1)	# store ciphertext block
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 32(%r15),%xmm15
.byte 102,68,15,58,15,204,8	# palignr $8,%xmm4,%xmm9
movdqa %xmm10,16(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 48(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 64(%r15),%xmm15
.byte 102,68,15,58,15,213,8	# palignr $8,%xmm5,%xmm10
movdqa 48(%r11),%xmm9
movdqa %xmm8,32(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 80(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 96(%r15),%xmm15
.byte 102,68,15,58,15,198,8	# palignr $8,%xmm6,%xmm8
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 112(%r15),%xmm14
movdqa %xmm9,48(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 128(%r15),%xmm15
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 144(%r15),%xmm14
.byte 102,68,15,58,15,207,8	# palignr $8,%xmm7,%xmm9
movdqa %xmm10,0(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 160(%r15),%xmm15
movups 176(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 192(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 208(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 224(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
.byte 102,69,15,56,221,223	# aesenclast %xmm15,%xmm11
movups 16(%r15),%xmm14
.byte 102,68,15,58,15,208,8	# palignr $8,%xmm0,%xmm10
movups 48(%r12),%xmm12	# next plaintext block
movups %xmm11,32(%r13,%r12,1)	# store ciphertext block
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 32(%r15),%xmm15
movdqa %xmm8,16(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 48(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 64(%r15),%xmm15
.byte 102,68,15,58,15,193,8	# palignr $8,%xmm1,%xmm8
movdqa %xmm9,32(%rsp)
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 80(%r15),%xmm14
movdqa %xmm10,48(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 96(%r15),%xmm15
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 112(%r15),%xmm14
movdqa 64(%r11),%xmm6	# byte-swap mask
movdqa 0(%r11),%xmm9
movdqu 0(%r10),%xmm0
movdqu 16(%r10),%xmm1
movdqu 32(%r10),%xmm2
movdqu 48(%r10),%xmm3
.byte 102,15,56,0,198	# pshufb %xmm6,%xmm0
.byte 102,15,56,0,206	# pshufb %xmm6,%xmm1
movdqa %xmm0,0(%rsp)
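/*
 * Around here the next 64-byte SHA-1 input block is loaded from
 * (%r10), byte-swapped with pshufb using the mask at 64(%r11), and
 * staged on the stack while the AES rounds keep running.
 */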
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 128(%r15),%xmm15
.byte 102,15,56,0,214	# pshufb %xmm6,%xmm2
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 144(%r15),%xmm14
movdqa %xmm1,16(%rsp)
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 160(%r15),%xmm15
.byte 102,15,56,0,222	# pshufb %xmm6,%xmm3
movdqa %xmm2,32(%rsp)
movups 176(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 192(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 208(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 224(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
.byte 102,69,15,56,221,223	# aesenclast %xmm15,%xmm11
movups 16(%r15),%xmm14
movups %xmm11,48(%r13,%r12,1)	# store ciphertext block
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 128(%r15),%xmm15
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 144(%r15),%xmm14
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 160(%r15),%xmm15
movups 176(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 192(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
movups 208(%r15),%xmm14
.byte 102,69,15,56,220,223	# aesenc %xmm15,%xmm11
movups 224(%r15),%xmm15
.byte 102,69,15,56,220,222	# aesenc %xmm14,%xmm11
.byte 102,69,15,56,221,223	# aesenclast %xmm15,%xmm11
movups 16(%r15),%xmm14
movups %xmm11,48(%r13,%r12,1)	# store ciphertext block
.size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3
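/*
 * AVX variant: the same stitched CBC-encrypt + SHA-1 structure, using
 * VEX-encoded three-operand forms (vpshufb, vpalignr, vaesenc, ...)
 * instead of the .byte-encoded SSSE3/AES-NI instructions above.
 */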
.type aesni_cbc_sha1_enc_avx,@function
aesni_cbc_sha1_enc_avx:
leaq -104(%rsp),%rsp
vmovdqu (%r8),%xmm11	# load IV / CBC state
leaq K_XX_XX(%rip),%r11	# SHA-1 round constants / byte-swap mask
vmovdqa 64(%r11),%xmm6
vmovdqa 0(%r11),%xmm9
vmovdqu 0(%r10),%xmm0
vmovdqu 16(%r10),%xmm1
vmovdqu 32(%r10),%xmm2
vmovdqu 48(%r10),%xmm3
vpshufb %xmm6,%xmm0,%xmm0
vpshufb %xmm6,%xmm1,%xmm1
vpshufb %xmm6,%xmm2,%xmm2
vpshufb %xmm6,%xmm3,%xmm3
vpaddd %xmm9,%xmm0,%xmm4
vpaddd %xmm9,%xmm1,%xmm5
vpaddd %xmm9,%xmm2,%xmm6
vmovdqa %xmm4,0(%rsp)
vmovdqa %xmm5,16(%rsp)
vmovdqa %xmm6,32(%rsp)
vmovups -112(%r15),%xmm13	# round key 0 (whitening key)
vmovups 16-112(%r15),%xmm14
vmovups 0(%r12),%xmm12	# first plaintext block
vxorps %xmm13,%xmm12,%xmm12	# plaintext ^ round key 0
vxorps %xmm12,%xmm11,%xmm11	# ... ^ CBC state
vaesenc %xmm14,%xmm11,%xmm11
vmovups -80(%r15),%xmm15
vpalignr $8,%xmm0,%xmm1,%xmm4
vpaddd %xmm3,%xmm9,%xmm9
vpsrldq $4,%xmm3,%xmm8
vpxor %xmm0,%xmm4,%xmm4
vpxor %xmm2,%xmm8,%xmm8
vpxor %xmm8,%xmm4,%xmm4
vmovdqa %xmm9,48(%rsp)
vaesenc %xmm15,%xmm11,%xmm11
vmovups -64(%r15),%xmm14
vpsrld $31,%xmm4,%xmm8
vpslldq $12,%xmm4,%xmm10
vpaddd %xmm4,%xmm4,%xmm4
vpsrld $30,%xmm10,%xmm9
vpor %xmm8,%xmm4,%xmm4
vpslld $2,%xmm10,%xmm10
vpxor %xmm9,%xmm4,%xmm4
vaesenc %xmm14,%xmm11,%xmm11
vmovups -48(%r15),%xmm15
vpxor %xmm10,%xmm4,%xmm4
vmovdqa 0(%r11),%xmm10
vpalignr $8,%xmm1,%xmm2,%xmm5
vpaddd %xmm4,%xmm10,%xmm10
vpsrldq $4,%xmm4,%xmm9
vpxor %xmm1,%xmm5,%xmm5
vpxor %xmm3,%xmm9,%xmm9
vaesenc %xmm15,%xmm11,%xmm11
vmovups -32(%r15),%xmm14
vpxor %xmm9,%xmm5,%xmm5
vmovdqa %xmm10,0(%rsp)
vpsrld $31,%xmm5,%xmm9
vpslldq $12,%xmm5,%xmm8
vpaddd %xmm5,%xmm5,%xmm5
vpsrld $30,%xmm8,%xmm10
vpor %xmm9,%xmm5,%xmm5
vaesenc %xmm14,%xmm11,%xmm11
vmovups -16(%r15),%xmm15
vpslld $2,%xmm8,%xmm8
vpxor %xmm10,%xmm5,%xmm5
vpxor %xmm8,%xmm5,%xmm5
vmovdqa 16(%r11),%xmm8
vpalignr $8,%xmm2,%xmm3,%xmm6
vaesenc %xmm15,%xmm11,%xmm11
vmovups 0(%r15),%xmm14
vpaddd %xmm5,%xmm8,%xmm8
vpsrldq $4,%xmm5,%xmm10
vpxor %xmm2,%xmm6,%xmm6
vpxor %xmm4,%xmm10,%xmm10
vpxor %xmm10,%xmm6,%xmm6
vmovdqa %xmm8,16(%rsp)
vpsrld $31,%xmm6,%xmm10
vaesenc %xmm14,%xmm11,%xmm11
vmovups 16(%r15),%xmm15
vpslldq $12,%xmm6,%xmm9
vpaddd %xmm6,%xmm6,%xmm6
vpsrld $30,%xmm9,%xmm8
vpor %xmm10,%xmm6,%xmm6
vpslld $2,%xmm9,%xmm9
vpxor %xmm8,%xmm6,%xmm6
vpxor %xmm9,%xmm6,%xmm6
vmovdqa 16(%r11),%xmm9
vaesenc %xmm15,%xmm11,%xmm11
vmovups 32(%r15),%xmm14
vpalignr $8,%xmm3,%xmm4,%xmm7
vpaddd %xmm6,%xmm9,%xmm9
vpsrldq $4,%xmm6,%xmm8
vpxor %xmm3,%xmm7,%xmm7
vpxor %xmm5,%xmm8,%xmm8
vaesenc %xmm14,%xmm11,%xmm11
vmovups 48(%r15),%xmm15
vpxor %xmm8,%xmm7,%xmm7
vmovdqa %xmm9,32(%rsp)
vpsrld $31,%xmm7,%xmm8
vpslldq $12,%xmm7,%xmm10
vpaddd %xmm7,%xmm7,%xmm7
vpsrld $30,%xmm10,%xmm9
vpor %xmm8,%xmm7,%xmm7
vpslld $2,%xmm10,%xmm10
vpxor %xmm9,%xmm7,%xmm7
vaesenc %xmm15,%xmm11,%xmm11
vmovups 64(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 80(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 96(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 112(%r15),%xmm15
vaesenclast %xmm15,%xmm11,%xmm11
vmovups 16-112(%r15),%xmm14
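/*
 * vaesenclast above finishes a CBC block in the AVX path; %xmm14 is
 * reloaded with the first post-whitening round key (16-112(%r15)) so
 * the next block can start once fresh plaintext has been XORed in.
 */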
vpxor %xmm10,%xmm7,%xmm7
vmovdqa 16(%r11),%xmm10
vpalignr $8,%xmm6,%xmm7,%xmm9
vpxor %xmm4,%xmm0,%xmm0
vpxor %xmm1,%xmm0,%xmm0
vmovdqa %xmm10,%xmm8
vpaddd %xmm7,%xmm10,%xmm10
vmovups 16(%r12),%xmm12
vxorps %xmm13,%xmm12,%xmm12
vmovups %xmm11,0(%r13,%r12,1)
vxorps %xmm12,%xmm11,%xmm11
vaesenc %xmm14,%xmm11,%xmm11
vmovups -80(%r15),%xmm15
vpxor %xmm9,%xmm0,%xmm0
vpsrld $30,%xmm0,%xmm9
vmovdqa %xmm10,48(%rsp)
vpslld $2,%xmm0,%xmm0
vaesenc %xmm15,%xmm11,%xmm11
vmovups -64(%r15),%xmm14
vpor %xmm9,%xmm0,%xmm0
vmovdqa %xmm0,%xmm10
vpalignr $8,%xmm7,%xmm0,%xmm10
vpxor %xmm5,%xmm1,%xmm1
vaesenc %xmm14,%xmm11,%xmm11
vmovups -48(%r15),%xmm15
vpxor %xmm2,%xmm1,%xmm1
vpaddd %xmm0,%xmm8,%xmm8
vpxor %xmm10,%xmm1,%xmm1
vpsrld $30,%xmm1,%xmm10
vmovdqa %xmm8,0(%rsp)
vpslld $2,%xmm1,%xmm1
vaesenc %xmm15,%xmm11,%xmm11
vmovups -32(%r15),%xmm14
vpor %xmm10,%xmm1,%xmm1
vpalignr $8,%xmm0,%xmm1,%xmm8
vpxor %xmm6,%xmm2,%xmm2
vpxor %xmm3,%xmm2,%xmm2
vmovdqa 32(%r11),%xmm10
vpaddd %xmm1,%xmm9,%xmm9
vpxor %xmm8,%xmm2,%xmm2
vaesenc %xmm14,%xmm11,%xmm11
vmovups -16(%r15),%xmm15
vpsrld $30,%xmm2,%xmm8
vmovdqa %xmm9,16(%rsp)
vpslld $2,%xmm2,%xmm2
vpor %xmm8,%xmm2,%xmm2
vaesenc %xmm15,%xmm11,%xmm11
vmovups 0(%r15),%xmm14
vpalignr $8,%xmm1,%xmm2,%xmm9
vpxor %xmm7,%xmm3,%xmm3
vpxor %xmm4,%xmm3,%xmm3
vmovdqa %xmm10,%xmm8
vpaddd %xmm2,%xmm10,%xmm10
vpxor %xmm9,%xmm3,%xmm3
vpsrld $30,%xmm3,%xmm9
vmovdqa %xmm10,32(%rsp)
vpslld $2,%xmm3,%xmm3
vaesenc %xmm14,%xmm11,%xmm11
vmovups 16(%r15),%xmm15
vpor %xmm9,%xmm3,%xmm3
vmovdqa %xmm3,%xmm10
vpalignr $8,%xmm2,%xmm3,%xmm10
vpxor %xmm0,%xmm4,%xmm4
vpxor %xmm5,%xmm4,%xmm4
vaesenc %xmm15,%xmm11,%xmm11
vmovups 32(%r15),%xmm14
vpaddd %xmm3,%xmm8,%xmm8
vpxor %xmm10,%xmm4,%xmm4
vpsrld $30,%xmm4,%xmm10
vmovdqa %xmm8,48(%rsp)
vpslld $2,%xmm4,%xmm4
vpor %xmm10,%xmm4,%xmm4
vaesenc %xmm14,%xmm11,%xmm11
vmovups 48(%r15),%xmm15
vpalignr $8,%xmm3,%xmm4,%xmm8
vpxor %xmm1,%xmm5,%xmm5
vpxor %xmm6,%xmm5,%xmm5
vmovdqa %xmm9,%xmm10
vpaddd %xmm4,%xmm9,%xmm9
vpxor %xmm8,%xmm5,%xmm5
vpsrld $30,%xmm5,%xmm8
vmovdqa %xmm9,0(%rsp)
vaesenc %xmm15,%xmm11,%xmm11
vmovups 64(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 80(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 96(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 112(%r15),%xmm15
vaesenclast %xmm15,%xmm11,%xmm11
vmovups 16-112(%r15),%xmm14
vpslld $2,%xmm5,%xmm5
vpor %xmm8,%xmm5,%xmm5
vpalignr $8,%xmm4,%xmm5,%xmm9
vpxor %xmm2,%xmm6,%xmm6
vmovups 32(%r12),%xmm12
vxorps %xmm13,%xmm12,%xmm12
vmovups %xmm11,16(%r13,%r12,1)
vxorps %xmm12,%xmm11,%xmm11
vaesenc %xmm14,%xmm11,%xmm11
vmovups -80(%r15),%xmm15
vpxor %xmm7,%xmm6,%xmm6
vmovdqa %xmm10,%xmm8
vpaddd %xmm5,%xmm10,%xmm10
vpxor %xmm9,%xmm6,%xmm6
vpsrld $30,%xmm6,%xmm9
vmovdqa %xmm10,16(%rsp)
vpslld $2,%xmm6,%xmm6
vaesenc %xmm15,%xmm11,%xmm11
vmovups -64(%r15),%xmm14
vpor %xmm9,%xmm6,%xmm6
vmovdqa %xmm6,%xmm10
vaesenc %xmm14,%xmm11,%xmm11
vmovups -48(%r15),%xmm15
vpalignr $8,%xmm5,%xmm6,%xmm10
vpxor %xmm3,%xmm7,%xmm7
vpxor %xmm0,%xmm7,%xmm7
vmovdqa 48(%r11),%xmm9
vpaddd %xmm6,%xmm8,%xmm8
vpxor %xmm10,%xmm7,%xmm7
vpsrld $30,%xmm7,%xmm10
vmovdqa %xmm8,32(%rsp)
vaesenc %xmm15,%xmm11,%xmm11
vmovups -32(%r15),%xmm14
vpslld $2,%xmm7,%xmm7
vpor %xmm10,%xmm7,%xmm7
vaesenc %xmm14,%xmm11,%xmm11
vmovups -16(%r15),%xmm15
vpalignr $8,%xmm6,%xmm7,%xmm8
vpxor %xmm4,%xmm0,%xmm0
vpxor %xmm1,%xmm0,%xmm0
vaesenc %xmm15,%xmm11,%xmm11
vmovups 0(%r15),%xmm14
vmovdqa %xmm9,%xmm10
vpaddd %xmm7,%xmm9,%xmm9
vpxor %xmm8,%xmm0,%xmm0
vpsrld $30,%xmm0,%xmm8
vmovdqa %xmm9,48(%rsp)
vpslld $2,%xmm0,%xmm0
vpor %xmm8,%xmm0,%xmm0
vaesenc %xmm14,%xmm11,%xmm11
vmovups 16(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 32(%r15),%xmm14
vpalignr $8,%xmm7,%xmm0,%xmm9
vpxor %xmm5,%xmm1,%xmm1
vpxor %xmm2,%xmm1,%xmm1
vmovdqa %xmm10,%xmm8
vpaddd %xmm0,%xmm10,%xmm10
vpxor %xmm9,%xmm1,%xmm1
vpsrld $30,%xmm1,%xmm9
vmovdqa %xmm10,0(%rsp)
vpslld $2,%xmm1,%xmm1
vaesenc %xmm14,%xmm11,%xmm11
vmovups 48(%r15),%xmm15
vpor %xmm9,%xmm1,%xmm1
vmovdqa %xmm1,%xmm10
vaesenc %xmm15,%xmm11,%xmm11
vmovups 64(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 80(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 96(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 112(%r15),%xmm15
vaesenclast %xmm15,%xmm11,%xmm11
vmovups 16-112(%r15),%xmm14
vpalignr $8,%xmm0,%xmm1,%xmm10
vpxor %xmm6,%xmm2,%xmm2
vpxor %xmm3,%xmm2,%xmm2
vpaddd %xmm1,%xmm8,%xmm8
vpxor %xmm10,%xmm2,%xmm2
vmovups 48(%r12),%xmm12
vxorps %xmm13,%xmm12,%xmm12
vmovups %xmm11,32(%r13,%r12,1)
vxorps %xmm12,%xmm11,%xmm11
vaesenc %xmm14,%xmm11,%xmm11
vmovups -80(%r15),%xmm15
vpsrld $30,%xmm2,%xmm10
vmovdqa %xmm8,16(%rsp)
vpslld $2,%xmm2,%xmm2
vpor %xmm10,%xmm2,%xmm2
vaesenc %xmm15,%xmm11,%xmm11
vmovups -64(%r15),%xmm14
vpalignr $8,%xmm1,%xmm2,%xmm8
vpxor %xmm7,%xmm3,%xmm3
vaesenc %xmm14,%xmm11,%xmm11
vmovups -48(%r15),%xmm15
vpxor %xmm4,%xmm3,%xmm3
vmovdqa %xmm9,%xmm10
vpaddd %xmm2,%xmm9,%xmm9
vpxor %xmm8,%xmm3,%xmm3
vpsrld $30,%xmm3,%xmm8
vmovdqa %xmm9,32(%rsp)
vpslld $2,%xmm3,%xmm3
vaesenc %xmm15,%xmm11,%xmm11
vmovups -32(%r15),%xmm14
vpor %xmm8,%xmm3,%xmm3
vpaddd %xmm3,%xmm10,%xmm10
movdqa %xmm10,48(%rsp)	# legacy-SSE store amid VEX code; harmless with 128-bit-only state
vaesenc %xmm14,%xmm11,%xmm11
vmovups -16(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 0(%r15),%xmm14
vmovdqa 64(%r11),%xmm6	# byte-swap mask
vmovdqa 0(%r11),%xmm9
vmovdqu 0(%r10),%xmm0
vmovdqu 16(%r10),%xmm1
vmovdqu 32(%r10),%xmm2
vmovdqu 48(%r10),%xmm3
vpshufb %xmm6,%xmm0,%xmm0
vpshufb %xmm6,%xmm1,%xmm1
vpaddd %xmm9,%xmm0,%xmm4
vmovdqa %xmm4,0(%rsp)
vaesenc %xmm14,%xmm11,%xmm11
vmovups 16(%r15),%xmm15
vpshufb %xmm6,%xmm2,%xmm2
vpaddd %xmm9,%xmm1,%xmm5
vaesenc %xmm15,%xmm11,%xmm11
vmovups 32(%r15),%xmm14
vmovdqa %xmm5,16(%rsp)
vaesenc %xmm14,%xmm11,%xmm11
vmovups 48(%r15),%xmm15
vpshufb %xmm6,%xmm3,%xmm3
vpaddd %xmm9,%xmm2,%xmm6
vmovdqa %xmm6,32(%rsp)
vaesenc %xmm15,%xmm11,%xmm11
vmovups 64(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 80(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 96(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 112(%r15),%xmm15
vaesenclast %xmm15,%xmm11,%xmm11
vmovups 16-112(%r15),%xmm14
vmovups %xmm11,48(%r13,%r12,1)	# store ciphertext block
vaesenc %xmm14,%xmm11,%xmm11
vmovups 16(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 32(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 48(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 64(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 80(%r15),%xmm15
vaesenc %xmm15,%xmm11,%xmm11
vmovups 96(%r15),%xmm14
vaesenc %xmm14,%xmm11,%xmm11
vmovups 112(%r15),%xmm15
vaesenclast %xmm15,%xmm11,%xmm11
vmovups 16-112(%r15),%xmm14
vmovups %xmm11,48(%r13,%r12,1)	# store ciphertext block
vmovups %xmm11,(%r8)	# write back CBC state (next IV)
.size aesni_cbc_sha1_enc_avx,.-aesni_cbc_sha1_enc_avx
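/*
 * K_XX_XX constant pool: the four SHA-1 round constants, each
 * replicated across a 128-bit lane, followed by the big-endian
 * byte-swap mask used by pshufb/vpshufb.
 */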
.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999	# K1, rounds 0-19
.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1	# K2, rounds 20-39
.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc	# K3, rounds 40-59
.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6	# K4, rounds 60-79
.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f	# pshufb byte-swap mask
.byte 65,69,83,78,73,45,67,66,67,43,83,72,65,49,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0	# "AESNI-CBC+SHA1 stitch for x86_64, CRYPTOGAMS by <appro@openssl.org>"