 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 * Copyright 2011 Nexenta Systems, Inc. All rights reserved.
 * Copyright 2006 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
	.file "__vcos_ultra3.S"
	.type __vcos,#function
__vcos = __vcos_ultra3
	.word 0x42c80000,0x00000000 ! 3 * 2^44
	.word 0x43380000,0x00000000 ! 3 * 2^51
	.word 0x3fe45f30,0x6dc9c883 ! invpio2
	.word 0x3ff921fb,0x54442c00 ! pio2_1
	.word 0x3d318469,0x898cc400 ! pio2_2
	.word 0x3a71701b,0x839a2520 ! pio2_3
	.word 0xbfc55555,0x55555533 ! pp1
	.word 0x3f811111,0x10e7d53b ! pp2
	.word 0xbf2a0167,0xe6b3cf9b ! pp3
	.word 0xbfdfffff,0xffffff65 ! qq1
	.word 0x3fa55555,0x54f88ed0 ! qq2
	.word 0xbf56c12c,0xdd185f60 ! qq3
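!
! Notes on the table above: invpio2 is 2/pi, and pio2_1..pio2_3 form a
! three-piece (Cody-Waite style) split of pi/2 so that multiples of pi/2
! can be subtracted in exact steps during argument reduction.  The
! constants 3*2^44 and 3*2^51 are "right shifter" values used to round a
! product to the nearest integer by adding and then subtracting them.
! pp1..pp3 and qq1..qq3 are the polynomial coefficients for the sine and
! cosine cores on the small reduced argument; a rough C sketch of the
! intent (illustrative only, not part of this source):
!
!	z = x * x;
!	sin_poly = x + x * z * (pp1 + z * (pp2 + z * pp3));
!	cos_poly = 1.0 + z * (qq1 + z * (qq2 + z * qq3));
!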
! local storage indices
#define xsave STACK_BIAS-0x8
#define ysave STACK_BIAS-0x10
#define nsave STACK_BIAS-0x14
#define sxsave STACK_BIAS-0x18
#define sysave STACK_BIAS-0x1c
#define biguns STACK_BIAS-0x20
#define nk3 STACK_BIAS-0x24
#define nk2 STACK_BIAS-0x28
#define nk1 STACK_BIAS-0x2c
#define nk0 STACK_BIAS-0x30
#define junk STACK_BIAS-0x38
! sizeof temp storage - must be a multiple of 16 for V9
! the following are 64-bit registers in both V8+ and V9
! g1 __vlibm_TBL_sincos2
! o5 0x3fe921fb,0x4099251e
	save %sp,-SA(MINFRAME)-tmps,%sp
	PIC_SET(l7,constants,o0)
	PIC_SET(l7,__vlibm_TBL_sincos2,o1)
	wr %g0,0x82,%asi ! set %asi for non-faulting loads
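! ASI 0x82 is the primary no-fault ASI, so the lda preloads of the next
! argument below can safely run one element past the end of the input
! vector on the final iteration, returning zero instead of trapping.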
	stx %i1,[%fp+xsave] ! save arguments
	st %i1,[%fp+xsave] ! save arguments
	st %g0,[%fp+biguns] ! biguns = 0
	ldd [%o0+0x00],c3two44 ! load/set up constants
	ldd [%o0+0x08],c3two51
	ldd [%o0+0x10],invpio2
	ldd [%o0+0x18],pio2_1
	ldd [%o0+0x20],pio2_2
	ldd [%o0+0x28],pio2_3
	sethi %hi(0x80000000),%i5
	sethi %hi(0x3e400000),%o4
	sethi %hi(0x3fe921fb),%o5
	or %o5,%lo(0x3fe921fb),%o5
	sethi %hi(0x4099251e),%o7
	or %o7,%lo(0x4099251e),%o7
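! 0x3e400000 and 0x4099251e are high-word cutoffs for the fast path:
! arguments smaller than about 2^-27 in magnitude only need the inexact
! flag raised (cos(x) rounds to 1), and arguments above roughly 1.6e3
! are deferred to the huge-argument routine; both cases are picked off
! by the .range* handlers near the end of the file.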
	sll %i2,3,%i2 ! scale strides
	add %fp,junk,%o1 ! loop prologue
	andn %l0,%i5,%l0 ! mask off sign
	add %i1,%i2,%i1 ! x += stridex
	lda [%i1]%asi,%l1 ! preload next argument
	mov %i3,%o0 ! py0 = y
	bl,pn %icc,.range0 ! hx < 0x3e400000 or hx > 0x4099251e
	add %i3,%i4,%i3 ! y += stridey
	add %i1,%i2,%i1 ! x += stridex
	faddd %f2,c3two44,%f4
	lda [%i1]%asi,%l2 ! preload next argument
	mov %i3,%o1 ! py1 = y
	bl,pn %icc,.range1 ! hx < 0x3e400000 or hx > 0x4099251e
	add %i3,%i4,%i3 ! y += stridey
	add %i1,%i2,%i1 ! x += stridex
	faddd %f10,c3two44,%f12
	lda [%i1]%asi,%l3 ! preload next argument
	mov %i3,%o2 ! py2 = y
	bl,pn %icc,.range2 ! hx < 0x3e400000 or hx > 0x4099251e
	add %i3,%i4,%i3 ! y += stridey
	add %i1,%i2,%i1 ! x += stridex
	faddd %f18,c3two44,%f20
	mov %i3,%o3 ! py3 = y
	bl,pn %icc,.range3 ! hx < 0x3e400000 or hx > 0x4099251e
	add %i3,%i4,%i3 ! y += stridey
	faddd %f26,c3two44,%f28
	fmuld %f0,invpio2,%f6 ! x * invpio2, for medium range
	fmuld %f8,invpio2,%f14
	fmuld %f16,invpio2,%f22
	fmuld %f24,invpio2,%f30
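! Medium-range reduction: for arguments too large for the table alone,
! n = round(x * 2/pi) is formed via the 3*2^51 trick below, and x is
! then reduced as ((x - n*pio2_1) - n*pio2_2) - n*pio2_3, peeling off
! the three pieces of pi/2 without losing the low-order bits in one
! step.  Illustrative C sketch (not how this code spells it):
!
!	n = (int) nearbyint(x * invpio2);
!	t = ((x - n * pio2_1) - n * pio2_2) - n * pio2_3;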
	fcmpd %fcc0,%f0,pio2_3 ! x < pio2_3 iff x < 0
	fcmpd %fcc1,%f8,pio2_3
	fcmpd %fcc2,%f16,pio2_3
	fcmpd %fcc3,%f24,pio2_3
	fsubd %f2,%f4,%f2 ! x -= __vlibm_TBL_sincos2[k]
	fmuld %f2,%f2,%f0 ! z = x * x
	lda [%i1]%asi,%l0 ! preload next argument
	faddd %f6,c3two51,%f4
	faddd %f14,c3two51,%f12
	faddd %f22,c3two51,%f20
	faddd %f30,c3two51,%f28
	fsubd %f4,c3two51,%f6
	fsubd %f12,c3two51,%f14
	fsubd %f20,c3two51,%f22
	fsubd %f28,c3two51,%f30
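! The add/subtract of 3*2^51 above is the usual round-to-integer trick:
! for values well below 2^51 in magnitude, adding the constant pushes
! the fraction bits out of the significand (rounding to the nearest
! integer), the low word of the sum carries the integer n that selects
! the quadrant, and subtracting the constant back leaves n as a double
! ready for the pio2_1..pio2_3 products.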
	fmuld %f14,pio2_1,%f10
	fmuld %f22,pio2_1,%f18
	fmuld %f30,pio2_1,%f26
	fmuld %f14,pio2_2,%f12
	fmuld %f22,pio2_2,%f20
	fmuld %f30,pio2_2,%f28
	fcmple32 %f32,pio2_3,%l4 ! x <= pio2_3 iff x < 0
	fcmple32 %f34,pio2_3,%l5
	fcmple32 %f36,pio2_3,%l6
	fcmple32 %f38,pio2_3,%l7
	sll %l4,30,%l4 ! if (x < 0) n = -n ^ 2
	fmuld %f14,pio2_3,%f14
	fmuld %f22,pio2_3,%f22
	fmuld %f30,pio2_3,%f30
	fsubd %f32,%f6,%f0 ! reduced x
	faddd %f2,c3two44,%f4
	faddd %f10,c3two44,%f12
	faddd %f18,c3two44,%f20
	faddd %f26,c3two44,%f28
	fsubd %f4,%f6,%f6 ! w
	fand %f0,%f38,%f32 ! sign bit of x
	fsubd %f2,%f4,%f2 ! x -= __vlibm_TBL_sincos2[k]
	fmuld %f2,%f2,%f0 ! z = x * x
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,qq3,%f30 ! cos(x3)
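! Each lane now evaluates the two polynomial cores on its reduced
! argument and combines them with the tabulated sine/cosine of the
! nearest __vlibm_TBL_sincos2 entry, in the spirit of the angle-addition
! identity (t is the table point, r the small remainder):
!
!	cos(t + r) = cos(t)*cos_poly(r) - sin(t)*sin_poly(r)
!
! with the quadrant bits of n deciding the final sign and whether the
! sine or cosine form is the one that is kept.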
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f16,qq3,%f22 ! cos(x2)
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f10,%f12,%f12
	fmuld %f18,%f22,%f22
	fmuld %f26,%f28,%f28
	faddd %f12,%f34,%f12
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f12,%f10,%f12
	faddd %f18,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f12,%f12
	fmuld %f36,%f22,%f22
	fmuld %f38,%f28,%f28
	fsubd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
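! The conditional fmovd's above are the branch-free quadrant fix-up:
! integer condition codes derived from n select, per lane, which of the
! two candidate results is the one actually stored to y.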
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f10,%f12,%f12
	fmuld %f18,%f22,%f22
	fmuld %f26,%f30,%f30
	faddd %f12,%f34,%f12
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f12,%f10,%f12
	faddd %f18,%f22,%f22
	faddd %f26,%f30,%f30
	fmuld %f34,%f12,%f12
	fmuld %f36,%f22,%f22
	fmuld %f38,%f30,%f30
	fsubd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f18,%f18,%f16
	fmuld %f26,%f26,%f24
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f10,%f14,%f14
	fmuld %f18,%f20,%f20
	fmuld %f26,%f28,%f28
	faddd %f34,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f36,%f20
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f10,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f18,%f20
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f14,%f14
	fmuld %f36,%f20,%f20
	fmuld %f38,%f28,%f28
	faddd %f14,%f12,%f14
	fsubd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f10,%f14,%f14
	fmuld %f18,%f20,%f20
	fmuld %f26,%f30,%f30
	faddd %f34,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f36,%f20
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f10,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f18,%f20
	faddd %f26,%f30,%f30
	fmuld %f34,%f14,%f14
	fmuld %f36,%f20,%f20
	fmuld %f38,%f30,%f30
	faddd %f14,%f12,%f14
	fsubd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f26,%f26,%f24
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f10,%f14,%f14
	fmuld %f18,%f22,%f22
	fmuld %f26,%f28,%f28
	faddd %f34,%f14,%f14
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f10,%f14,%f14
	faddd %f18,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f14,%f14
	fmuld %f36,%f22,%f22
	fmuld %f38,%f28,%f28
	faddd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f0,qq3,%f6 ! cos(x0)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f10,%f14,%f14
	fmuld %f18,%f22,%f22
	fmuld %f26,%f30,%f30
	faddd %f34,%f14,%f14
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f10,%f14,%f14
	faddd %f18,%f22,%f22
	faddd %f26,%f30,%f30
	fmuld %f34,%f14,%f14
	fmuld %f36,%f22,%f22
	fmuld %f38,%f30,%f30
	faddd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f18,%f18,%f16
	fmuld %f26,%f26,%f24
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f10,%f12,%f12
	fmuld %f18,%f20,%f20
	fmuld %f26,%f28,%f28
	faddd %f12,%f34,%f12
	fmuld %f16,%f22,%f22
	faddd %f20,%f36,%f20
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f12,%f10,%f12
	fmuld %f16,%f22,%f22
	faddd %f20,%f18,%f20
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f12,%f12
	fmuld %f36,%f20,%f20
	fmuld %f38,%f28,%f28
	fsubd %f14,%f12,%f14
	fsubd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f10,%f12,%f12
	fmuld %f18,%f20,%f20
	fmuld %f26,%f30,%f30
	faddd %f12,%f34,%f12
	fmuld %f16,%f22,%f22
	faddd %f20,%f36,%f20
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f12,%f10,%f12
	fmuld %f16,%f22,%f22
	faddd %f20,%f18,%f20
	faddd %f26,%f30,%f30
	fmuld %f34,%f12,%f12
	fmuld %f36,%f20,%f20
	fmuld %f38,%f30,%f30
	fsubd %f14,%f12,%f14
	fsubd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f26,%f26,%f24
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f10,%f12,%f12
	fmuld %f18,%f22,%f22
	fmuld %f26,%f28,%f28
	faddd %f12,%f34,%f12
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f12,%f10,%f12
	faddd %f18,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f12,%f12
	fmuld %f36,%f22,%f22
	fmuld %f38,%f28,%f28
	fsubd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f8,qq3,%f14 ! cos(x1)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f10,%f12,%f12
	fmuld %f18,%f22,%f22
	fmuld %f26,%f30,%f30
	faddd %f12,%f34,%f12
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f12,%f10,%f12
	faddd %f18,%f22,%f22
	faddd %f26,%f30,%f30
	fmuld %f34,%f12,%f12
	fmuld %f36,%f22,%f22
	fmuld %f38,%f30,%f30
	fsubd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f18,%f18,%f16
	fmuld %f26,%f26,%f24
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f10,%f14,%f14
	fmuld %f18,%f20,%f20
	fmuld %f26,%f28,%f28
	faddd %f34,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f36,%f20
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f10,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f18,%f20
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f14,%f14
	fmuld %f36,%f20,%f20
	fmuld %f38,%f28,%f28
	faddd %f14,%f12,%f14
	fsubd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f16,qq3,%f22 ! cos(x2)
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f10,%f14,%f14
	fmuld %f18,%f20,%f20
	fmuld %f26,%f30,%f30
	faddd %f34,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f36,%f20
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f10,%f14,%f14
	fmuld %f16,%f22,%f22
	faddd %f20,%f18,%f20
	faddd %f26,%f30,%f30
	fmuld %f34,%f14,%f14
	fmuld %f36,%f20,%f20
	fmuld %f38,%f30,%f30
	faddd %f14,%f12,%f14
	fsubd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f26,%f26,%f24
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f24,qq3,%f30 ! cos(x3)
	fmuld %f16,%f22,%f22
	fmuld %f16,%f20,%f20
	fmuld %f24,%f30,%f30
	fmuld %f16,%f22,%f22
	fmuld %f24,%f28,%f28
	fmuld %f10,%f14,%f14
	fmuld %f18,%f22,%f22
	fmuld %f26,%f28,%f28
	faddd %f34,%f14,%f14
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f38,%f28
	faddd %f10,%f14,%f14
	faddd %f18,%f22,%f22
	fmuld %f24,%f30,%f30
	faddd %f28,%f26,%f28
	fmuld %f34,%f14,%f14
	fmuld %f36,%f22,%f22
	fmuld %f38,%f28,%f28
	faddd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	fsubd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	fmuld %f0,pp3,%f6 ! sin(x0)
	fmuld %f8,pp3,%f14 ! sin(x1)
	fmuld %f16,pp3,%f22 ! sin(x2)
	fmuld %f24,pp3,%f30 ! sin(x3)
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f16,%f20,%f20
	fmuld %f24,%f28,%f28
	fmuld %f16,%f22,%f22
	fmuld %f24,%f30,%f30
	fmuld %f10,%f14,%f14
	fmuld %f18,%f22,%f22
	fmuld %f26,%f30,%f30
	faddd %f34,%f14,%f14
	fmuld %f16,%f20,%f20
	faddd %f36,%f22,%f22
	fmuld %f24,%f28,%f28
	faddd %f38,%f30,%f30
	faddd %f10,%f14,%f14
	faddd %f18,%f22,%f22
	faddd %f26,%f30,%f30
	fmuld %f34,%f14,%f14
	fmuld %f36,%f22,%f22
	fmuld %f38,%f30,%f30
	faddd %f14,%f12,%f14
	faddd %f22,%f20,%f22
	faddd %f30,%f28,%f30
	faddd %f22,%f16,%f22
	faddd %f30,%f24,%f30
	lda [%i1]%asi,%l0 ! preload next argument
	fmovdnz %icc,%f4,%f6
	fmovdnz %icc,%f12,%f14
	fmovdnz %icc,%f20,%f22
	fmovdnz %icc,%f28,%f30
	tst %i5 ! check for huge arguments remaining
	sra %o2,0,%o2 ! sign-extend for V9
	call __vlibm_vcos_big_ultra3
	sra %o5,0,%o5 ! delay slot
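! __vlibm_vcos_big_ultra3 reprocesses the elements that were flagged as
! too large for the in-line reduction above using a slower path suited
! to huge arguments; the sra's sign-extend the 32-bit integer arguments
! as the V9 calling convention expects.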
	faddd %f2,c3two44,%f4
	faddd %f10,c3two44,%f12
	faddd %f18,c3two44,%f20
	bl,pt %icc,1f ! hx < 0x3e400000
! delay slot, harmless if branch taken
	sethi %hi(0x7ff00000),%o7
	bl,a,pt %icc,2f ! branch if finite
! delay slot, squashed if branch not taken
	st %o4,[%fp+biguns] ! set biguns
	fdtoi %f2,%f4 ! raise inexact if not zero
	sethi %hi(0x3ff00000),%o7
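! Tiny-argument case: for |x| < 2^-27, cos(x) rounds to 1.0, but the
! inexact exception still has to be signalled; converting the nonzero
! argument with fdtoi is a cheap way to raise inexact, after which the
! high word of the 1.0 result (0x3ff00000) is built for the store.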
! delay slot, harmless if branch taken
	add %i3,%i4,%i3 ! y += stridey
	andn %l1,%i5,%l0 ! hx &= ~0x80000000
	add %i1,%i2,%i1 ! x += stridex
	bl,pt %icc,1f ! hx < 0x3e400000
! delay slot, harmless if branch taken
	sethi %hi(0x7ff00000),%o7
	bl,a,pt %icc,2f ! branch if finite
! delay slot, squashed if branch not taken
	st %o4,[%fp+biguns] ! set biguns
	fdtoi %f10,%f12 ! raise inexact if not zero
	sethi %hi(0x3ff00000),%o7
	ble,pn %icc,.last1_from_range1
! delay slot, harmless if branch taken
	add %i3,%i4,%i3 ! y += stridey
	andn %l2,%i5,%l1 ! hx &= ~0x80000000
	add %i1,%i2,%i1 ! x += stridex
	bl,pt %icc,1f ! hx < 0x3e400000
! delay slot, harmless if branch taken
	sethi %hi(0x7ff00000),%o7
	bl,a,pt %icc,2f ! branch if finite
! delay slot, squashed if branch not taken
	st %o4,[%fp+biguns] ! set biguns
	fmuld %f18,%f16,%f18
	fdtoi %f18,%f20 ! raise inexact if not zero
	sethi %hi(0x3ff00000),%o7
	ble,pn %icc,.last2_from_range2
! delay slot, harmless if branch taken
	add %i3,%i4,%i3 ! y += stridey
	andn %l3,%i5,%l2 ! hx &= ~0x80000000
	add %i1,%i2,%i1 ! x += stridex
	bl,pt %icc,1f ! hx < 0x3e400000
! delay slot, harmless if branch taken
	sethi %hi(0x7ff00000),%o7
	bl,a,pt %icc,2f ! branch if finite
! delay slot, squashed if branch not taken
	st %o4,[%fp+biguns] ! set biguns
	fmuld %f26,%f24,%f26
	fdtoi %f26,%f28 ! raise inexact if not zero
	sethi %hi(0x3ff00000),%o7
	ble,pn %icc,.last3_from_range3
! delay slot, harmless if branch taken
	add %i3,%i4,%i3 ! y += stridey
	andn %l3,%i5,%l3 ! hx &= ~0x80000000
	add %i1,%i2,%i1 ! x += stridex
	SET_SIZE(__vcos_ultra3)