1 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=corei7 | FileCheck --check-prefixes=ALL %s
2 ; RUN: llc < %s -mtriple=x86_64-apple-darwin -mcpu=corei7-avx | FileCheck --check-prefixes=ALL,AVX %s
; preserve_nonecc (preserve_none) functions are not required to preserve any
; registers for their caller. The CHECK-NOT lines below verify that llc emits
; no callee-saved spill/restore code in the prologue/epilogue, even though the
; inline asm clobbers every GPR and every xmm register.
; NOTE(review): the function's return and closing brace fall outside this
; excerpt (original lines 45-47 are not shown).
4 ; Don't need to preserve registers before using them.
5 define preserve_nonecc double @preserve_nonecc1() nounwind {
7 ;ALL-LABEL: preserve_nonecc1
22 ;ALL-NOT: movaps %xmm1
23 ;ALL-NOT: movaps %xmm0
24 ;AVX-NOT: vmovups %ymm1
25 ;AVX-NOT: vmovups %ymm0
26 ;ALL-NOT: movq {{.*}}, %rax
27 ;ALL-NOT: movq {{.*}}, %rbx
28 ;ALL-NOT: movq {{.*}}, %rcx
29 ;ALL-NOT: movq {{.*}}, %rdx
30 ;ALL-NOT: movq {{.*}}, %rsi
31 ;ALL-NOT: movq {{.*}}, %rdi
32 ;ALL-NOT: movq {{.*}}, %r8
33 ;ALL-NOT: movq {{.*}}, %r9
34 ;ALL-NOT: movq {{.*}}, %r10
35 ;ALL-NOT: movq {{.*}}, %r11
36 ;ALL-NOT: movq {{.*}}, %r12
37 ;ALL-NOT: movq {{.*}}, %r13
38 ;ALL-NOT: movq {{.*}}, %r14
39 ;ALL-NOT: movq {{.*}}, %r15
40 ;ALL-NOT: movaps {{.*}} %xmm0
41 ;ALL-NOT: movaps {{.*}} %xmm1
42 ;AVX-NOT: vmovups {{.*}} %ymm0
43 ;AVX-NOT: vmovups {{.*}} %ymm1
; Clobber every general-purpose register (incl. rbp) and xmm0-xmm15; under
; preserve_none no save/restore of them should be generated.
44   call void asm sideeffect "", "~{rax},~{rbx},~{rcx},~{rdx},~{rsi},~{rdi},~{r8},~{r9},~{r10},~{r11},~{r12},~{r13},~{r14},~{r15},~{rbp},~{xmm0},~{xmm1},~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15}"()
; Because a preserve_none callee preserves nothing, the CALLER must spill any
; values that are live across the call. The CHECK lines verify that each value
; pinned into a volatile GPR (rax, rcx, rdx, r8-r11) or xmm register
; (xmm0-xmm15) below is stored to memory before the call to @bar_double.
; NOTE(review): rdi/rsi and r12-r15 are not checked here — presumably they are
; treated differently under this convention; confirm against the CC definition.
; NOTE(review): the function's `ret`/closing brace fall outside this excerpt.
48 ; Save/restore live registers across preserve_none function call.
49 declare preserve_nonecc double @bar_double(i64, i64)
50 define void @preserve_nonecc2() nounwind {
52 ;ALL-LABEL: preserve_nonecc2
76 ;ALL: movq {{.*}}, %rax
77 ;ALL: movq {{.*}}, %rcx
78 ;ALL: movq {{.*}}, %rdx
79 ;ALL: movq {{.*}}, %r8
80 ;ALL: movq {{.*}}, %r9
81 ;ALL: movq {{.*}}, %r10
82 ;ALL: movq {{.*}}, %r11
83 ;ALL: movaps {{.*}} %xmm0
84 ;ALL: movaps {{.*}} %xmm1
85 ;ALL: movaps {{.*}} %xmm2
86 ;ALL: movaps {{.*}} %xmm3
87 ;ALL: movaps {{.*}} %xmm4
88 ;ALL: movaps {{.*}} %xmm5
89 ;ALL: movaps {{.*}} %xmm6
90 ;ALL: movaps {{.*}} %xmm7
91 ;ALL: movaps {{.*}} %xmm8
92 ;ALL: movaps {{.*}} %xmm9
93 ;ALL: movaps {{.*}} %xmm10
94 ;ALL: movaps {{.*}} %xmm11
95 ;ALL: movaps {{.*}} %xmm12
96 ;ALL: movaps {{.*}} %xmm13
97 ;ALL: movaps {{.*}} %xmm14
98 ;ALL: movaps {{.*}} %xmm15
; Pin one live value into each register of interest via empty inline asm with
; an explicit output constraint.
99   %a0 = call i64 asm sideeffect "", "={rax}"() nounwind
100   %a1 = call i64 asm sideeffect "", "={rcx}"() nounwind
101   %a2 = call i64 asm sideeffect "", "={rdx}"() nounwind
102   %a3 = call i64 asm sideeffect "", "={r8}"() nounwind
103   %a4 = call i64 asm sideeffect "", "={r9}"() nounwind
104   %a5 = call i64 asm sideeffect "", "={r10}"() nounwind
105   %a6 = call i64 asm sideeffect "", "={r11}"() nounwind
106   %a10 = call <2 x double> asm sideeffect "", "={xmm0}"() nounwind
107   %a11 = call <2 x double> asm sideeffect "", "={xmm1}"() nounwind
108   %a12 = call <2 x double> asm sideeffect "", "={xmm2}"() nounwind
109   %a13 = call <2 x double> asm sideeffect "", "={xmm3}"() nounwind
110   %a14 = call <2 x double> asm sideeffect "", "={xmm4}"() nounwind
111   %a15 = call <2 x double> asm sideeffect "", "={xmm5}"() nounwind
112   %a16 = call <2 x double> asm sideeffect "", "={xmm6}"() nounwind
113   %a17 = call <2 x double> asm sideeffect "", "={xmm7}"() nounwind
114   %a18 = call <2 x double> asm sideeffect "", "={xmm8}"() nounwind
115   %a19 = call <2 x double> asm sideeffect "", "={xmm9}"() nounwind
116   %a20 = call <2 x double> asm sideeffect "", "={xmm10}"() nounwind
117   %a21 = call <2 x double> asm sideeffect "", "={xmm11}"() nounwind
118   %a22 = call <2 x double> asm sideeffect "", "={xmm12}"() nounwind
119   %a23 = call <2 x double> asm sideeffect "", "={xmm13}"() nounwind
120   %a24 = call <2 x double> asm sideeffect "", "={xmm14}"() nounwind
121   %a25 = call <2 x double> asm sideeffect "", "={xmm15}"() nounwind
; The preserve_none call: every value above must survive it, forcing the
; caller-side spills checked for above.
122   call preserve_nonecc double @bar_double(i64 1, i64 2)
; Consume every pinned value after the call (via input constraints) so all of
; them are live across @bar_double.
123   call void asm sideeffect "", "{rax},{rcx},{rdx},{r8},{r9},{r10},{r11},{xmm0},{xmm1},{xmm2},{xmm3},{xmm4},{xmm5},{xmm6},{xmm7},{xmm8},{xmm9},{xmm10},{xmm11},{xmm12},{xmm13},{xmm14},{xmm15}"(i64 %a0, i64 %a1, i64 %a2, i64 %a3, i64 %a4, i64 %a5, i64 %a6, <2 x double> %a10, <2 x double> %a11, <2 x double> %a12, <2 x double> %a13, <2 x double> %a14, <2 x double> %a15, <2 x double> %a16, <2 x double> %a17, <2 x double> %a18, <2 x double> %a19, <2 x double> %a20, <2 x double> %a21, <2 x double> %a22, <2 x double> %a23, <2 x double> %a24, <2 x double> %a25)