; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 3
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff -mattr=+aix-small-local-exec-tls < %s \
; RUN:   | FileCheck %s --check-prefix=SMALL-LOCAL-EXEC-SMALLCM64
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff --code-model=large \
; RUN:   -mattr=+aix-small-local-exec-tls < %s | FileCheck %s \
; RUN:   --check-prefix=SMALL-LOCAL-EXEC-LARGECM64
; Test globals: local-exec TLS doubles (one internal, one external linkage),
; a plain (non-TLS) global used to force a TOC load, and a TLS array used to
; exercise a folded constant offset off the thread pointer.
@ThreadLocalVarInit = thread_local(localexec) global double 1.000000e+00, align 8
@VarInit = local_unnamed_addr global double 8.700000e+01, align 8
@IThreadLocalVarInit = internal thread_local(localexec) global double 1.000000e+00, align 8

; Canonical way to take the address of a thread-local variable.
declare nonnull ptr @llvm.threadlocal.address.p0(ptr nonnull) #1

@f = thread_local(localexec) global [87 x double] zeroinitializer, align 8
; Address of f[6]: with small local-exec TLS access this folds into a single
; "la" off the thread pointer (r13) — f[TL]@le + 6*8 bytes — in both code models.
define nonnull ptr @AddrTest1() local_unnamed_addr #0 {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: la r3, f[TL]@le+48(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: la r3, f[TL]@le+48(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @f)
  %arrayidx = getelementptr inbounds [87 x double], ptr %0, i64 0, i64 6
  ret ptr %arrayidx
}
; Store to an internal local-exec TLS double: expect a single stfd with a
; thread-pointer-relative operand, no TOC access, in both code models.
define void @storeITLInit(double noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: stfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: stfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  store double %x, ptr %0, align 8
  ret void
}
; Store to an external-linkage local-exec TLS double: same single-stfd
; lowering as the internal case, in both code models.
define void @storeTLInit(double noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: stfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: stfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  store double %x, ptr %0, align 8
  ret void
}
; Load of an internal local-exec TLS double: a single lfd off r13.
define double @loadITLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f1, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  ret double %1
}
; TLS load mixed with a non-TLS load: the TLS double still comes straight off
; r13, while @VarInit goes through the TOC (one ld under the small code model,
; an addis/ld @u/@l pair under the large code model).
define double @loadITLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f0, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: addis r3, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f0, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, L..C0@l(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  %2 = load double, ptr @VarInit, align 8
  %add = fadd double %1, %2
  ret double %add
}
; Load of an external-linkage local-exec TLS double: a single lfd off r13.
define double @loadTLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f1, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  ret double %1
}
; Same mixed TLS/TOC pattern as loadITLInit2 but with the external-linkage
; TLS variable: TLS access stays a direct r13-relative lfd; @VarInit still
; needs the TOC (small CM: one ld; large CM: addis/ld @u/@l pair).
define double @loadTLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f0, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: addis r3, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f0, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, L..C0@l(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f1, 0(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: xsadddp f1, f0, f1
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  %1 = load double, ptr %0, align 8
  %2 = load double, ptr @VarInit, align 8
  %add = fadd double %1, %2
  ret double %add
}
146 define void @loadStore1(double noundef %x) {
147 ; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadStore1:
148 ; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
149 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: vspltisw v2, 1
150 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: lfd f1, IThreadLocalVarInit[TL]@le(r13)
151 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: xvcvsxwdp vs0, vs34
152 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: vspltisw v3, 8
153 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: xsadddp f0, f1, f0
154 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: xvcvsxwdp vs1, vs35
155 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: xsadddp f0, f0, f1
156 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: stfd f0, IThreadLocalVarInit[TL]@le(r13)
157 ; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
159 ; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadStore1:
160 ; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
161 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: vspltisw v2, 1
162 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: lfd f1, IThreadLocalVarInit[TL]@le(r13)
163 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: xvcvsxwdp vs0, vs34
164 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: vspltisw v3, 8
165 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: xsadddp f0, f1, f0
166 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: xvcvsxwdp vs1, vs35
167 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: xsadddp f0, f0, f1
168 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: stfd f0, IThreadLocalVarInit[TL]@le(r13)
169 ; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
171 %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
172 %1 = load double, ptr %0, align 8
173 %inc = fadd double %1, 1.000000e+00
174 %add = fadd double %inc, 8.000000e+00
175 store double %add, ptr %0, align 8