; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 3
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff -mattr=+aix-small-local-exec-tls < %s \
; RUN:   | FileCheck %s --check-prefix=SMALL-LOCAL-EXEC-SMALLCM64
; RUN: llc -verify-machineinstrs -mcpu=pwr7 -ppc-asm-full-reg-names \
; RUN:   -mtriple powerpc64-ibm-aix-xcoff --code-model=large \
; RUN:   -mattr=+aix-small-local-exec-tls < %s | FileCheck %s \
; RUN:   --check-prefix=SMALL-LOCAL-EXEC-LARGECM64

@ThreadLocalVarInit = thread_local(localexec) global i64 1, align 8
@VarInit = local_unnamed_addr global i64 87, align 8
@IThreadLocalVarInit = internal thread_local(localexec) global i64 1, align 8
declare nonnull ptr @llvm.threadlocal.address.p0(ptr nonnull) #1
%struct.anon = type { i64 }
@ThreadLocalStruct = thread_local(localexec) global %struct.anon zeroinitializer, align 1
@d = thread_local(localexec) global [87 x i64] zeroinitializer, align 8
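
; Rough, hand-written C equivalent of the globals above (for orientation only;
; the IR, not this sketch, is the test input):
;
;   __thread long long ThreadLocalVarInit = 1;
;   long long VarInit = 87;
;   static __thread long long IThreadLocalVarInit = 1;
;   __thread struct { long long x; } ThreadLocalStruct;   // align 1 in the IR
;   __thread long long d[87];
;
; AddrTest1: the address of the local-exec TLS array `d` is materialized with a
; single `la` off the thread pointer (r13) using the @le relocation, in both the
; small and large code models.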
define nonnull ptr @AddrTest1() local_unnamed_addr #0 {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: la r3, d[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: AddrTest1:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: la r3, d[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @d)
  ret ptr %0
}
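
; testUnaligned: even with an align-1 TLS struct, the @le offset folds into an
; `la`; the 8-byte load is then done from the computed address.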
define i64 @testUnaligned() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: testUnaligned:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: la r3, ThreadLocalStruct[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, 0(r3)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: testUnaligned:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: la r3, ThreadLocalStruct[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, 0(r3)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = call align 1 ptr @llvm.threadlocal.address.p0(ptr align 1 @ThreadLocalStruct)
  %x = getelementptr inbounds %struct.anon, ptr %0, i32 0, i32 0
  %1 = load i64, ptr %x, align 1
  ret i64 %1
}
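
; storeITLInit: a store to an internal local-exec TLS variable folds the
; IThreadLocalVarInit[TL]@le displacement directly into the `std` instruction.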
define void @storeITLInit(i64 noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: std r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeITLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: std r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  store i64 %x, ptr %0, align 8
  ret void
}
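
; storeTLInit: same as storeITLInit, but for the non-internal initialized TLS
; variable ThreadLocalVarInit.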
define void @storeTLInit(i64 noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: std r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: storeTLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: std r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  store i64 %x, ptr %0, align 8
  ret void
}
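
; loadITLInit: a load of an internal local-exec TLS variable folds the @le
; displacement directly into the `ld` instruction.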
define i64 @loadITLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load i64, ptr %0, align 8
  ret i64 %1
}
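
; loadITLInit2: mixes a folded @le TLS load with a TOC-based load of @VarInit;
; only the TOC access differs between code models (a single `ld` from L..C0(r2)
; for small, an `addis`/`ld` pair for large).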
define i64 @loadITLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r4, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r4, 0(r4)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: add r3, r4, r3
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadITLInit2:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: addis r4, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r4, L..C0@l(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r4, 0(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: add r3, r4, r3
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load i64, ptr %0, align 8
  %2 = load i64, ptr @VarInit, align 8
  %add = add nsw i64 %2, %1
  ret i64 %add
}
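
; loadTLInit: same as loadITLInit, but for ThreadLocalVarInit.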
define i64 @loadTLInit() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  %1 = load i64, ptr %0, align 8
  ret i64 %1
}
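
; loadTLInit2: same as loadITLInit2, but for ThreadLocalVarInit.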
define i64 @loadTLInit2() {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r4, L..C0(r2) # @VarInit
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r4, 0(r4)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: add r3, r4, r3
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadTLInit2:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: addis r4, L..C0@u(r2)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, ThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r4, L..C0@l(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r4, 0(r4)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: add r3, r4, r3
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @ThreadLocalVarInit)
  %1 = load i64, ptr %0, align 8
  %2 = load i64, ptr @VarInit, align 8
  %add = add nsw i64 %2, %1
  ret i64 %add
}
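
; loadStore1: a read-modify-write of IThreadLocalVarInit; both the `ld` and the
; `std` use the folded @le displacement off r13.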
define void @loadStore1(i64 noundef %x) {
; SMALL-LOCAL-EXEC-SMALLCM64-LABEL: loadStore1:
; SMALL-LOCAL-EXEC-SMALLCM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: ld r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: addi r3, r3, 9
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: std r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-SMALLCM64-NEXT: blr
;
; SMALL-LOCAL-EXEC-LARGECM64-LABEL: loadStore1:
; SMALL-LOCAL-EXEC-LARGECM64: # %bb.0: # %entry
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: ld r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: addi r3, r3, 9
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: std r3, IThreadLocalVarInit[TL]@le(r13)
; SMALL-LOCAL-EXEC-LARGECM64-NEXT: blr
entry:
  %0 = tail call align 8 ptr @llvm.threadlocal.address.p0(ptr align 8 @IThreadLocalVarInit)
  %1 = load i64, ptr %0, align 8
  %add = add nsw i64 %1, 9
  store i64 %add, ptr %0, align 8