; RUN: llc -mtriple=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=GCN %s
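
; Check that a branch on a setcc comparing a sign-extended i1 against -1 or 0
; folds back into the original compare (inverted where needed, e.g. u32 le for
; ugt), so a single v_cmp feeds s_and_saveexec_b64 and no v_cndmask_ is
; emitted to materialize the sext.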
; GCN-LABEL: {{^}}setcc_sgt_true_sext:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_sgt_true_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp sgt i32 %ext, -1
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_sgt_true_sext_swap:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_sgt_true_sext_swap(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp slt i32 -1, %ext
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_ne_true_sext:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_ne_true_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp ne i32 %ext, -1
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_ult_true_sext:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_ult_true_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp ult i32 %ext, -1
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_eq_true_sext:
; GCN: v_cmp_gt_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_eq_true_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp eq i32 %ext, -1
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_sle_true_sext:
; GCN: v_cmp_gt_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_sle_true_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp sle i32 %ext, -1
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_uge_true_sext:
; GCN: v_cmp_gt_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_uge_true_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp uge i32 %ext, -1
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_eq_false_sext:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_eq_false_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp eq i32 %ext, 0
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_sge_false_sext:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_sge_false_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp sge i32 %ext, 0
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_ule_false_sext:
; GCN: v_cmp_le_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_ule_false_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp ule i32 %ext, 0
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_ne_false_sext:
; GCN: v_cmp_gt_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_ne_false_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp ne i32 %ext, 0
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_ugt_false_sext:
; GCN: v_cmp_gt_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_ugt_false_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp ugt i32 %ext, 0
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

; GCN-LABEL: {{^}}setcc_slt_false_sext:
; GCN: v_cmp_gt_u32_e{{32|64}} [[CC:[^,]+]], v{{[0-9]+}}, v{{[0-9]+}}
; GCN-NEXT: s_and_saveexec_b64 {{[^,]+}}, [[CC]]
; GCN-NOT: v_cndmask_

define amdgpu_kernel void @setcc_slt_false_sext(ptr addrspace(1) nocapture %arg) {
bb:
  %x = tail call i32 @llvm.amdgcn.workitem.id.x()
  %y = tail call i32 @llvm.amdgcn.workitem.id.y()
  %cmp = icmp ugt i32 %x, %y
  %ext = sext i1 %cmp to i32
  %cond = icmp slt i32 %ext, 0
  br i1 %cond, label %then, label %endif

then:
  store i32 1, ptr addrspace(1) %arg, align 4
  br label %endif

endif:
  ret void
}

declare i32 @llvm.amdgcn.workitem.id.x() #0

declare i32 @llvm.amdgcn.workitem.id.y() #0

attributes #0 = { nounwind readnone speculatable }