1 ;; #pragma OPENCL EXTENSION cl_khr_subgroup_non_uniform_arithmetic : enable
2 ;; #pragma OPENCL EXTENSION cl_khr_fp16 : enable
3 ;; #pragma OPENCL EXTENSION cl_khr_fp64 : enable
5 ;; kernel void testNonUniformArithmeticChar(global char* dst)
8 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
9 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
10 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
11 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
12 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
13 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
14 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
15 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
16 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
17 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
18 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
19 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
22 ;; kernel void testNonUniformArithmeticUChar(global uchar* dst)
25 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
26 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
27 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
28 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
29 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
30 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
31 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
32 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
33 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
34 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
35 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
36 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
39 ;; kernel void testNonUniformArithmeticShort(global short* dst)
42 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
43 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
44 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
45 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
46 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
47 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
48 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
49 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
50 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
51 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
52 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
53 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
56 ;; kernel void testNonUniformArithmeticUShort(global ushort* dst)
59 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
60 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
61 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
62 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
63 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
64 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
65 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
66 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
67 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
68 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
69 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
70 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
73 ;; kernel void testNonUniformArithmeticInt(global int* dst)
76 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
77 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
78 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
79 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
80 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
81 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
82 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
83 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
84 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
85 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
86 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
87 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
90 ;; kernel void testNonUniformArithmeticUInt(global uint* dst)
93 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
94 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
95 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
96 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
97 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
98 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
99 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
100 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
101 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
102 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
103 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
104 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
107 ;; kernel void testNonUniformArithmeticLong(global long* dst)
110 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
111 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
112 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
113 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
114 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
115 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
116 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
117 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
118 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
119 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
120 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
121 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
124 ;; kernel void testNonUniformArithmeticULong(global ulong* dst)
127 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
128 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
129 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
130 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
131 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
132 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
133 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
134 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
135 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
136 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
137 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
138 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
141 ;; kernel void testNonUniformArithmeticFloat(global float* dst)
144 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
145 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
146 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
147 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
148 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
149 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
150 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
151 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
152 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
153 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
154 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
155 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
158 ;; kernel void testNonUniformArithmeticHalf(global half* dst)
161 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
162 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
163 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
164 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
165 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
166 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
167 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
168 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
169 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
170 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
171 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
172 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
175 ;; kernel void testNonUniformArithmeticDouble(global double* dst)
178 ;; dst[0] = sub_group_non_uniform_reduce_add(v);
179 ;; dst[1] = sub_group_non_uniform_reduce_mul(v);
180 ;; dst[2] = sub_group_non_uniform_reduce_min(v);
181 ;; dst[3] = sub_group_non_uniform_reduce_max(v);
182 ;; dst[4] = sub_group_non_uniform_scan_inclusive_add(v);
183 ;; dst[5] = sub_group_non_uniform_scan_inclusive_mul(v);
184 ;; dst[6] = sub_group_non_uniform_scan_inclusive_min(v);
185 ;; dst[7] = sub_group_non_uniform_scan_inclusive_max(v);
186 ;; dst[8] = sub_group_non_uniform_scan_exclusive_add(v);
187 ;; dst[9] = sub_group_non_uniform_scan_exclusive_mul(v);
188 ;; dst[10] = sub_group_non_uniform_scan_exclusive_min(v);
189 ;; dst[11] = sub_group_non_uniform_scan_exclusive_max(v);
192 ;; kernel void testNonUniformBitwiseChar(global char* dst)
195 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
196 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
197 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
198 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
199 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
200 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
201 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
202 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
203 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
206 ;; kernel void testNonUniformBitwiseUChar(global uchar* dst)
209 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
210 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
211 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
212 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
213 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
214 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
215 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
216 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
217 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
220 ;; kernel void testNonUniformBitwiseShort(global short* dst)
223 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
224 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
225 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
226 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
227 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
228 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
229 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
230 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
231 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
234 ;; kernel void testNonUniformBitwiseUShort(global ushort* dst)
237 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
238 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
239 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
240 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
241 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
242 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
243 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
244 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
245 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
248 ;; kernel void testNonUniformBitwiseInt(global int* dst)
251 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
252 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
253 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
254 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
255 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
256 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
257 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
258 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
259 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
262 ;; kernel void testNonUniformBitwiseUInt(global uint* dst)
265 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
266 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
267 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
268 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
269 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
270 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
271 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
272 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
273 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
276 ;; kernel void testNonUniformBitwiseLong(global long* dst)
279 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
280 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
281 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
282 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
283 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
284 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
285 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
286 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
287 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
290 ;; kernel void testNonUniformBitwiseULong(global ulong* dst)
293 ;; dst[0] = sub_group_non_uniform_reduce_and(v);
294 ;; dst[1] = sub_group_non_uniform_reduce_or(v);
295 ;; dst[2] = sub_group_non_uniform_reduce_xor(v);
296 ;; dst[3] = sub_group_non_uniform_scan_inclusive_and(v);
297 ;; dst[4] = sub_group_non_uniform_scan_inclusive_or(v);
298 ;; dst[5] = sub_group_non_uniform_scan_inclusive_xor(v);
299 ;; dst[6] = sub_group_non_uniform_scan_exclusive_and(v);
300 ;; dst[7] = sub_group_non_uniform_scan_exclusive_or(v);
301 ;; dst[8] = sub_group_non_uniform_scan_exclusive_xor(v);
304 ;; kernel void testNonUniformLogical(global int* dst)
307 ;; dst[0] = sub_group_non_uniform_reduce_logical_and(v);
308 ;; dst[1] = sub_group_non_uniform_reduce_logical_or(v);
309 ;; dst[2] = sub_group_non_uniform_reduce_logical_xor(v);
310 ;; dst[3] = sub_group_non_uniform_scan_inclusive_logical_and(v);
311 ;; dst[4] = sub_group_non_uniform_scan_inclusive_logical_or(v);
312 ;; dst[5] = sub_group_non_uniform_scan_inclusive_logical_xor(v);
313 ;; dst[6] = sub_group_non_uniform_scan_exclusive_logical_and(v);
314 ;; dst[7] = sub_group_non_uniform_scan_exclusive_logical_or(v);
315 ;; dst[8] = sub_group_non_uniform_scan_exclusive_logical_xor(v);
318 ; RUN: llc -O0 -mtriple=spirv64-unknown-unknown %s -o - | FileCheck %s --check-prefix=CHECK-SPIRV
320 ; CHECK-SPIRV-DAG: OpCapability GroupNonUniformArithmetic
322 ; CHECK-SPIRV-DAG: %[[#bool:]] = OpTypeBool
323 ; CHECK-SPIRV-DAG: %[[#char:]] = OpTypeInt 8 0
324 ; CHECK-SPIRV-DAG: %[[#short:]] = OpTypeInt 16 0
325 ; CHECK-SPIRV-DAG: %[[#int:]] = OpTypeInt 32 0
326 ; CHECK-SPIRV-DAG: %[[#long:]] = OpTypeInt 64 0
327 ; CHECK-SPIRV-DAG: %[[#half:]] = OpTypeFloat 16
328 ; CHECK-SPIRV-DAG: %[[#float:]] = OpTypeFloat 32
329 ; CHECK-SPIRV-DAG: %[[#double:]] = OpTypeFloat 64
331 ; CHECK-SPIRV-DAG: %[[#false:]] = OpConstantFalse %[[#bool]]
332 ; CHECK-SPIRV-DAG: %[[#ScopeSubgroup:]] = OpConstant %[[#int]] 3
333 ; CHECK-SPIRV-DAG: %[[#char_0:]] = OpConstant %[[#char]] 0
334 ; CHECK-SPIRV-DAG: %[[#short_0:]] = OpConstant %[[#short]] 0
335 ; CHECK-SPIRV-DAG: %[[#int_0:]] = OpConstant %[[#int]] 0
336 ; CHECK-SPIRV-DAG: %[[#long_0:]] = OpConstantNull %[[#long]]
337 ; CHECK-SPIRV-DAG: %[[#half_0:]] = OpConstant %[[#half]] 0
338 ; CHECK-SPIRV-DAG: %[[#float_0:]] = OpConstant %[[#float]] 0
339 ; CHECK-SPIRV-DAG: %[[#double_0:]] = OpConstant %[[#double]] 0
341 ; CHECK-SPIRV: OpFunction
342 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
343 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
344 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
345 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
346 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
347 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
348 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
349 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
350 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
351 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
352 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
353 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
354 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: exercises every signed-char subgroup non-uniform arithmetic builtin
; (Itanium mangling suffix 'c' = signed char), each called with a constant 0
; operand, storing each result at consecutive i8 slots dst[0]..dst[11].
; The call order matches the CHECK-SPIRV sequence above:
; {IAdd, IMul, SMin, SMax} x {Reduce, InclusiveScan, ExclusiveScan}.
define dso_local spir_kernel void @testNonUniformArithmeticChar(i8 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..3]: reduce add / mul / min / max
%2 = tail call spir_func signext i8 @_Z32sub_group_non_uniform_reduce_addc(i8 signext 0)
store i8 %2, i8 addrspace(1)* %0, align 1
%3 = tail call spir_func signext i8 @_Z32sub_group_non_uniform_reduce_mulc(i8 signext 0)
%4 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 1
store i8 %3, i8 addrspace(1)* %4, align 1
%5 = tail call spir_func signext i8 @_Z32sub_group_non_uniform_reduce_minc(i8 signext 0)
%6 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 2
store i8 %5, i8 addrspace(1)* %6, align 1
%7 = tail call spir_func signext i8 @_Z32sub_group_non_uniform_reduce_maxc(i8 signext 0)
%8 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 3
store i8 %7, i8 addrspace(1)* %8, align 1
; dst[4..7]: inclusive-scan add / mul / min / max
%9 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_addc(i8 signext 0)
%10 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 4
store i8 %9, i8 addrspace(1)* %10, align 1
%11 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_mulc(i8 signext 0)
%12 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 5
store i8 %11, i8 addrspace(1)* %12, align 1
%13 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_minc(i8 signext 0)
%14 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 6
store i8 %13, i8 addrspace(1)* %14, align 1
%15 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_maxc(i8 signext 0)
%16 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 7
store i8 %15, i8 addrspace(1)* %16, align 1
; dst[8..11]: exclusive-scan add / mul / min / max
%17 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_addc(i8 signext 0)
%18 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 8
store i8 %17, i8 addrspace(1)* %18, align 1
%19 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_mulc(i8 signext 0)
%20 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 9
store i8 %19, i8 addrspace(1)* %20, align 1
%21 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_minc(i8 signext 0)
%22 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 10
store i8 %21, i8 addrspace(1)* %22, align 1
%23 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_maxc(i8 signext 0)
%24 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 11
store i8 %23, i8 addrspace(1)* %24, align 1
; Declarations of the signed-char ('c'-suffixed) OpenCL subgroup non-uniform
; arithmetic builtins; the backend lowers calls to these mangled names into
; signed OpGroupNonUniform{IAdd,IMul,SMin,SMax} ops (see CHECK-SPIRV above).
declare dso_local spir_func signext i8 @_Z32sub_group_non_uniform_reduce_addc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z32sub_group_non_uniform_reduce_mulc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z32sub_group_non_uniform_reduce_minc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z32sub_group_non_uniform_reduce_maxc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_addc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_mulc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_minc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_maxc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_addc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_mulc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_minc(i8 signext) local_unnamed_addr
declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_maxc(i8 signext) local_unnamed_addr
419 ; CHECK-SPIRV: OpFunction
420 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
421 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
422 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
423 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
424 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
425 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
426 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
427 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
428 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
429 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
430 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
431 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
432 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: unsigned-char variant (Itanium mangling suffix 'h' = unsigned char,
; zeroext parameter ABI). Same layout as the signed-char kernel — constant 0
; operand, results stored at dst[0]..dst[11] — but min/max now lower to the
; unsigned OpGroupNonUniformUMin/UMax forms (see CHECK-SPIRV above).
define dso_local spir_kernel void @testNonUniformArithmeticUChar(i8 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..3]: reduce add / mul / min / max
%2 = tail call spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_addh(i8 zeroext 0)
store i8 %2, i8 addrspace(1)* %0, align 1
%3 = tail call spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_mulh(i8 zeroext 0)
%4 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 1
store i8 %3, i8 addrspace(1)* %4, align 1
%5 = tail call spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_minh(i8 zeroext 0)
%6 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 2
store i8 %5, i8 addrspace(1)* %6, align 1
%7 = tail call spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_maxh(i8 zeroext 0)
%8 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 3
store i8 %7, i8 addrspace(1)* %8, align 1
; dst[4..7]: inclusive-scan add / mul / min / max
%9 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_addh(i8 zeroext 0)
%10 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 4
store i8 %9, i8 addrspace(1)* %10, align 1
%11 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_mulh(i8 zeroext 0)
%12 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 5
store i8 %11, i8 addrspace(1)* %12, align 1
%13 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_minh(i8 zeroext 0)
%14 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 6
store i8 %13, i8 addrspace(1)* %14, align 1
%15 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_maxh(i8 zeroext 0)
%16 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 7
store i8 %15, i8 addrspace(1)* %16, align 1
; dst[8..11]: exclusive-scan add / mul / min / max
%17 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_addh(i8 zeroext 0)
%18 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 8
store i8 %17, i8 addrspace(1)* %18, align 1
%19 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_mulh(i8 zeroext 0)
%20 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 9
store i8 %19, i8 addrspace(1)* %20, align 1
%21 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_minh(i8 zeroext 0)
%22 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 10
store i8 %21, i8 addrspace(1)* %22, align 1
%23 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_maxh(i8 zeroext 0)
%24 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 11
store i8 %23, i8 addrspace(1)* %24, align 1
; Declarations of the unsigned-char ('h'-suffixed) subgroup non-uniform
; arithmetic builtins; min/max lower to the unsigned OpGroupNonUniformUMin/
; UMax ops (see CHECK-SPIRV above).
declare dso_local spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_addh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_mulh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_minh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_maxh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_addh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_mulh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_minh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_maxh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_addh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_mulh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_minh(i8 zeroext) local_unnamed_addr
declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_maxh(i8 zeroext) local_unnamed_addr
497 ; CHECK-SPIRV: OpFunction
498 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
499 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
500 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
501 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
502 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
503 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
504 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
505 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
506 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
507 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
508 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
509 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
510 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: signed-short variant (Itanium mangling suffix 's', i16, signext ABI,
; 2-byte aligned stores). Same pattern as the char kernel — constant 0
; operand, results stored at dst[0]..dst[11] — lowering to the signed
; OpGroupNonUniform{IAdd,IMul,SMin,SMax} ops with the i16 result type
; (see CHECK-SPIRV above).
define dso_local spir_kernel void @testNonUniformArithmeticShort(i16 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..3]: reduce add / mul / min / max
%2 = tail call spir_func signext i16 @_Z32sub_group_non_uniform_reduce_adds(i16 signext 0)
store i16 %2, i16 addrspace(1)* %0, align 2
%3 = tail call spir_func signext i16 @_Z32sub_group_non_uniform_reduce_muls(i16 signext 0)
%4 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 1
store i16 %3, i16 addrspace(1)* %4, align 2
%5 = tail call spir_func signext i16 @_Z32sub_group_non_uniform_reduce_mins(i16 signext 0)
%6 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 2
store i16 %5, i16 addrspace(1)* %6, align 2
%7 = tail call spir_func signext i16 @_Z32sub_group_non_uniform_reduce_maxs(i16 signext 0)
%8 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 3
store i16 %7, i16 addrspace(1)* %8, align 2
; dst[4..7]: inclusive-scan add / mul / min / max
%9 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_adds(i16 signext 0)
%10 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 4
store i16 %9, i16 addrspace(1)* %10, align 2
%11 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_muls(i16 signext 0)
%12 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 5
store i16 %11, i16 addrspace(1)* %12, align 2
%13 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_mins(i16 signext 0)
%14 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 6
store i16 %13, i16 addrspace(1)* %14, align 2
%15 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_maxs(i16 signext 0)
%16 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 7
store i16 %15, i16 addrspace(1)* %16, align 2
; dst[8..11]: exclusive-scan add / mul / min / max
%17 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_adds(i16 signext 0)
%18 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 8
store i16 %17, i16 addrspace(1)* %18, align 2
%19 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_muls(i16 signext 0)
%20 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 9
store i16 %19, i16 addrspace(1)* %20, align 2
%21 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_mins(i16 signext 0)
%22 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 10
store i16 %21, i16 addrspace(1)* %22, align 2
%23 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_maxs(i16 signext 0)
%24 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 11
store i16 %23, i16 addrspace(1)* %24, align 2
; Declarations of the signed-short ('s'-suffixed) subgroup non-uniform
; arithmetic builtins; lowered to signed OpGroupNonUniform{IAdd,IMul,SMin,
; SMax} ops with the 16-bit integer result type (see CHECK-SPIRV above).
declare dso_local spir_func signext i16 @_Z32sub_group_non_uniform_reduce_adds(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z32sub_group_non_uniform_reduce_muls(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z32sub_group_non_uniform_reduce_mins(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z32sub_group_non_uniform_reduce_maxs(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_adds(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_muls(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_mins(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_maxs(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_adds(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_muls(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_mins(i16 signext) local_unnamed_addr
declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_maxs(i16 signext) local_unnamed_addr
575 ; CHECK-SPIRV: OpFunction
576 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
577 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
578 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
579 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
580 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
581 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
582 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
583 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
584 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
585 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
586 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
587 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
588 ; CHECK-SPIRV: OpFunctionEnd
; Kernel from the OpenCL 'testNonUniformArithmeticUShort' source in the header:
; twelve ushort builtin calls ('t' suffix = unsigned short), each result stored
; to dst[0..11]. The CHECK-SPIRV block above expects these to lower to
; OpGroupNonUniform{IAdd,IMul,UMin,UMax} x {Reduce,InclusiveScan,ExclusiveScan}.
590 define dso_local spir_kernel void @testNonUniformArithmeticUShort(i16 addrspace(1)* nocapture) local_unnamed_addr {
; reduce -> dst[0..3]
591 %2 = tail call spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_addt(i16 zeroext 0)
592 store i16 %2, i16 addrspace(1)* %0, align 2
593 %3 = tail call spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_mult(i16 zeroext 0)
594 %4 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 1
595 store i16 %3, i16 addrspace(1)* %4, align 2
596 %5 = tail call spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_mint(i16 zeroext 0)
597 %6 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 2
598 store i16 %5, i16 addrspace(1)* %6, align 2
599 %7 = tail call spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_maxt(i16 zeroext 0)
600 %8 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 3
601 store i16 %7, i16 addrspace(1)* %8, align 2
; inclusive scan -> dst[4..7]
602 %9 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_addt(i16 zeroext 0)
603 %10 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 4
604 store i16 %9, i16 addrspace(1)* %10, align 2
605 %11 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_mult(i16 zeroext 0)
606 %12 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 5
607 store i16 %11, i16 addrspace(1)* %12, align 2
608 %13 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_mint(i16 zeroext 0)
609 %14 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 6
610 store i16 %13, i16 addrspace(1)* %14, align 2
611 %15 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_maxt(i16 zeroext 0)
612 %16 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 7
613 store i16 %15, i16 addrspace(1)* %16, align 2
; exclusive scan -> dst[8..11]
614 %17 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_addt(i16 zeroext 0)
615 %18 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 8
616 store i16 %17, i16 addrspace(1)* %18, align 2
617 %19 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_mult(i16 zeroext 0)
618 %20 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 9
619 store i16 %19, i16 addrspace(1)* %20, align 2
620 %21 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_mint(i16 zeroext 0)
621 %22 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 10
622 store i16 %21, i16 addrspace(1)* %22, align 2
623 %23 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_maxt(i16 zeroext 0)
624 %24 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 11
625 store i16 %23, i16 addrspace(1)* %24, align 2
; Itanium-mangled declarations of the ushort subgroup builtins
; ('t' suffix = unsigned short) called by testNonUniformArithmeticUShort.
629 declare dso_local spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_addt(i16 zeroext) local_unnamed_addr
631 declare dso_local spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_mult(i16 zeroext) local_unnamed_addr
633 declare dso_local spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_mint(i16 zeroext) local_unnamed_addr
635 declare dso_local spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_maxt(i16 zeroext) local_unnamed_addr
637 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_addt(i16 zeroext) local_unnamed_addr
639 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_mult(i16 zeroext) local_unnamed_addr
641 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_mint(i16 zeroext) local_unnamed_addr
643 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_maxt(i16 zeroext) local_unnamed_addr
645 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_addt(i16 zeroext) local_unnamed_addr
647 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_mult(i16 zeroext) local_unnamed_addr
649 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_mint(i16 zeroext) local_unnamed_addr
651 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_maxt(i16 zeroext) local_unnamed_addr
653 ; CHECK-SPIRV: OpFunction
654 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
655 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
656 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
657 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
658 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
659 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
660 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
661 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
662 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
663 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
664 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
665 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
666 ; CHECK-SPIRV: OpFunctionEnd
; Kernel for the signed-int variants ('i' suffix): twelve builtin calls stored
; to dst[0..11]. The CHECK-SPIRV block above expects SMin/SMax (signed) here,
; in contrast to the unsigned kernels which expect UMin/UMax.
668 define dso_local spir_kernel void @testNonUniformArithmeticInt(i32 addrspace(1)* nocapture) local_unnamed_addr {
; reduce -> dst[0..3]
669 %2 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_addi(i32 0)
670 store i32 %2, i32 addrspace(1)* %0, align 4
671 %3 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_muli(i32 0)
672 %4 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 1
673 store i32 %3, i32 addrspace(1)* %4, align 4
674 %5 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_mini(i32 0)
675 %6 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 2
676 store i32 %5, i32 addrspace(1)* %6, align 4
677 %7 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_maxi(i32 0)
678 %8 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 3
679 store i32 %7, i32 addrspace(1)* %8, align 4
; inclusive scan -> dst[4..7]
680 %9 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_addi(i32 0)
681 %10 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 4
682 store i32 %9, i32 addrspace(1)* %10, align 4
683 %11 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_muli(i32 0)
684 %12 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 5
685 store i32 %11, i32 addrspace(1)* %12, align 4
686 %13 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_mini(i32 0)
687 %14 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 6
688 store i32 %13, i32 addrspace(1)* %14, align 4
689 %15 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_maxi(i32 0)
690 %16 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 7
691 store i32 %15, i32 addrspace(1)* %16, align 4
; exclusive scan -> dst[8..11]
692 %17 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_addi(i32 0)
693 %18 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 8
694 store i32 %17, i32 addrspace(1)* %18, align 4
695 %19 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_muli(i32 0)
696 %20 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 9
697 store i32 %19, i32 addrspace(1)* %20, align 4
698 %21 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_mini(i32 0)
699 %22 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 10
700 store i32 %21, i32 addrspace(1)* %22, align 4
701 %23 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_maxi(i32 0)
702 %24 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 11
703 store i32 %23, i32 addrspace(1)* %24, align 4
; Itanium-mangled declarations of the signed-int subgroup builtins
; ('i' suffix = int) called by testNonUniformArithmeticInt.
707 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_addi(i32) local_unnamed_addr
709 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_muli(i32) local_unnamed_addr
711 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_mini(i32) local_unnamed_addr
713 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_maxi(i32) local_unnamed_addr
715 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_addi(i32) local_unnamed_addr
717 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_muli(i32) local_unnamed_addr
719 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_mini(i32) local_unnamed_addr
721 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_maxi(i32) local_unnamed_addr
723 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_addi(i32) local_unnamed_addr
725 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_muli(i32) local_unnamed_addr
727 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_mini(i32) local_unnamed_addr
729 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_maxi(i32) local_unnamed_addr
731 ; CHECK-SPIRV: OpFunction
732 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
733 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
734 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
735 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
736 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
737 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
738 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
739 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
740 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
741 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
742 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
743 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
744 ; CHECK-SPIRV: OpFunctionEnd
; Kernel for the uint variants ('j' suffix = unsigned int): twelve builtin
; calls stored to dst[0..11]. The CHECK-SPIRV block above expects UMin/UMax
; (unsigned) with the same int result type as the signed kernel.
746 define dso_local spir_kernel void @testNonUniformArithmeticUInt(i32 addrspace(1)* nocapture) local_unnamed_addr {
; reduce -> dst[0..3]
747 %2 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_addj(i32 0)
748 store i32 %2, i32 addrspace(1)* %0, align 4
749 %3 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_mulj(i32 0)
750 %4 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 1
751 store i32 %3, i32 addrspace(1)* %4, align 4
752 %5 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_minj(i32 0)
753 %6 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 2
754 store i32 %5, i32 addrspace(1)* %6, align 4
755 %7 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_maxj(i32 0)
756 %8 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 3
757 store i32 %7, i32 addrspace(1)* %8, align 4
; inclusive scan -> dst[4..7]
758 %9 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_addj(i32 0)
759 %10 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 4
760 store i32 %9, i32 addrspace(1)* %10, align 4
761 %11 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_mulj(i32 0)
762 %12 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 5
763 store i32 %11, i32 addrspace(1)* %12, align 4
764 %13 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_minj(i32 0)
765 %14 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 6
766 store i32 %13, i32 addrspace(1)* %14, align 4
767 %15 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_maxj(i32 0)
768 %16 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 7
769 store i32 %15, i32 addrspace(1)* %16, align 4
; exclusive scan -> dst[8..11]
770 %17 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_addj(i32 0)
771 %18 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 8
772 store i32 %17, i32 addrspace(1)* %18, align 4
773 %19 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_mulj(i32 0)
774 %20 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 9
775 store i32 %19, i32 addrspace(1)* %20, align 4
776 %21 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_minj(i32 0)
777 %22 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 10
778 store i32 %21, i32 addrspace(1)* %22, align 4
779 %23 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_maxj(i32 0)
780 %24 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 11
781 store i32 %23, i32 addrspace(1)* %24, align 4
; Itanium-mangled declarations of the uint subgroup builtins
; ('j' suffix = unsigned int) called by testNonUniformArithmeticUInt.
785 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_addj(i32) local_unnamed_addr
787 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_mulj(i32) local_unnamed_addr
789 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_minj(i32) local_unnamed_addr
791 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_maxj(i32) local_unnamed_addr
793 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_addj(i32) local_unnamed_addr
795 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_mulj(i32) local_unnamed_addr
797 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_minj(i32) local_unnamed_addr
799 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_maxj(i32) local_unnamed_addr
801 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_addj(i32) local_unnamed_addr
803 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_mulj(i32) local_unnamed_addr
805 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_minj(i32) local_unnamed_addr
807 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_maxj(i32) local_unnamed_addr
809 ; CHECK-SPIRV: OpFunction
810 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
811 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
812 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
813 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
814 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
815 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
816 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
817 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
818 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
819 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
820 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMin %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
821 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformSMax %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
822 ; CHECK-SPIRV: OpFunctionEnd
; Kernel for the signed-long variants ('l' suffix): twelve builtin calls
; stored to dst[0..11]. The CHECK-SPIRV block above expects SMin/SMax
; (signed) with a long result type.
824 define dso_local spir_kernel void @testNonUniformArithmeticLong(i64 addrspace(1)* nocapture) local_unnamed_addr {
; reduce -> dst[0..3]
825 %2 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_addl(i64 0)
826 store i64 %2, i64 addrspace(1)* %0, align 8
827 %3 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_mull(i64 0)
828 %4 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 1
829 store i64 %3, i64 addrspace(1)* %4, align 8
830 %5 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_minl(i64 0)
831 %6 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 2
832 store i64 %5, i64 addrspace(1)* %6, align 8
833 %7 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_maxl(i64 0)
834 %8 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 3
835 store i64 %7, i64 addrspace(1)* %8, align 8
; inclusive scan -> dst[4..7]
836 %9 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_addl(i64 0)
837 %10 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 4
838 store i64 %9, i64 addrspace(1)* %10, align 8
839 %11 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_mull(i64 0)
840 %12 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 5
841 store i64 %11, i64 addrspace(1)* %12, align 8
842 %13 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_minl(i64 0)
843 %14 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 6
844 store i64 %13, i64 addrspace(1)* %14, align 8
845 %15 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_maxl(i64 0)
846 %16 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 7
847 store i64 %15, i64 addrspace(1)* %16, align 8
; exclusive scan -> dst[8..11]
848 %17 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_addl(i64 0)
849 %18 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 8
850 store i64 %17, i64 addrspace(1)* %18, align 8
851 %19 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_mull(i64 0)
852 %20 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 9
853 store i64 %19, i64 addrspace(1)* %20, align 8
854 %21 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_minl(i64 0)
855 %22 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 10
856 store i64 %21, i64 addrspace(1)* %22, align 8
857 %23 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_maxl(i64 0)
858 %24 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 11
859 store i64 %23, i64 addrspace(1)* %24, align 8
; Itanium-mangled declarations of the signed-long subgroup builtins
; ('l' suffix = long) called by testNonUniformArithmeticLong.
863 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_addl(i64) local_unnamed_addr
865 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_mull(i64) local_unnamed_addr
867 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_minl(i64) local_unnamed_addr
869 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_maxl(i64) local_unnamed_addr
871 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_addl(i64) local_unnamed_addr
873 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_mull(i64) local_unnamed_addr
875 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_minl(i64) local_unnamed_addr
877 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_maxl(i64) local_unnamed_addr
879 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_addl(i64) local_unnamed_addr
881 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_mull(i64) local_unnamed_addr
883 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_minl(i64) local_unnamed_addr
885 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_maxl(i64) local_unnamed_addr
887 ; CHECK-SPIRV: OpFunction
888 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
889 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
890 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
891 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
892 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
893 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
894 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
895 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
896 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIAdd %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
897 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformIMul %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
898 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMin %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
899 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformUMax %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
900 ; CHECK-SPIRV: OpFunctionEnd
; Kernel for the ulong variants ('m' suffix = unsigned long): twelve builtin
; calls stored to dst[0..11]. The CHECK-SPIRV block above expects UMin/UMax
; (unsigned) with the same long result type as the signed kernel.
902 define dso_local spir_kernel void @testNonUniformArithmeticULong(i64 addrspace(1)* nocapture) local_unnamed_addr {
; reduce -> dst[0..3]
903 %2 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_addm(i64 0)
904 store i64 %2, i64 addrspace(1)* %0, align 8
905 %3 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_mulm(i64 0)
906 %4 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 1
907 store i64 %3, i64 addrspace(1)* %4, align 8
908 %5 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_minm(i64 0)
909 %6 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 2
910 store i64 %5, i64 addrspace(1)* %6, align 8
911 %7 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_maxm(i64 0)
912 %8 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 3
913 store i64 %7, i64 addrspace(1)* %8, align 8
; inclusive scan -> dst[4..7]
914 %9 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_addm(i64 0)
915 %10 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 4
916 store i64 %9, i64 addrspace(1)* %10, align 8
917 %11 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_mulm(i64 0)
918 %12 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 5
919 store i64 %11, i64 addrspace(1)* %12, align 8
920 %13 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_minm(i64 0)
921 %14 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 6
922 store i64 %13, i64 addrspace(1)* %14, align 8
923 %15 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_maxm(i64 0)
924 %16 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 7
925 store i64 %15, i64 addrspace(1)* %16, align 8
; exclusive scan -> dst[8..11]
926 %17 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_addm(i64 0)
927 %18 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 8
928 store i64 %17, i64 addrspace(1)* %18, align 8
929 %19 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_mulm(i64 0)
930 %20 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 9
931 store i64 %19, i64 addrspace(1)* %20, align 8
932 %21 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_minm(i64 0)
933 %22 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 10
934 store i64 %21, i64 addrspace(1)* %22, align 8
935 %23 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_maxm(i64 0)
936 %24 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 11
937 store i64 %23, i64 addrspace(1)* %24, align 8
; Itanium-mangled declarations of the ulong subgroup builtins
; ('m' suffix = unsigned long) called by testNonUniformArithmeticULong.
941 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_addm(i64) local_unnamed_addr
943 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_mulm(i64) local_unnamed_addr
945 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_minm(i64) local_unnamed_addr
947 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_maxm(i64) local_unnamed_addr
949 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_addm(i64) local_unnamed_addr
951 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_mulm(i64) local_unnamed_addr
953 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_minm(i64) local_unnamed_addr
955 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_maxm(i64) local_unnamed_addr
957 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_addm(i64) local_unnamed_addr
959 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_mulm(i64) local_unnamed_addr
961 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_minm(i64) local_unnamed_addr
963 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_maxm(i64) local_unnamed_addr
965 ; CHECK-SPIRV: OpFunction
966 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#float]] %[[#ScopeSubgroup]] Reduce %[[#float_0]]
967 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#float]] %[[#ScopeSubgroup]] Reduce %[[#float_0]]
968 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#float]] %[[#ScopeSubgroup]] Reduce %[[#float_0]]
969 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#float]] %[[#ScopeSubgroup]] Reduce %[[#float_0]]
970 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#float]] %[[#ScopeSubgroup]] InclusiveScan %[[#float_0]]
971 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#float]] %[[#ScopeSubgroup]] InclusiveScan %[[#float_0]]
972 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#float]] %[[#ScopeSubgroup]] InclusiveScan %[[#float_0]]
973 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#float]] %[[#ScopeSubgroup]] InclusiveScan %[[#float_0]]
974 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#float]] %[[#ScopeSubgroup]] ExclusiveScan %[[#float_0]]
975 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#float]] %[[#ScopeSubgroup]] ExclusiveScan %[[#float_0]]
976 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#float]] %[[#ScopeSubgroup]] ExclusiveScan %[[#float_0]]
977 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#float]] %[[#ScopeSubgroup]] ExclusiveScan %[[#float_0]]
978 ; CHECK-SPIRV: OpFunctionEnd
; Kernel for the float variants ('f' suffix): twelve builtin calls stored to
; dst[0..11]. The CHECK-SPIRV block above expects the floating-point opcodes
; OpGroupNonUniform{FAdd,FMul,FMin,FMax}.
980 define dso_local spir_kernel void @testNonUniformArithmeticFloat(float addrspace(1)* nocapture) local_unnamed_addr {
; reduce -> dst[0..3]
981 %2 = tail call spir_func float @_Z32sub_group_non_uniform_reduce_addf(float 0.000000e+00)
982 store float %2, float addrspace(1)* %0, align 4
983 %3 = tail call spir_func float @_Z32sub_group_non_uniform_reduce_mulf(float 0.000000e+00)
984 %4 = getelementptr inbounds float, float addrspace(1)* %0, i64 1
985 store float %3, float addrspace(1)* %4, align 4
986 %5 = tail call spir_func float @_Z32sub_group_non_uniform_reduce_minf(float 0.000000e+00)
987 %6 = getelementptr inbounds float, float addrspace(1)* %0, i64 2
988 store float %5, float addrspace(1)* %6, align 4
989 %7 = tail call spir_func float @_Z32sub_group_non_uniform_reduce_maxf(float 0.000000e+00)
990 %8 = getelementptr inbounds float, float addrspace(1)* %0, i64 3
991 store float %7, float addrspace(1)* %8, align 4
; inclusive scan -> dst[4..7]
992 %9 = tail call spir_func float @_Z40sub_group_non_uniform_scan_inclusive_addf(float 0.000000e+00)
993 %10 = getelementptr inbounds float, float addrspace(1)* %0, i64 4
994 store float %9, float addrspace(1)* %10, align 4
995 %11 = tail call spir_func float @_Z40sub_group_non_uniform_scan_inclusive_mulf(float 0.000000e+00)
996 %12 = getelementptr inbounds float, float addrspace(1)* %0, i64 5
997 store float %11, float addrspace(1)* %12, align 4
998 %13 = tail call spir_func float @_Z40sub_group_non_uniform_scan_inclusive_minf(float 0.000000e+00)
999 %14 = getelementptr inbounds float, float addrspace(1)* %0, i64 6
1000 store float %13, float addrspace(1)* %14, align 4
1001 %15 = tail call spir_func float @_Z40sub_group_non_uniform_scan_inclusive_maxf(float 0.000000e+00)
1002 %16 = getelementptr inbounds float, float addrspace(1)* %0, i64 7
1003 store float %15, float addrspace(1)* %16, align 4
; exclusive scan -> dst[8..11]
1004 %17 = tail call spir_func float @_Z40sub_group_non_uniform_scan_exclusive_addf(float 0.000000e+00)
1005 %18 = getelementptr inbounds float, float addrspace(1)* %0, i64 8
1006 store float %17, float addrspace(1)* %18, align 4
1007 %19 = tail call spir_func float @_Z40sub_group_non_uniform_scan_exclusive_mulf(float 0.000000e+00)
1008 %20 = getelementptr inbounds float, float addrspace(1)* %0, i64 9
1009 store float %19, float addrspace(1)* %20, align 4
1010 %21 = tail call spir_func float @_Z40sub_group_non_uniform_scan_exclusive_minf(float 0.000000e+00)
1011 %22 = getelementptr inbounds float, float addrspace(1)* %0, i64 10
1012 store float %21, float addrspace(1)* %22, align 4
1013 %23 = tail call spir_func float @_Z40sub_group_non_uniform_scan_exclusive_maxf(float 0.000000e+00)
1014 %24 = getelementptr inbounds float, float addrspace(1)* %0, i64 11
1015 store float %23, float addrspace(1)* %24, align 4
; Itanium-mangled declarations of the float subgroup builtins
; ('f' suffix = float) called by testNonUniformArithmeticFloat.
1019 declare dso_local spir_func float @_Z32sub_group_non_uniform_reduce_addf(float) local_unnamed_addr
1021 declare dso_local spir_func float @_Z32sub_group_non_uniform_reduce_mulf(float) local_unnamed_addr
1023 declare dso_local spir_func float @_Z32sub_group_non_uniform_reduce_minf(float) local_unnamed_addr
1025 declare dso_local spir_func float @_Z32sub_group_non_uniform_reduce_maxf(float) local_unnamed_addr
1027 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_inclusive_addf(float) local_unnamed_addr
1029 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_inclusive_mulf(float) local_unnamed_addr
1031 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_inclusive_minf(float) local_unnamed_addr
1033 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_inclusive_maxf(float) local_unnamed_addr
1035 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_exclusive_addf(float) local_unnamed_addr
1037 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_exclusive_mulf(float) local_unnamed_addr
1039 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_exclusive_minf(float) local_unnamed_addr
1041 declare dso_local spir_func float @_Z40sub_group_non_uniform_scan_exclusive_maxf(float) local_unnamed_addr
; FileCheck expectations for @testNonUniformArithmeticHalf below: each
; sub_group_non_uniform_* call on half must lower to the matching SPIR-V
; OpGroupNonUniformF{Add,Mul,Min,Max} with Reduce / InclusiveScan /
; ExclusiveScan group operation, in this exact order.
1043 ; CHECK-SPIRV: OpFunction
1044 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#half]] %[[#ScopeSubgroup]] Reduce %[[#half_0]]
1045 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#half]] %[[#ScopeSubgroup]] Reduce %[[#half_0]]
1046 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#half]] %[[#ScopeSubgroup]] Reduce %[[#half_0]]
1047 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#half]] %[[#ScopeSubgroup]] Reduce %[[#half_0]]
1048 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#half]] %[[#ScopeSubgroup]] InclusiveScan %[[#half_0]]
1049 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#half]] %[[#ScopeSubgroup]] InclusiveScan %[[#half_0]]
1050 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#half]] %[[#ScopeSubgroup]] InclusiveScan %[[#half_0]]
1051 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#half]] %[[#ScopeSubgroup]] InclusiveScan %[[#half_0]]
1052 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#half]] %[[#ScopeSubgroup]] ExclusiveScan %[[#half_0]]
1053 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#half]] %[[#ScopeSubgroup]] ExclusiveScan %[[#half_0]]
1054 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#half]] %[[#ScopeSubgroup]] ExclusiveScan %[[#half_0]]
1055 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#half]] %[[#ScopeSubgroup]] ExclusiveScan %[[#half_0]]
1056 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the twelve half-precision non-uniform reduce/scan results
; of the constant 0xH0000 (0.0h) into dst[0..11] (consecutive half slots of %0).
1058 define dso_local spir_kernel void @testNonUniformArithmeticHalf(half addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..3]: reduce add / mul / min / max
1059 %2 = tail call spir_func half @_Z32sub_group_non_uniform_reduce_addDh(half 0xH0000)
1060 store half %2, half addrspace(1)* %0, align 2
1061 %3 = tail call spir_func half @_Z32sub_group_non_uniform_reduce_mulDh(half 0xH0000)
1062 %4 = getelementptr inbounds half, half addrspace(1)* %0, i64 1
1063 store half %3, half addrspace(1)* %4, align 2
1064 %5 = tail call spir_func half @_Z32sub_group_non_uniform_reduce_minDh(half 0xH0000)
1065 %6 = getelementptr inbounds half, half addrspace(1)* %0, i64 2
1066 store half %5, half addrspace(1)* %6, align 2
1067 %7 = tail call spir_func half @_Z32sub_group_non_uniform_reduce_maxDh(half 0xH0000)
1068 %8 = getelementptr inbounds half, half addrspace(1)* %0, i64 3
1069 store half %7, half addrspace(1)* %8, align 2
; dst[4..7]: inclusive-scan add / mul / min / max
1070 %9 = tail call spir_func half @_Z40sub_group_non_uniform_scan_inclusive_addDh(half 0xH0000)
1071 %10 = getelementptr inbounds half, half addrspace(1)* %0, i64 4
1072 store half %9, half addrspace(1)* %10, align 2
1073 %11 = tail call spir_func half @_Z40sub_group_non_uniform_scan_inclusive_mulDh(half 0xH0000)
1074 %12 = getelementptr inbounds half, half addrspace(1)* %0, i64 5
1075 store half %11, half addrspace(1)* %12, align 2
1076 %13 = tail call spir_func half @_Z40sub_group_non_uniform_scan_inclusive_minDh(half 0xH0000)
1077 %14 = getelementptr inbounds half, half addrspace(1)* %0, i64 6
1078 store half %13, half addrspace(1)* %14, align 2
1079 %15 = tail call spir_func half @_Z40sub_group_non_uniform_scan_inclusive_maxDh(half 0xH0000)
1080 %16 = getelementptr inbounds half, half addrspace(1)* %0, i64 7
1081 store half %15, half addrspace(1)* %16, align 2
; dst[8..11]: exclusive-scan add / mul / min / max
1082 %17 = tail call spir_func half @_Z40sub_group_non_uniform_scan_exclusive_addDh(half 0xH0000)
1083 %18 = getelementptr inbounds half, half addrspace(1)* %0, i64 8
1084 store half %17, half addrspace(1)* %18, align 2
1085 %19 = tail call spir_func half @_Z40sub_group_non_uniform_scan_exclusive_mulDh(half 0xH0000)
1086 %20 = getelementptr inbounds half, half addrspace(1)* %0, i64 9
1087 store half %19, half addrspace(1)* %20, align 2
1088 %21 = tail call spir_func half @_Z40sub_group_non_uniform_scan_exclusive_minDh(half 0xH0000)
1089 %22 = getelementptr inbounds half, half addrspace(1)* %0, i64 10
1090 store half %21, half addrspace(1)* %22, align 2
1091 %23 = tail call spir_func half @_Z40sub_group_non_uniform_scan_exclusive_maxDh(half 0xH0000)
1092 %24 = getelementptr inbounds half, half addrspace(1)* %0, i64 11
1093 store half %23, half addrspace(1)* %24, align 2
; Declarations of the half-typed OpenCL sub-group non-uniform arithmetic
; built-ins ("Dh" = mangled half) used by @testNonUniformArithmeticHalf.
1097 declare dso_local spir_func half @_Z32sub_group_non_uniform_reduce_addDh(half) local_unnamed_addr
1099 declare dso_local spir_func half @_Z32sub_group_non_uniform_reduce_mulDh(half) local_unnamed_addr
1101 declare dso_local spir_func half @_Z32sub_group_non_uniform_reduce_minDh(half) local_unnamed_addr
1103 declare dso_local spir_func half @_Z32sub_group_non_uniform_reduce_maxDh(half) local_unnamed_addr
1105 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_inclusive_addDh(half) local_unnamed_addr
1107 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_inclusive_mulDh(half) local_unnamed_addr
1109 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_inclusive_minDh(half) local_unnamed_addr
1111 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_inclusive_maxDh(half) local_unnamed_addr
1113 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_exclusive_addDh(half) local_unnamed_addr
1115 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_exclusive_mulDh(half) local_unnamed_addr
1117 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_exclusive_minDh(half) local_unnamed_addr
1119 declare dso_local spir_func half @_Z40sub_group_non_uniform_scan_exclusive_maxDh(half) local_unnamed_addr
; FileCheck expectations for @testNonUniformArithmeticDouble below: same
; OpGroupNonUniformF{Add,Mul,Min,Max} pattern as the half kernel, but with
; the double result type and double 0.0 operand.
1121 ; CHECK-SPIRV: OpFunction
1122 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#double]] %[[#ScopeSubgroup]] Reduce %[[#double_0]]
1123 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#double]] %[[#ScopeSubgroup]] Reduce %[[#double_0]]
1124 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#double]] %[[#ScopeSubgroup]] Reduce %[[#double_0]]
1125 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#double]] %[[#ScopeSubgroup]] Reduce %[[#double_0]]
1126 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#double]] %[[#ScopeSubgroup]] InclusiveScan %[[#double_0]]
1127 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#double]] %[[#ScopeSubgroup]] InclusiveScan %[[#double_0]]
1128 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#double]] %[[#ScopeSubgroup]] InclusiveScan %[[#double_0]]
1129 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#double]] %[[#ScopeSubgroup]] InclusiveScan %[[#double_0]]
1130 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFAdd %[[#double]] %[[#ScopeSubgroup]] ExclusiveScan %[[#double_0]]
1131 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMul %[[#double]] %[[#ScopeSubgroup]] ExclusiveScan %[[#double_0]]
1132 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMin %[[#double]] %[[#ScopeSubgroup]] ExclusiveScan %[[#double_0]]
1133 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformFMax %[[#double]] %[[#ScopeSubgroup]] ExclusiveScan %[[#double_0]]
1134 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the twelve double-precision non-uniform reduce/scan results
; of the constant 0.0 into dst[0..11] (consecutive double slots of %0).
1136 define dso_local spir_kernel void @testNonUniformArithmeticDouble(double addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..3]: reduce add / mul / min / max
1137 %2 = tail call spir_func double @_Z32sub_group_non_uniform_reduce_addd(double 0.000000e+00)
1138 store double %2, double addrspace(1)* %0, align 8
1139 %3 = tail call spir_func double @_Z32sub_group_non_uniform_reduce_muld(double 0.000000e+00)
1140 %4 = getelementptr inbounds double, double addrspace(1)* %0, i64 1
1141 store double %3, double addrspace(1)* %4, align 8
1142 %5 = tail call spir_func double @_Z32sub_group_non_uniform_reduce_mind(double 0.000000e+00)
1143 %6 = getelementptr inbounds double, double addrspace(1)* %0, i64 2
1144 store double %5, double addrspace(1)* %6, align 8
1145 %7 = tail call spir_func double @_Z32sub_group_non_uniform_reduce_maxd(double 0.000000e+00)
1146 %8 = getelementptr inbounds double, double addrspace(1)* %0, i64 3
1147 store double %7, double addrspace(1)* %8, align 8
; dst[4..7]: inclusive-scan add / mul / min / max
1148 %9 = tail call spir_func double @_Z40sub_group_non_uniform_scan_inclusive_addd(double 0.000000e+00)
1149 %10 = getelementptr inbounds double, double addrspace(1)* %0, i64 4
1150 store double %9, double addrspace(1)* %10, align 8
1151 %11 = tail call spir_func double @_Z40sub_group_non_uniform_scan_inclusive_muld(double 0.000000e+00)
1152 %12 = getelementptr inbounds double, double addrspace(1)* %0, i64 5
1153 store double %11, double addrspace(1)* %12, align 8
1154 %13 = tail call spir_func double @_Z40sub_group_non_uniform_scan_inclusive_mind(double 0.000000e+00)
1155 %14 = getelementptr inbounds double, double addrspace(1)* %0, i64 6
1156 store double %13, double addrspace(1)* %14, align 8
1157 %15 = tail call spir_func double @_Z40sub_group_non_uniform_scan_inclusive_maxd(double 0.000000e+00)
1158 %16 = getelementptr inbounds double, double addrspace(1)* %0, i64 7
1159 store double %15, double addrspace(1)* %16, align 8
; dst[8..11]: exclusive-scan add / mul / min / max
1160 %17 = tail call spir_func double @_Z40sub_group_non_uniform_scan_exclusive_addd(double 0.000000e+00)
1161 %18 = getelementptr inbounds double, double addrspace(1)* %0, i64 8
1162 store double %17, double addrspace(1)* %18, align 8
1163 %19 = tail call spir_func double @_Z40sub_group_non_uniform_scan_exclusive_muld(double 0.000000e+00)
1164 %20 = getelementptr inbounds double, double addrspace(1)* %0, i64 9
1165 store double %19, double addrspace(1)* %20, align 8
1166 %21 = tail call spir_func double @_Z40sub_group_non_uniform_scan_exclusive_mind(double 0.000000e+00)
1167 %22 = getelementptr inbounds double, double addrspace(1)* %0, i64 10
1168 store double %21, double addrspace(1)* %22, align 8
1169 %23 = tail call spir_func double @_Z40sub_group_non_uniform_scan_exclusive_maxd(double 0.000000e+00)
1170 %24 = getelementptr inbounds double, double addrspace(1)* %0, i64 11
1171 store double %23, double addrspace(1)* %24, align 8
; Declarations of the double-typed OpenCL sub-group non-uniform arithmetic
; built-ins ("d" = mangled double) used by @testNonUniformArithmeticDouble.
1175 declare dso_local spir_func double @_Z32sub_group_non_uniform_reduce_addd(double) local_unnamed_addr
1177 declare dso_local spir_func double @_Z32sub_group_non_uniform_reduce_muld(double) local_unnamed_addr
1179 declare dso_local spir_func double @_Z32sub_group_non_uniform_reduce_mind(double) local_unnamed_addr
1181 declare dso_local spir_func double @_Z32sub_group_non_uniform_reduce_maxd(double) local_unnamed_addr
1183 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_inclusive_addd(double) local_unnamed_addr
1185 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_inclusive_muld(double) local_unnamed_addr
1187 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_inclusive_mind(double) local_unnamed_addr
1189 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_inclusive_maxd(double) local_unnamed_addr
1191 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_exclusive_addd(double) local_unnamed_addr
1193 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_exclusive_muld(double) local_unnamed_addr
1195 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_exclusive_mind(double) local_unnamed_addr
1197 declare dso_local spir_func double @_Z40sub_group_non_uniform_scan_exclusive_maxd(double) local_unnamed_addr
; FileCheck expectations for @testNonUniformBitwiseChar below: the nine
; and/or/xor built-ins on (signed) char must lower to
; OpGroupNonUniformBitwise{And,Or,Xor} with Reduce / InclusiveScan /
; ExclusiveScan on the i8 ("char") type.
1199 ; CHECK-SPIRV: OpFunction
1200 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
1201 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
1202 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
1203 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
1204 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
1205 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
1206 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
1207 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
1208 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
1209 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the nine signed-char non-uniform bitwise reduce/scan results
; of the constant 0 into dst[0..8] (consecutive i8 slots of %0).
1211 define dso_local spir_kernel void @testNonUniformBitwiseChar(i8 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..2]: reduce and / or / xor
1212 %2 = tail call spir_func signext i8 @_Z32sub_group_non_uniform_reduce_andc(i8 signext 0)
1213 store i8 %2, i8 addrspace(1)* %0, align 1
1214 %3 = tail call spir_func signext i8 @_Z31sub_group_non_uniform_reduce_orc(i8 signext 0)
1215 %4 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 1
1216 store i8 %3, i8 addrspace(1)* %4, align 1
1217 %5 = tail call spir_func signext i8 @_Z32sub_group_non_uniform_reduce_xorc(i8 signext 0)
1218 %6 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 2
1219 store i8 %5, i8 addrspace(1)* %6, align 1
; dst[3..5]: inclusive-scan and / or / xor
1220 %7 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_andc(i8 signext 0)
1221 %8 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 3
1222 store i8 %7, i8 addrspace(1)* %8, align 1
1223 %9 = tail call spir_func signext i8 @_Z39sub_group_non_uniform_scan_inclusive_orc(i8 signext 0)
1224 %10 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 4
1225 store i8 %9, i8 addrspace(1)* %10, align 1
1226 %11 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_xorc(i8 signext 0)
1227 %12 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 5
1228 store i8 %11, i8 addrspace(1)* %12, align 1
; dst[6..8]: exclusive-scan and / or / xor
1229 %13 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_andc(i8 signext 0)
1230 %14 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 6
1231 store i8 %13, i8 addrspace(1)* %14, align 1
1232 %15 = tail call spir_func signext i8 @_Z39sub_group_non_uniform_scan_exclusive_orc(i8 signext 0)
1233 %16 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 7
1234 store i8 %15, i8 addrspace(1)* %16, align 1
1235 %17 = tail call spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_xorc(i8 signext 0)
1236 %18 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 8
1237 store i8 %17, i8 addrspace(1)* %18, align 1
; Declarations of the signed-char OpenCL sub-group non-uniform bitwise
; built-ins ("c" = mangled char) used by @testNonUniformBitwiseChar.
1241 declare dso_local spir_func signext i8 @_Z32sub_group_non_uniform_reduce_andc(i8 signext) local_unnamed_addr
1243 declare dso_local spir_func signext i8 @_Z31sub_group_non_uniform_reduce_orc(i8 signext) local_unnamed_addr
1245 declare dso_local spir_func signext i8 @_Z32sub_group_non_uniform_reduce_xorc(i8 signext) local_unnamed_addr
1247 declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_andc(i8 signext) local_unnamed_addr
1249 declare dso_local spir_func signext i8 @_Z39sub_group_non_uniform_scan_inclusive_orc(i8 signext) local_unnamed_addr
1251 declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_inclusive_xorc(i8 signext) local_unnamed_addr
1253 declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_andc(i8 signext) local_unnamed_addr
1255 declare dso_local spir_func signext i8 @_Z39sub_group_non_uniform_scan_exclusive_orc(i8 signext) local_unnamed_addr
1257 declare dso_local spir_func signext i8 @_Z40sub_group_non_uniform_scan_exclusive_xorc(i8 signext) local_unnamed_addr
; FileCheck expectations for @testNonUniformBitwiseUChar below: uchar shares
; the same i8 SPIR-V type as char, so the expected opcodes are identical to
; the char kernel's (bitwise ops are signedness-agnostic).
1259 ; CHECK-SPIRV: OpFunction
1260 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
1261 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
1262 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#char]] %[[#ScopeSubgroup]] Reduce %[[#char_0]]
1263 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
1264 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
1265 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#char]] %[[#ScopeSubgroup]] InclusiveScan %[[#char_0]]
1266 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
1267 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
1268 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#char]] %[[#ScopeSubgroup]] ExclusiveScan %[[#char_0]]
1269 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the nine uchar non-uniform bitwise reduce/scan results of
; the constant 0 into dst[0..8]; "h" = mangled uchar, hence zeroext.
1271 define dso_local spir_kernel void @testNonUniformBitwiseUChar(i8 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..2]: reduce and / or / xor
1272 %2 = tail call spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_andh(i8 zeroext 0)
1273 store i8 %2, i8 addrspace(1)* %0, align 1
1274 %3 = tail call spir_func zeroext i8 @_Z31sub_group_non_uniform_reduce_orh(i8 zeroext 0)
1275 %4 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 1
1276 store i8 %3, i8 addrspace(1)* %4, align 1
1277 %5 = tail call spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_xorh(i8 zeroext 0)
1278 %6 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 2
1279 store i8 %5, i8 addrspace(1)* %6, align 1
; dst[3..5]: inclusive-scan and / or / xor
1280 %7 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_andh(i8 zeroext 0)
1281 %8 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 3
1282 store i8 %7, i8 addrspace(1)* %8, align 1
1283 %9 = tail call spir_func zeroext i8 @_Z39sub_group_non_uniform_scan_inclusive_orh(i8 zeroext 0)
1284 %10 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 4
1285 store i8 %9, i8 addrspace(1)* %10, align 1
1286 %11 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_xorh(i8 zeroext 0)
1287 %12 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 5
1288 store i8 %11, i8 addrspace(1)* %12, align 1
; dst[6..8]: exclusive-scan and / or / xor
1289 %13 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_andh(i8 zeroext 0)
1290 %14 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 6
1291 store i8 %13, i8 addrspace(1)* %14, align 1
1292 %15 = tail call spir_func zeroext i8 @_Z39sub_group_non_uniform_scan_exclusive_orh(i8 zeroext 0)
1293 %16 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 7
1294 store i8 %15, i8 addrspace(1)* %16, align 1
1295 %17 = tail call spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_xorh(i8 zeroext 0)
1296 %18 = getelementptr inbounds i8, i8 addrspace(1)* %0, i64 8
1297 store i8 %17, i8 addrspace(1)* %18, align 1
; Declarations of the uchar OpenCL sub-group non-uniform bitwise built-ins
; ("h" = mangled uchar) used by @testNonUniformBitwiseUChar.
1301 declare dso_local spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_andh(i8 zeroext) local_unnamed_addr
1303 declare dso_local spir_func zeroext i8 @_Z31sub_group_non_uniform_reduce_orh(i8 zeroext) local_unnamed_addr
1305 declare dso_local spir_func zeroext i8 @_Z32sub_group_non_uniform_reduce_xorh(i8 zeroext) local_unnamed_addr
1307 declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_andh(i8 zeroext) local_unnamed_addr
1309 declare dso_local spir_func zeroext i8 @_Z39sub_group_non_uniform_scan_inclusive_orh(i8 zeroext) local_unnamed_addr
1311 declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_inclusive_xorh(i8 zeroext) local_unnamed_addr
1313 declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_andh(i8 zeroext) local_unnamed_addr
1315 declare dso_local spir_func zeroext i8 @_Z39sub_group_non_uniform_scan_exclusive_orh(i8 zeroext) local_unnamed_addr
1317 declare dso_local spir_func zeroext i8 @_Z40sub_group_non_uniform_scan_exclusive_xorh(i8 zeroext) local_unnamed_addr
; FileCheck expectations for @testNonUniformBitwiseShort below:
; OpGroupNonUniformBitwise{And,Or,Xor} on the i16 ("short") type with
; Reduce / InclusiveScan / ExclusiveScan group operations.
1319 ; CHECK-SPIRV: OpFunction
1320 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
1321 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
1322 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
1323 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
1324 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
1325 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
1326 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
1327 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
1328 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
1329 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the nine short non-uniform bitwise reduce/scan results of
; the constant 0 into dst[0..8] (consecutive i16 slots of %0).
1331 define dso_local spir_kernel void @testNonUniformBitwiseShort(i16 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..2]: reduce and / or / xor
1332 %2 = tail call spir_func signext i16 @_Z32sub_group_non_uniform_reduce_ands(i16 signext 0)
1333 store i16 %2, i16 addrspace(1)* %0, align 2
1334 %3 = tail call spir_func signext i16 @_Z31sub_group_non_uniform_reduce_ors(i16 signext 0)
1335 %4 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 1
1336 store i16 %3, i16 addrspace(1)* %4, align 2
1337 %5 = tail call spir_func signext i16 @_Z32sub_group_non_uniform_reduce_xors(i16 signext 0)
1338 %6 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 2
1339 store i16 %5, i16 addrspace(1)* %6, align 2
; dst[3..5]: inclusive-scan and / or / xor
1340 %7 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_ands(i16 signext 0)
1341 %8 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 3
1342 store i16 %7, i16 addrspace(1)* %8, align 2
1343 %9 = tail call spir_func signext i16 @_Z39sub_group_non_uniform_scan_inclusive_ors(i16 signext 0)
1344 %10 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 4
1345 store i16 %9, i16 addrspace(1)* %10, align 2
1346 %11 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_xors(i16 signext 0)
1347 %12 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 5
1348 store i16 %11, i16 addrspace(1)* %12, align 2
; dst[6..8]: exclusive-scan and / or / xor
1349 %13 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_ands(i16 signext 0)
1350 %14 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 6
1351 store i16 %13, i16 addrspace(1)* %14, align 2
1352 %15 = tail call spir_func signext i16 @_Z39sub_group_non_uniform_scan_exclusive_ors(i16 signext 0)
1353 %16 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 7
1354 store i16 %15, i16 addrspace(1)* %16, align 2
1355 %17 = tail call spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_xors(i16 signext 0)
1356 %18 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 8
1357 store i16 %17, i16 addrspace(1)* %18, align 2
; Declarations of the short OpenCL sub-group non-uniform bitwise built-ins
; ("s" = mangled short) used by @testNonUniformBitwiseShort.
1361 declare dso_local spir_func signext i16 @_Z32sub_group_non_uniform_reduce_ands(i16 signext) local_unnamed_addr
1363 declare dso_local spir_func signext i16 @_Z31sub_group_non_uniform_reduce_ors(i16 signext) local_unnamed_addr
1365 declare dso_local spir_func signext i16 @_Z32sub_group_non_uniform_reduce_xors(i16 signext) local_unnamed_addr
1367 declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_ands(i16 signext) local_unnamed_addr
1369 declare dso_local spir_func signext i16 @_Z39sub_group_non_uniform_scan_inclusive_ors(i16 signext) local_unnamed_addr
1371 declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_inclusive_xors(i16 signext) local_unnamed_addr
1373 declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_ands(i16 signext) local_unnamed_addr
1375 declare dso_local spir_func signext i16 @_Z39sub_group_non_uniform_scan_exclusive_ors(i16 signext) local_unnamed_addr
1377 declare dso_local spir_func signext i16 @_Z40sub_group_non_uniform_scan_exclusive_xors(i16 signext) local_unnamed_addr
; FileCheck expectations for @testNonUniformBitwiseUShort below: ushort
; shares the i16 SPIR-V type with short, so the expected opcodes match the
; short kernel's (bitwise ops are signedness-agnostic).
1379 ; CHECK-SPIRV: OpFunction
1380 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
1381 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
1382 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#short]] %[[#ScopeSubgroup]] Reduce %[[#short_0]]
1383 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
1384 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
1385 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#short]] %[[#ScopeSubgroup]] InclusiveScan %[[#short_0]]
1386 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
1387 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
1388 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#short]] %[[#ScopeSubgroup]] ExclusiveScan %[[#short_0]]
1389 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the nine ushort non-uniform bitwise reduce/scan results of
; the constant 0 into dst[0..8]; "t" = mangled ushort, hence zeroext.
1391 define dso_local spir_kernel void @testNonUniformBitwiseUShort(i16 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..2]: reduce and / or / xor
1392 %2 = tail call spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_andt(i16 zeroext 0)
1393 store i16 %2, i16 addrspace(1)* %0, align 2
1394 %3 = tail call spir_func zeroext i16 @_Z31sub_group_non_uniform_reduce_ort(i16 zeroext 0)
1395 %4 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 1
1396 store i16 %3, i16 addrspace(1)* %4, align 2
1397 %5 = tail call spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_xort(i16 zeroext 0)
1398 %6 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 2
1399 store i16 %5, i16 addrspace(1)* %6, align 2
; dst[3..5]: inclusive-scan and / or / xor
1400 %7 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_andt(i16 zeroext 0)
1401 %8 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 3
1402 store i16 %7, i16 addrspace(1)* %8, align 2
1403 %9 = tail call spir_func zeroext i16 @_Z39sub_group_non_uniform_scan_inclusive_ort(i16 zeroext 0)
1404 %10 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 4
1405 store i16 %9, i16 addrspace(1)* %10, align 2
1406 %11 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_xort(i16 zeroext 0)
1407 %12 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 5
1408 store i16 %11, i16 addrspace(1)* %12, align 2
; dst[6..8]: exclusive-scan and / or / xor
1409 %13 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_andt(i16 zeroext 0)
1410 %14 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 6
1411 store i16 %13, i16 addrspace(1)* %14, align 2
1412 %15 = tail call spir_func zeroext i16 @_Z39sub_group_non_uniform_scan_exclusive_ort(i16 zeroext 0)
1413 %16 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 7
1414 store i16 %15, i16 addrspace(1)* %16, align 2
1415 %17 = tail call spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_xort(i16 zeroext 0)
1416 %18 = getelementptr inbounds i16, i16 addrspace(1)* %0, i64 8
1417 store i16 %17, i16 addrspace(1)* %18, align 2
; Declarations of the ushort OpenCL sub-group non-uniform bitwise built-ins
; ("t" = mangled ushort) used by @testNonUniformBitwiseUShort.
1421 declare dso_local spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_andt(i16 zeroext) local_unnamed_addr
1423 declare dso_local spir_func zeroext i16 @_Z31sub_group_non_uniform_reduce_ort(i16 zeroext) local_unnamed_addr
1425 declare dso_local spir_func zeroext i16 @_Z32sub_group_non_uniform_reduce_xort(i16 zeroext) local_unnamed_addr
1427 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_andt(i16 zeroext) local_unnamed_addr
1429 declare dso_local spir_func zeroext i16 @_Z39sub_group_non_uniform_scan_inclusive_ort(i16 zeroext) local_unnamed_addr
1431 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_inclusive_xort(i16 zeroext) local_unnamed_addr
1433 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_andt(i16 zeroext) local_unnamed_addr
1435 declare dso_local spir_func zeroext i16 @_Z39sub_group_non_uniform_scan_exclusive_ort(i16 zeroext) local_unnamed_addr
1437 declare dso_local spir_func zeroext i16 @_Z40sub_group_non_uniform_scan_exclusive_xort(i16 zeroext) local_unnamed_addr
; FileCheck expectations for @testNonUniformBitwiseInt below:
; OpGroupNonUniformBitwise{And,Or,Xor} on the i32 ("int") type with
; Reduce / InclusiveScan / ExclusiveScan group operations.
1439 ; CHECK-SPIRV: OpFunction
1440 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
1441 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
1442 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
1443 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
1444 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
1445 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
1446 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
1447 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
1448 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
1449 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the nine int non-uniform bitwise reduce/scan results of
; the constant 0 into dst[0..8] (consecutive i32 slots of %0).
1451 define dso_local spir_kernel void @testNonUniformBitwiseInt(i32 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..2]: reduce and / or / xor
1452 %2 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_andi(i32 0)
1453 store i32 %2, i32 addrspace(1)* %0, align 4
1454 %3 = tail call spir_func i32 @_Z31sub_group_non_uniform_reduce_ori(i32 0)
1455 %4 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 1
1456 store i32 %3, i32 addrspace(1)* %4, align 4
1457 %5 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_xori(i32 0)
1458 %6 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 2
1459 store i32 %5, i32 addrspace(1)* %6, align 4
; dst[3..5]: inclusive-scan and / or / xor
1460 %7 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_andi(i32 0)
1461 %8 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 3
1462 store i32 %7, i32 addrspace(1)* %8, align 4
1463 %9 = tail call spir_func i32 @_Z39sub_group_non_uniform_scan_inclusive_ori(i32 0)
1464 %10 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 4
1465 store i32 %9, i32 addrspace(1)* %10, align 4
1466 %11 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_xori(i32 0)
1467 %12 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 5
1468 store i32 %11, i32 addrspace(1)* %12, align 4
; dst[6..8]: exclusive-scan and / or / xor
1469 %13 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_andi(i32 0)
1470 %14 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 6
1471 store i32 %13, i32 addrspace(1)* %14, align 4
1472 %15 = tail call spir_func i32 @_Z39sub_group_non_uniform_scan_exclusive_ori(i32 0)
1473 %16 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 7
1474 store i32 %15, i32 addrspace(1)* %16, align 4
1475 %17 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_xori(i32 0)
1476 %18 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 8
1477 store i32 %17, i32 addrspace(1)* %18, align 4
; Declarations of the int OpenCL sub-group non-uniform bitwise built-ins
; ("i" = mangled int) used by @testNonUniformBitwiseInt.
1481 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_andi(i32) local_unnamed_addr
1483 declare dso_local spir_func i32 @_Z31sub_group_non_uniform_reduce_ori(i32) local_unnamed_addr
1485 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_xori(i32) local_unnamed_addr
1487 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_andi(i32) local_unnamed_addr
1489 declare dso_local spir_func i32 @_Z39sub_group_non_uniform_scan_inclusive_ori(i32) local_unnamed_addr
1491 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_xori(i32) local_unnamed_addr
1493 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_andi(i32) local_unnamed_addr
1495 declare dso_local spir_func i32 @_Z39sub_group_non_uniform_scan_exclusive_ori(i32) local_unnamed_addr
1497 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_xori(i32) local_unnamed_addr
; FileCheck expectations for @testNonUniformBitwiseUInt below: uint shares
; the i32 SPIR-V type with int, so the expected opcodes match the int
; kernel's (bitwise ops are signedness-agnostic).
1499 ; CHECK-SPIRV: OpFunction
1500 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
1501 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
1502 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#int]] %[[#ScopeSubgroup]] Reduce %[[#int_0]]
1503 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
1504 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
1505 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#int]] %[[#ScopeSubgroup]] InclusiveScan %[[#int_0]]
1506 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
1507 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
1508 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#int]] %[[#ScopeSubgroup]] ExclusiveScan %[[#int_0]]
1509 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: stores the nine uint non-uniform bitwise reduce/scan results of
; the constant 0 into dst[0..8]; "j" = mangled uint.
1511 define dso_local spir_kernel void @testNonUniformBitwiseUInt(i32 addrspace(1)* nocapture) local_unnamed_addr {
; dst[0..2]: reduce and / or / xor
1512 %2 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_andj(i32 0)
1513 store i32 %2, i32 addrspace(1)* %0, align 4
1514 %3 = tail call spir_func i32 @_Z31sub_group_non_uniform_reduce_orj(i32 0)
1515 %4 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 1
1516 store i32 %3, i32 addrspace(1)* %4, align 4
1517 %5 = tail call spir_func i32 @_Z32sub_group_non_uniform_reduce_xorj(i32 0)
1518 %6 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 2
1519 store i32 %5, i32 addrspace(1)* %6, align 4
; dst[3..5]: inclusive-scan and / or / xor
1520 %7 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_andj(i32 0)
1521 %8 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 3
1522 store i32 %7, i32 addrspace(1)* %8, align 4
1523 %9 = tail call spir_func i32 @_Z39sub_group_non_uniform_scan_inclusive_orj(i32 0)
1524 %10 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 4
1525 store i32 %9, i32 addrspace(1)* %10, align 4
1526 %11 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_xorj(i32 0)
1527 %12 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 5
1528 store i32 %11, i32 addrspace(1)* %12, align 4
; dst[6..8]: exclusive-scan and / or / xor
1529 %13 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_andj(i32 0)
1530 %14 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 6
1531 store i32 %13, i32 addrspace(1)* %14, align 4
1532 %15 = tail call spir_func i32 @_Z39sub_group_non_uniform_scan_exclusive_orj(i32 0)
1533 %16 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 7
1534 store i32 %15, i32 addrspace(1)* %16, align 4
1535 %17 = tail call spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_xorj(i32 0)
1536 %18 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 8
1537 store i32 %17, i32 addrspace(1)* %18, align 4
; External declarations of the OpenCL C sub-group non-uniform bitwise
; built-ins for unsigned int (mangling suffix 'j'); resolved by the
; SPIR-V builtin lowering, not by linking.
1541 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_andj(i32) local_unnamed_addr
1543 declare dso_local spir_func i32 @_Z31sub_group_non_uniform_reduce_orj(i32) local_unnamed_addr
1545 declare dso_local spir_func i32 @_Z32sub_group_non_uniform_reduce_xorj(i32) local_unnamed_addr
1547 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_andj(i32) local_unnamed_addr
1549 declare dso_local spir_func i32 @_Z39sub_group_non_uniform_scan_inclusive_orj(i32) local_unnamed_addr
1551 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_inclusive_xorj(i32) local_unnamed_addr
1553 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_andj(i32) local_unnamed_addr
1555 declare dso_local spir_func i32 @_Z39sub_group_non_uniform_scan_exclusive_orj(i32) local_unnamed_addr
1557 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_scan_exclusive_xorj(i32) local_unnamed_addr
1559 ; CHECK-SPIRV: OpFunction
1560 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
1561 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
1562 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
1563 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
1564 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
1565 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
1566 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
1567 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
1568 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
1569 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: same nine bitwise built-ins as the uint variant, but for (signed)
; long — mangling suffix 'l', i64 values, 8-byte stores. The FileCheck block
; above expects an i64 result type for each OpGroupNonUniformBitwise* op.
; NOTE(review): the kernel's closing `ret void` / `}` lie outside this excerpt.
1571 define dso_local spir_kernel void @testNonUniformBitwiseLong(i64 addrspace(1)* nocapture) local_unnamed_addr {
; --- Reduce: dst[0..2] ---
1572 %2 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_andl(i64 0)
1573 store i64 %2, i64 addrspace(1)* %0, align 8
1574 %3 = tail call spir_func i64 @_Z31sub_group_non_uniform_reduce_orl(i64 0)
1575 %4 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 1
1576 store i64 %3, i64 addrspace(1)* %4, align 8
1577 %5 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_xorl(i64 0)
1578 %6 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 2
1579 store i64 %5, i64 addrspace(1)* %6, align 8
; --- InclusiveScan: dst[3..5] ---
1580 %7 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_andl(i64 0)
1581 %8 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 3
1582 store i64 %7, i64 addrspace(1)* %8, align 8
1583 %9 = tail call spir_func i64 @_Z39sub_group_non_uniform_scan_inclusive_orl(i64 0)
1584 %10 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 4
1585 store i64 %9, i64 addrspace(1)* %10, align 8
1586 %11 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_xorl(i64 0)
1587 %12 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 5
1588 store i64 %11, i64 addrspace(1)* %12, align 8
; --- ExclusiveScan: dst[6..8] ---
1589 %13 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_andl(i64 0)
1590 %14 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 6
1591 store i64 %13, i64 addrspace(1)* %14, align 8
1592 %15 = tail call spir_func i64 @_Z39sub_group_non_uniform_scan_exclusive_orl(i64 0)
1593 %16 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 7
1594 store i64 %15, i64 addrspace(1)* %16, align 8
1595 %17 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_xorl(i64 0)
1596 %18 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 8
1597 store i64 %17, i64 addrspace(1)* %18, align 8
; External declarations of the sub-group non-uniform bitwise built-ins
; for long (mangling suffix 'l').
1601 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_andl(i64) local_unnamed_addr
1603 declare dso_local spir_func i64 @_Z31sub_group_non_uniform_reduce_orl(i64) local_unnamed_addr
1605 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_xorl(i64) local_unnamed_addr
1607 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_andl(i64) local_unnamed_addr
1609 declare dso_local spir_func i64 @_Z39sub_group_non_uniform_scan_inclusive_orl(i64) local_unnamed_addr
1611 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_xorl(i64) local_unnamed_addr
1613 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_andl(i64) local_unnamed_addr
1615 declare dso_local spir_func i64 @_Z39sub_group_non_uniform_scan_exclusive_orl(i64) local_unnamed_addr
1617 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_xorl(i64) local_unnamed_addr
1619 ; CHECK-SPIRV: OpFunction
1620 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
1621 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
1622 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#long]] %[[#ScopeSubgroup]] Reduce %[[#long_0]]
1623 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
1624 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
1625 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#long]] %[[#ScopeSubgroup]] InclusiveScan %[[#long_0]]
1626 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseAnd %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
1627 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseOr %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
1628 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformBitwiseXor %[[#long]] %[[#ScopeSubgroup]] ExclusiveScan %[[#long_0]]
1629 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: ulong variant of the bitwise tests — identical IR shape to the long
; kernel except the mangling suffix is 'm' (unsigned long); both map to i64,
; so the FileCheck expectations above reuse the same i64 result-type ids.
; NOTE(review): the kernel's closing `ret void` / `}` lie outside this excerpt.
1631 define dso_local spir_kernel void @testNonUniformBitwiseULong(i64 addrspace(1)* nocapture) local_unnamed_addr {
; --- Reduce: dst[0..2] ---
1632 %2 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_andm(i64 0)
1633 store i64 %2, i64 addrspace(1)* %0, align 8
1634 %3 = tail call spir_func i64 @_Z31sub_group_non_uniform_reduce_orm(i64 0)
1635 %4 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 1
1636 store i64 %3, i64 addrspace(1)* %4, align 8
1637 %5 = tail call spir_func i64 @_Z32sub_group_non_uniform_reduce_xorm(i64 0)
1638 %6 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 2
1639 store i64 %5, i64 addrspace(1)* %6, align 8
; --- InclusiveScan: dst[3..5] ---
1640 %7 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_andm(i64 0)
1641 %8 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 3
1642 store i64 %7, i64 addrspace(1)* %8, align 8
1643 %9 = tail call spir_func i64 @_Z39sub_group_non_uniform_scan_inclusive_orm(i64 0)
1644 %10 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 4
1645 store i64 %9, i64 addrspace(1)* %10, align 8
1646 %11 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_xorm(i64 0)
1647 %12 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 5
1648 store i64 %11, i64 addrspace(1)* %12, align 8
; --- ExclusiveScan: dst[6..8] ---
1649 %13 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_andm(i64 0)
1650 %14 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 6
1651 store i64 %13, i64 addrspace(1)* %14, align 8
1652 %15 = tail call spir_func i64 @_Z39sub_group_non_uniform_scan_exclusive_orm(i64 0)
1653 %16 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 7
1654 store i64 %15, i64 addrspace(1)* %16, align 8
1655 %17 = tail call spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_xorm(i64 0)
1656 %18 = getelementptr inbounds i64, i64 addrspace(1)* %0, i64 8
1657 store i64 %17, i64 addrspace(1)* %18, align 8
; External declarations of the sub-group non-uniform bitwise built-ins
; for unsigned long (mangling suffix 'm').
1661 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_andm(i64) local_unnamed_addr
1663 declare dso_local spir_func i64 @_Z31sub_group_non_uniform_reduce_orm(i64) local_unnamed_addr
1665 declare dso_local spir_func i64 @_Z32sub_group_non_uniform_reduce_xorm(i64) local_unnamed_addr
1667 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_andm(i64) local_unnamed_addr
1669 declare dso_local spir_func i64 @_Z39sub_group_non_uniform_scan_inclusive_orm(i64) local_unnamed_addr
1671 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_inclusive_xorm(i64) local_unnamed_addr
1673 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_andm(i64) local_unnamed_addr
1675 declare dso_local spir_func i64 @_Z39sub_group_non_uniform_scan_exclusive_orm(i64) local_unnamed_addr
1677 declare dso_local spir_func i64 @_Z40sub_group_non_uniform_scan_exclusive_xorm(i64) local_unnamed_addr
1679 ; CHECK-SPIRV: OpFunction
1680 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalAnd %[[#bool]] %[[#ScopeSubgroup]] Reduce %[[#false]]
1681 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalOr %[[#bool]] %[[#ScopeSubgroup]] Reduce %[[#false]]
1682 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalXor %[[#bool]] %[[#ScopeSubgroup]] Reduce %[[#false]]
1683 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalAnd %[[#bool]] %[[#ScopeSubgroup]] InclusiveScan %[[#false]]
1684 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalOr %[[#bool]] %[[#ScopeSubgroup]] InclusiveScan %[[#false]]
1685 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalXor %[[#bool]] %[[#ScopeSubgroup]] InclusiveScan %[[#false]]
1686 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalAnd %[[#bool]] %[[#ScopeSubgroup]] ExclusiveScan %[[#false]]
1687 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalOr %[[#bool]] %[[#ScopeSubgroup]] ExclusiveScan %[[#false]]
1688 ; CHECK-SPIRV: %[[#]] = OpGroupNonUniformLogicalXor %[[#bool]] %[[#ScopeSubgroup]] ExclusiveScan %[[#false]]
1689 ; CHECK-SPIRV: OpFunctionEnd
; Kernel: exercises the *logical* and/or/xor built-ins (Reduce / InclusiveScan
; / ExclusiveScan) on an OpenCL int predicate — mangling suffix 'i'. The
; FileCheck block above expects these to lower to OpGroupNonUniformLogical*
; with a bool result type and a false constant operand (i32 0 converted to
; bool), unlike the bitwise kernels which keep the integer type.
; NOTE(review): the kernel's closing `ret void` / `}` lie outside this excerpt.
1691 define dso_local spir_kernel void @testNonUniformLogical(i32 addrspace(1)* nocapture) local_unnamed_addr {
; --- Reduce: dst[0..2] ---
1692 %2 = tail call spir_func i32 @_Z40sub_group_non_uniform_reduce_logical_andi(i32 0)
1693 store i32 %2, i32 addrspace(1)* %0, align 4
1694 %3 = tail call spir_func i32 @_Z39sub_group_non_uniform_reduce_logical_ori(i32 0)
1695 %4 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 1
1696 store i32 %3, i32 addrspace(1)* %4, align 4
1697 %5 = tail call spir_func i32 @_Z40sub_group_non_uniform_reduce_logical_xori(i32 0)
1698 %6 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 2
1699 store i32 %5, i32 addrspace(1)* %6, align 4
; --- InclusiveScan: dst[3..5] ---
1700 %7 = tail call spir_func i32 @_Z48sub_group_non_uniform_scan_inclusive_logical_andi(i32 0)
1701 %8 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 3
1702 store i32 %7, i32 addrspace(1)* %8, align 4
1703 %9 = tail call spir_func i32 @_Z47sub_group_non_uniform_scan_inclusive_logical_ori(i32 0)
1704 %10 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 4
1705 store i32 %9, i32 addrspace(1)* %10, align 4
1706 %11 = tail call spir_func i32 @_Z48sub_group_non_uniform_scan_inclusive_logical_xori(i32 0)
1707 %12 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 5
1708 store i32 %11, i32 addrspace(1)* %12, align 4
; --- ExclusiveScan: dst[6..8] ---
1709 %13 = tail call spir_func i32 @_Z48sub_group_non_uniform_scan_exclusive_logical_andi(i32 0)
1710 %14 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 6
1711 store i32 %13, i32 addrspace(1)* %14, align 4
1712 %15 = tail call spir_func i32 @_Z47sub_group_non_uniform_scan_exclusive_logical_ori(i32 0)
1713 %16 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 7
1714 store i32 %15, i32 addrspace(1)* %16, align 4
1715 %17 = tail call spir_func i32 @_Z48sub_group_non_uniform_scan_exclusive_logical_xori(i32 0)
1716 %18 = getelementptr inbounds i32, i32 addrspace(1)* %0, i64 8
1717 store i32 %17, i32 addrspace(1)* %18, align 4
; External declarations of the sub-group non-uniform logical built-ins
; for int (mangling suffix 'i').
1721 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_reduce_logical_andi(i32) local_unnamed_addr
1723 declare dso_local spir_func i32 @_Z39sub_group_non_uniform_reduce_logical_ori(i32) local_unnamed_addr
1725 declare dso_local spir_func i32 @_Z40sub_group_non_uniform_reduce_logical_xori(i32) local_unnamed_addr
1727 declare dso_local spir_func i32 @_Z48sub_group_non_uniform_scan_inclusive_logical_andi(i32) local_unnamed_addr
1729 declare dso_local spir_func i32 @_Z47sub_group_non_uniform_scan_inclusive_logical_ori(i32) local_unnamed_addr
1731 declare dso_local spir_func i32 @_Z48sub_group_non_uniform_scan_inclusive_logical_xori(i32) local_unnamed_addr
1733 declare dso_local spir_func i32 @_Z48sub_group_non_uniform_scan_exclusive_logical_andi(i32) local_unnamed_addr
1735 declare dso_local spir_func i32 @_Z47sub_group_non_uniform_scan_exclusive_logical_ori(i32) local_unnamed_addr
1737 declare dso_local spir_func i32 @_Z48sub_group_non_uniform_scan_exclusive_logical_xori(i32) local_unnamed_addr