/*===---- __clang_cuda_device_functions.h - CUDA runtime support -----------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __CLANG_CUDA_DEVICE_FUNCTIONS_H__
#define __CLANG_CUDA_DEVICE_FUNCTIONS_H__

#ifndef __OPENMP_NVPTX__
#if CUDA_VERSION < 9000
#error This file is intended to be used with CUDA-9+ only.
#endif
#endif

// __DEVICE__ is a helper macro with a common set of attributes for the
// wrappers we implement in this file. We need static in order to avoid
// emitting unused functions, and __forceinline__ helps inline these wrappers
// at -O1.
#pragma push_macro("__DEVICE__")
#ifdef __OPENMP_NVPTX__
#define __DEVICE__ static __attribute__((always_inline, nothrow))
#else
#define __DEVICE__ static __device__ __forceinline__
#endif
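// For illustration: when compiling CUDA, the wrapper on the next line expands
// to
//   static __device__ __forceinline__ int __all(int __a) {
//     return __nvvm_vote_all(__a);
//   }
// i.e. an internal-linkage device function that is trivially inlined and not
// emitted at all if unused.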
__DEVICE__ int __all(int __a) { return __nvvm_vote_all(__a); }
__DEVICE__ int __any(int __a) { return __nvvm_vote_any(__a); }
__DEVICE__ unsigned int __ballot(int __a) { return __nvvm_vote_ballot(__a); }
__DEVICE__ unsigned int __brev(unsigned int __a) { return __nv_brev(__a); }
__DEVICE__ unsigned long long __brevll(unsigned long long __a) {
  return __nv_brevll(__a);
}
#if defined(__cplusplus)
__DEVICE__ void __brkpt() { __asm__ __volatile__("brkpt;"); }
__DEVICE__ void __brkpt(int __a) { __brkpt(); }
#else
__DEVICE__ void __attribute__((overloadable)) __brkpt(void) {
  __asm__ __volatile__("brkpt;");
}
__DEVICE__ void __attribute__((overloadable)) __brkpt(int __a) { __brkpt(); }
#endif
__DEVICE__ unsigned int __byte_perm(unsigned int __a, unsigned int __b,
                                    unsigned int __c) {
  return __nv_byte_perm(__a, __b, __c);
}
__DEVICE__ int __clz(int __a) { return __nv_clz(__a); }
__DEVICE__ int __clzll(long long __a) { return __nv_clzll(__a); }
__DEVICE__ float __cosf(float __a) { return __nv_fast_cosf(__a); }
__DEVICE__ double __dAtomicAdd(double *__p, double __v) {
  return __nvvm_atom_add_gen_d(__p, __v);
}
__DEVICE__ double __dAtomicAdd_block(double *__p, double __v) {
  return __nvvm_atom_cta_add_gen_d(__p, __v);
}
__DEVICE__ double __dAtomicAdd_system(double *__p, double __v) {
  return __nvvm_atom_sys_add_gen_d(__p, __v);
}
__DEVICE__ double __dadd_rd(double __a, double __b) {
  return __nv_dadd_rd(__a, __b);
}
__DEVICE__ double __dadd_rn(double __a, double __b) {
  return __nv_dadd_rn(__a, __b);
}
__DEVICE__ double __dadd_ru(double __a, double __b) {
  return __nv_dadd_ru(__a, __b);
}
__DEVICE__ double __dadd_rz(double __a, double __b) {
  return __nv_dadd_rz(__a, __b);
}
__DEVICE__ double __ddiv_rd(double __a, double __b) {
  return __nv_ddiv_rd(__a, __b);
}
__DEVICE__ double __ddiv_rn(double __a, double __b) {
  return __nv_ddiv_rn(__a, __b);
}
__DEVICE__ double __ddiv_ru(double __a, double __b) {
  return __nv_ddiv_ru(__a, __b);
}
__DEVICE__ double __ddiv_rz(double __a, double __b) {
  return __nv_ddiv_rz(__a, __b);
}
__DEVICE__ double __dmul_rd(double __a, double __b) {
  return __nv_dmul_rd(__a, __b);
}
__DEVICE__ double __dmul_rn(double __a, double __b) {
  return __nv_dmul_rn(__a, __b);
}
__DEVICE__ double __dmul_ru(double __a, double __b) {
  return __nv_dmul_ru(__a, __b);
}
__DEVICE__ double __dmul_rz(double __a, double __b) {
  return __nv_dmul_rz(__a, __b);
}
__DEVICE__ float __double2float_rd(double __a) {
  return __nv_double2float_rd(__a);
}
__DEVICE__ float __double2float_rn(double __a) {
  return __nv_double2float_rn(__a);
}
__DEVICE__ float __double2float_ru(double __a) {
  return __nv_double2float_ru(__a);
}
__DEVICE__ float __double2float_rz(double __a) {
  return __nv_double2float_rz(__a);
}
__DEVICE__ int __double2hiint(double __a) { return __nv_double2hiint(__a); }
__DEVICE__ int __double2int_rd(double __a) { return __nv_double2int_rd(__a); }
__DEVICE__ int __double2int_rn(double __a) { return __nv_double2int_rn(__a); }
__DEVICE__ int __double2int_ru(double __a) { return __nv_double2int_ru(__a); }
__DEVICE__ int __double2int_rz(double __a) { return __nv_double2int_rz(__a); }
__DEVICE__ long long __double2ll_rd(double __a) {
  return __nv_double2ll_rd(__a);
}
__DEVICE__ long long __double2ll_rn(double __a) {
  return __nv_double2ll_rn(__a);
}
__DEVICE__ long long __double2ll_ru(double __a) {
  return __nv_double2ll_ru(__a);
}
__DEVICE__ long long __double2ll_rz(double __a) {
  return __nv_double2ll_rz(__a);
}
__DEVICE__ int __double2loint(double __a) { return __nv_double2loint(__a); }
__DEVICE__ unsigned int __double2uint_rd(double __a) {
  return __nv_double2uint_rd(__a);
}
__DEVICE__ unsigned int __double2uint_rn(double __a) {
  return __nv_double2uint_rn(__a);
}
__DEVICE__ unsigned int __double2uint_ru(double __a) {
  return __nv_double2uint_ru(__a);
}
__DEVICE__ unsigned int __double2uint_rz(double __a) {
  return __nv_double2uint_rz(__a);
}
__DEVICE__ unsigned long long __double2ull_rd(double __a) {
  return __nv_double2ull_rd(__a);
}
__DEVICE__ unsigned long long __double2ull_rn(double __a) {
  return __nv_double2ull_rn(__a);
}
__DEVICE__ unsigned long long __double2ull_ru(double __a) {
  return __nv_double2ull_ru(__a);
}
__DEVICE__ unsigned long long __double2ull_rz(double __a) {
  return __nv_double2ull_rz(__a);
}
__DEVICE__ long long __double_as_longlong(double __a) {
  return __nv_double_as_longlong(__a);
}
__DEVICE__ double __drcp_rd(double __a) { return __nv_drcp_rd(__a); }
__DEVICE__ double __drcp_rn(double __a) { return __nv_drcp_rn(__a); }
__DEVICE__ double __drcp_ru(double __a) { return __nv_drcp_ru(__a); }
__DEVICE__ double __drcp_rz(double __a) { return __nv_drcp_rz(__a); }
__DEVICE__ double __dsqrt_rd(double __a) { return __nv_dsqrt_rd(__a); }
__DEVICE__ double __dsqrt_rn(double __a) { return __nv_dsqrt_rn(__a); }
__DEVICE__ double __dsqrt_ru(double __a) { return __nv_dsqrt_ru(__a); }
__DEVICE__ double __dsqrt_rz(double __a) { return __nv_dsqrt_rz(__a); }
__DEVICE__ double __dsub_rd(double __a, double __b) {
  return __nv_dsub_rd(__a, __b);
}
__DEVICE__ double __dsub_rn(double __a, double __b) {
  return __nv_dsub_rn(__a, __b);
}
__DEVICE__ double __dsub_ru(double __a, double __b) {
  return __nv_dsub_ru(__a, __b);
}
__DEVICE__ double __dsub_rz(double __a, double __b) {
  return __nv_dsub_rz(__a, __b);
}
__DEVICE__ float __exp10f(float __a) { return __nv_fast_exp10f(__a); }
__DEVICE__ float __expf(float __a) { return __nv_fast_expf(__a); }
__DEVICE__ float __fAtomicAdd(float *__p, float __v) {
  return __nvvm_atom_add_gen_f(__p, __v);
}
__DEVICE__ float __fAtomicAdd_block(float *__p, float __v) {
  return __nvvm_atom_cta_add_gen_f(__p, __v);
}
__DEVICE__ float __fAtomicAdd_system(float *__p, float __v) {
  return __nvvm_atom_sys_add_gen_f(__p, __v);
}
__DEVICE__ float __fAtomicExch(float *__p, float __v) {
  return __nv_int_as_float(
      __nvvm_atom_xchg_gen_i((int *)__p, __nv_float_as_int(__v)));
}
__DEVICE__ float __fAtomicExch_block(float *__p, float __v) {
  return __nv_int_as_float(
      __nvvm_atom_cta_xchg_gen_i((int *)__p, __nv_float_as_int(__v)));
}
__DEVICE__ float __fAtomicExch_system(float *__p, float __v) {
  return __nv_int_as_float(
      __nvvm_atom_sys_xchg_gen_i((int *)__p, __nv_float_as_int(__v)));
}
__DEVICE__ float __fadd_rd(float __a, float __b) {
  return __nv_fadd_rd(__a, __b);
}
__DEVICE__ float __fadd_rn(float __a, float __b) {
  return __nv_fadd_rn(__a, __b);
}
__DEVICE__ float __fadd_ru(float __a, float __b) {
  return __nv_fadd_ru(__a, __b);
}
__DEVICE__ float __fadd_rz(float __a, float __b) {
  return __nv_fadd_rz(__a, __b);
}
__DEVICE__ float __fdiv_rd(float __a, float __b) {
  return __nv_fdiv_rd(__a, __b);
}
__DEVICE__ float __fdiv_rn(float __a, float __b) {
  return __nv_fdiv_rn(__a, __b);
}
__DEVICE__ float __fdiv_ru(float __a, float __b) {
  return __nv_fdiv_ru(__a, __b);
}
__DEVICE__ float __fdiv_rz(float __a, float __b) {
  return __nv_fdiv_rz(__a, __b);
}
__DEVICE__ float __fdividef(float __a, float __b) {
  return __nv_fast_fdividef(__a, __b);
}
__DEVICE__ int __ffs(int __a) { return __nv_ffs(__a); }
__DEVICE__ int __ffsll(long long __a) { return __nv_ffsll(__a); }
__DEVICE__ int __finite(double __a) { return __nv_isfinited(__a); }
__DEVICE__ int __finitef(float __a) { return __nv_finitef(__a); }
#ifdef _MSC_VER
__DEVICE__ int __finitel(long double __a);
#endif
__DEVICE__ int __float2int_rd(float __a) { return __nv_float2int_rd(__a); }
__DEVICE__ int __float2int_rn(float __a) { return __nv_float2int_rn(__a); }
__DEVICE__ int __float2int_ru(float __a) { return __nv_float2int_ru(__a); }
__DEVICE__ int __float2int_rz(float __a) { return __nv_float2int_rz(__a); }
__DEVICE__ long long __float2ll_rd(float __a) { return __nv_float2ll_rd(__a); }
__DEVICE__ long long __float2ll_rn(float __a) { return __nv_float2ll_rn(__a); }
__DEVICE__ long long __float2ll_ru(float __a) { return __nv_float2ll_ru(__a); }
__DEVICE__ long long __float2ll_rz(float __a) { return __nv_float2ll_rz(__a); }
__DEVICE__ unsigned int __float2uint_rd(float __a) {
  return __nv_float2uint_rd(__a);
}
__DEVICE__ unsigned int __float2uint_rn(float __a) {
  return __nv_float2uint_rn(__a);
}
__DEVICE__ unsigned int __float2uint_ru(float __a) {
  return __nv_float2uint_ru(__a);
}
__DEVICE__ unsigned int __float2uint_rz(float __a) {
  return __nv_float2uint_rz(__a);
}
__DEVICE__ unsigned long long __float2ull_rd(float __a) {
  return __nv_float2ull_rd(__a);
}
__DEVICE__ unsigned long long __float2ull_rn(float __a) {
  return __nv_float2ull_rn(__a);
}
__DEVICE__ unsigned long long __float2ull_ru(float __a) {
  return __nv_float2ull_ru(__a);
}
__DEVICE__ unsigned long long __float2ull_rz(float __a) {
  return __nv_float2ull_rz(__a);
}
__DEVICE__ int __float_as_int(float __a) { return __nv_float_as_int(__a); }
__DEVICE__ unsigned int __float_as_uint(float __a) {
  return __nv_float_as_uint(__a);
}
__DEVICE__ double __fma_rd(double __a, double __b, double __c) {
  return __nv_fma_rd(__a, __b, __c);
}
__DEVICE__ double __fma_rn(double __a, double __b, double __c) {
  return __nv_fma_rn(__a, __b, __c);
}
__DEVICE__ double __fma_ru(double __a, double __b, double __c) {
  return __nv_fma_ru(__a, __b, __c);
}
__DEVICE__ double __fma_rz(double __a, double __b, double __c) {
  return __nv_fma_rz(__a, __b, __c);
}
__DEVICE__ float __fmaf_ieee_rd(float __a, float __b, float __c) {
  return __nv_fmaf_ieee_rd(__a, __b, __c);
}
__DEVICE__ float __fmaf_ieee_rn(float __a, float __b, float __c) {
  return __nv_fmaf_ieee_rn(__a, __b, __c);
}
__DEVICE__ float __fmaf_ieee_ru(float __a, float __b, float __c) {
  return __nv_fmaf_ieee_ru(__a, __b, __c);
}
__DEVICE__ float __fmaf_ieee_rz(float __a, float __b, float __c) {
  return __nv_fmaf_ieee_rz(__a, __b, __c);
}
__DEVICE__ float __fmaf_rd(float __a, float __b, float __c) {
  return __nv_fmaf_rd(__a, __b, __c);
}
__DEVICE__ float __fmaf_rn(float __a, float __b, float __c) {
  return __nv_fmaf_rn(__a, __b, __c);
}
__DEVICE__ float __fmaf_ru(float __a, float __b, float __c) {
  return __nv_fmaf_ru(__a, __b, __c);
}
__DEVICE__ float __fmaf_rz(float __a, float __b, float __c) {
  return __nv_fmaf_rz(__a, __b, __c);
}
__DEVICE__ float __fmul_rd(float __a, float __b) {
  return __nv_fmul_rd(__a, __b);
}
__DEVICE__ float __fmul_rn(float __a, float __b) {
  return __nv_fmul_rn(__a, __b);
}
__DEVICE__ float __fmul_ru(float __a, float __b) {
  return __nv_fmul_ru(__a, __b);
}
__DEVICE__ float __fmul_rz(float __a, float __b) {
  return __nv_fmul_rz(__a, __b);
}
__DEVICE__ float __frcp_rd(float __a) { return __nv_frcp_rd(__a); }
__DEVICE__ float __frcp_rn(float __a) { return __nv_frcp_rn(__a); }
__DEVICE__ float __frcp_ru(float __a) { return __nv_frcp_ru(__a); }
__DEVICE__ float __frcp_rz(float __a) { return __nv_frcp_rz(__a); }
__DEVICE__ float __frsqrt_rn(float __a) { return __nv_frsqrt_rn(__a); }
__DEVICE__ float __fsqrt_rd(float __a) { return __nv_fsqrt_rd(__a); }
__DEVICE__ float __fsqrt_rn(float __a) { return __nv_fsqrt_rn(__a); }
__DEVICE__ float __fsqrt_ru(float __a) { return __nv_fsqrt_ru(__a); }
__DEVICE__ float __fsqrt_rz(float __a) { return __nv_fsqrt_rz(__a); }
__DEVICE__ float __fsub_rd(float __a, float __b) {
  return __nv_fsub_rd(__a, __b);
}
__DEVICE__ float __fsub_rn(float __a, float __b) {
  return __nv_fsub_rn(__a, __b);
}
__DEVICE__ float __fsub_ru(float __a, float __b) {
  return __nv_fsub_ru(__a, __b);
}
__DEVICE__ float __fsub_rz(float __a, float __b) {
  return __nv_fsub_rz(__a, __b);
}
__DEVICE__ int __hadd(int __a, int __b) { return __nv_hadd(__a, __b); }
__DEVICE__ double __hiloint2double(int __a, int __b) {
  return __nv_hiloint2double(__a, __b);
}
__DEVICE__ int __iAtomicAdd(int *__p, int __v) {
  return __nvvm_atom_add_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicAdd_block(int *__p, int __v) {
  return __nvvm_atom_cta_add_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicAdd_system(int *__p, int __v) {
  return __nvvm_atom_sys_add_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicAnd(int *__p, int __v) {
  return __nvvm_atom_and_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicAnd_block(int *__p, int __v) {
  return __nvvm_atom_cta_and_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicAnd_system(int *__p, int __v) {
  return __nvvm_atom_sys_and_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicCAS(int *__p, int __cmp, int __v) {
  return __nvvm_atom_cas_gen_i(__p, __cmp, __v);
}
__DEVICE__ int __iAtomicCAS_block(int *__p, int __cmp, int __v) {
  return __nvvm_atom_cta_cas_gen_i(__p, __cmp, __v);
}
__DEVICE__ int __iAtomicCAS_system(int *__p, int __cmp, int __v) {
  return __nvvm_atom_sys_cas_gen_i(__p, __cmp, __v);
}
__DEVICE__ int __iAtomicExch(int *__p, int __v) {
  return __nvvm_atom_xchg_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicExch_block(int *__p, int __v) {
  return __nvvm_atom_cta_xchg_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicExch_system(int *__p, int __v) {
  return __nvvm_atom_sys_xchg_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicMax(int *__p, int __v) {
  return __nvvm_atom_max_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicMax_block(int *__p, int __v) {
  return __nvvm_atom_cta_max_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicMax_system(int *__p, int __v) {
  return __nvvm_atom_sys_max_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicMin(int *__p, int __v) {
  return __nvvm_atom_min_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicMin_block(int *__p, int __v) {
  return __nvvm_atom_cta_min_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicMin_system(int *__p, int __v) {
  return __nvvm_atom_sys_min_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicOr(int *__p, int __v) {
  return __nvvm_atom_or_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicOr_block(int *__p, int __v) {
  return __nvvm_atom_cta_or_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicOr_system(int *__p, int __v) {
  return __nvvm_atom_sys_or_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicXor(int *__p, int __v) {
  return __nvvm_atom_xor_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicXor_block(int *__p, int __v) {
  return __nvvm_atom_cta_xor_gen_i(__p, __v);
}
__DEVICE__ int __iAtomicXor_system(int *__p, int __v) {
  return __nvvm_atom_sys_xor_gen_i(__p, __v);
}
__DEVICE__ long long __illAtomicMax(long long *__p, long long __v) {
  return __nvvm_atom_max_gen_ll(__p, __v);
}
__DEVICE__ long long __illAtomicMax_block(long long *__p, long long __v) {
  return __nvvm_atom_cta_max_gen_ll(__p, __v);
}
__DEVICE__ long long __illAtomicMax_system(long long *__p, long long __v) {
  return __nvvm_atom_sys_max_gen_ll(__p, __v);
}
__DEVICE__ long long __illAtomicMin(long long *__p, long long __v) {
  return __nvvm_atom_min_gen_ll(__p, __v);
}
__DEVICE__ long long __illAtomicMin_block(long long *__p, long long __v) {
  return __nvvm_atom_cta_min_gen_ll(__p, __v);
}
__DEVICE__ long long __illAtomicMin_system(long long *__p, long long __v) {
  return __nvvm_atom_sys_min_gen_ll(__p, __v);
}
__DEVICE__ double __int2double_rn(int __a) { return __nv_int2double_rn(__a); }
__DEVICE__ float __int2float_rd(int __a) { return __nv_int2float_rd(__a); }
__DEVICE__ float __int2float_rn(int __a) { return __nv_int2float_rn(__a); }
__DEVICE__ float __int2float_ru(int __a) { return __nv_int2float_ru(__a); }
__DEVICE__ float __int2float_rz(int __a) { return __nv_int2float_rz(__a); }
__DEVICE__ float __int_as_float(int __a) { return __nv_int_as_float(__a); }
__DEVICE__ int __isfinited(double __a) { return __nv_isfinited(__a); }
__DEVICE__ int __isinf(double __a) { return __nv_isinfd(__a); }
__DEVICE__ int __isinff(float __a) { return __nv_isinff(__a); }
#ifdef _MSC_VER
__DEVICE__ int __isinfl(long double __a);
#endif
__DEVICE__ int __isnan(double __a) { return __nv_isnand(__a); }
__DEVICE__ int __isnanf(float __a) { return __nv_isnanf(__a); }
#ifdef _MSC_VER
__DEVICE__ int __isnanl(long double __a);
#endif
__DEVICE__ double __ll2double_rd(long long __a) {
  return __nv_ll2double_rd(__a);
}
__DEVICE__ double __ll2double_rn(long long __a) {
  return __nv_ll2double_rn(__a);
}
__DEVICE__ double __ll2double_ru(long long __a) {
  return __nv_ll2double_ru(__a);
}
__DEVICE__ double __ll2double_rz(long long __a) {
  return __nv_ll2double_rz(__a);
}
__DEVICE__ float __ll2float_rd(long long __a) { return __nv_ll2float_rd(__a); }
__DEVICE__ float __ll2float_rn(long long __a) { return __nv_ll2float_rn(__a); }
__DEVICE__ float __ll2float_ru(long long __a) { return __nv_ll2float_ru(__a); }
__DEVICE__ float __ll2float_rz(long long __a) { return __nv_ll2float_rz(__a); }
__DEVICE__ long long __llAtomicAnd(long long *__p, long long __v) {
  return __nvvm_atom_and_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicAnd_block(long long *__p, long long __v) {
  return __nvvm_atom_cta_and_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicAnd_system(long long *__p, long long __v) {
  return __nvvm_atom_sys_and_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicOr(long long *__p, long long __v) {
  return __nvvm_atom_or_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicOr_block(long long *__p, long long __v) {
  return __nvvm_atom_cta_or_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicOr_system(long long *__p, long long __v) {
  return __nvvm_atom_sys_or_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicXor(long long *__p, long long __v) {
  return __nvvm_atom_xor_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicXor_block(long long *__p, long long __v) {
  return __nvvm_atom_cta_xor_gen_ll(__p, __v);
}
__DEVICE__ long long __llAtomicXor_system(long long *__p, long long __v) {
  return __nvvm_atom_sys_xor_gen_ll(__p, __v);
}
__DEVICE__ float __log10f(float __a) { return __nv_fast_log10f(__a); }
__DEVICE__ float __log2f(float __a) { return __nv_fast_log2f(__a); }
__DEVICE__ float __logf(float __a) { return __nv_fast_logf(__a); }
__DEVICE__ double __longlong_as_double(long long __a) {
  return __nv_longlong_as_double(__a);
}
__DEVICE__ int __mul24(int __a, int __b) { return __nv_mul24(__a, __b); }
__DEVICE__ long long __mul64hi(long long __a, long long __b) {
  return __nv_mul64hi(__a, __b);
}
__DEVICE__ int __mulhi(int __a, int __b) { return __nv_mulhi(__a, __b); }
__DEVICE__ unsigned int __pm0(void) { return __nvvm_read_ptx_sreg_pm0(); }
__DEVICE__ unsigned int __pm1(void) { return __nvvm_read_ptx_sreg_pm1(); }
__DEVICE__ unsigned int __pm2(void) { return __nvvm_read_ptx_sreg_pm2(); }
__DEVICE__ unsigned int __pm3(void) { return __nvvm_read_ptx_sreg_pm3(); }
__DEVICE__ int __popc(unsigned int __a) { return __nv_popc(__a); }
__DEVICE__ int __popcll(unsigned long long __a) { return __nv_popcll(__a); }
__DEVICE__ float __powf(float __a, float __b) {
  return __nv_fast_powf(__a, __b);
}

// Parameter must have a known integer value.
#define __prof_trigger(__a) __asm__ __volatile__("pmevent \t%0;" ::"i"(__a))
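// For example, `__prof_trigger(0);` expands to inline assembly that emits
// `pmevent 0;`; the argument must be a compile-time integer constant so it can
// be encoded as an immediate ("i") asm operand.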
__DEVICE__ int __rhadd(int __a, int __b) { return __nv_rhadd(__a, __b); }
__DEVICE__ unsigned int __sad(int __a, int __b, unsigned int __c) {
  return __nv_sad(__a, __b, __c);
}
__DEVICE__ float __saturatef(float __a) { return __nv_saturatef(__a); }
__DEVICE__ int __signbitd(double __a) { return __nv_signbitd(__a); }
__DEVICE__ int __signbitf(float __a) { return __nv_signbitf(__a); }
__DEVICE__ void __sincosf(float __a, float *__s, float *__c) {
  return __nv_fast_sincosf(__a, __s, __c);
}
__DEVICE__ float __sinf(float __a) { return __nv_fast_sinf(__a); }
__DEVICE__ int __syncthreads_and(int __a) { return __nvvm_bar0_and(__a); }
__DEVICE__ int __syncthreads_count(int __a) { return __nvvm_bar0_popc(__a); }
__DEVICE__ int __syncthreads_or(int __a) { return __nvvm_bar0_or(__a); }
__DEVICE__ float __tanf(float __a) { return __nv_fast_tanf(__a); }
__DEVICE__ void __threadfence(void) { __nvvm_membar_gl(); }
__DEVICE__ void __threadfence_block(void) { __nvvm_membar_cta(); }
__DEVICE__ void __threadfence_system(void) { __nvvm_membar_sys(); }
__DEVICE__ void __trap(void) { __asm__ __volatile__("trap;"); }
__DEVICE__ unsigned short
__usAtomicCAS(unsigned short *__p, unsigned short __cmp, unsigned short __v) {
  return __nvvm_atom_cas_gen_us(__p, __cmp, __v);
}
__DEVICE__ unsigned short __usAtomicCAS_block(unsigned short *__p,
                                              unsigned short __cmp,
                                              unsigned short __v) {
  return __nvvm_atom_cta_cas_gen_us(__p, __cmp, __v);
}
__DEVICE__ unsigned short __usAtomicCAS_system(unsigned short *__p,
                                               unsigned short __cmp,
                                               unsigned short __v) {
  return __nvvm_atom_sys_cas_gen_us(__p, __cmp, __v);
}
__DEVICE__ unsigned int __uAtomicAdd(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_add_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicAdd_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_add_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicAdd_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_add_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicAnd(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_and_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicAnd_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_and_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicAnd_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_and_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicCAS(unsigned int *__p, unsigned int __cmp,
                                     unsigned int __v) {
  return __nvvm_atom_cas_gen_i((int *)__p, __cmp, __v);
}
__DEVICE__ unsigned int
__uAtomicCAS_block(unsigned int *__p, unsigned int __cmp, unsigned int __v) {
  return __nvvm_atom_cta_cas_gen_i((int *)__p, __cmp, __v);
}
__DEVICE__ unsigned int
__uAtomicCAS_system(unsigned int *__p, unsigned int __cmp, unsigned int __v) {
  return __nvvm_atom_sys_cas_gen_i((int *)__p, __cmp, __v);
}
__DEVICE__ unsigned int __uAtomicDec(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_dec_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicDec_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_dec_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicDec_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_dec_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicExch(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_xchg_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicExch_block(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_cta_xchg_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicExch_system(unsigned int *__p,
                                             unsigned int __v) {
  return __nvvm_atom_sys_xchg_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicInc(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_inc_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicInc_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_inc_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicInc_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_inc_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicMax(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_max_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicMax_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_max_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicMax_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_max_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicMin(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_min_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicMin_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_min_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicMin_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_min_gen_ui(__p, __v);
}
__DEVICE__ unsigned int __uAtomicOr(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_or_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicOr_block(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_cta_or_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicOr_system(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_sys_or_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicXor(unsigned int *__p, unsigned int __v) {
  return __nvvm_atom_xor_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicXor_block(unsigned int *__p,
                                           unsigned int __v) {
  return __nvvm_atom_cta_xor_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uAtomicXor_system(unsigned int *__p,
                                            unsigned int __v) {
  return __nvvm_atom_sys_xor_gen_i((int *)__p, __v);
}
__DEVICE__ unsigned int __uhadd(unsigned int __a, unsigned int __b) {
  return __nv_uhadd(__a, __b);
}
__DEVICE__ double __uint2double_rn(unsigned int __a) {
  return __nv_uint2double_rn(__a);
}
__DEVICE__ float __uint2float_rd(unsigned int __a) {
  return __nv_uint2float_rd(__a);
}
__DEVICE__ float __uint2float_rn(unsigned int __a) {
  return __nv_uint2float_rn(__a);
}
__DEVICE__ float __uint2float_ru(unsigned int __a) {
  return __nv_uint2float_ru(__a);
}
__DEVICE__ float __uint2float_rz(unsigned int __a) {
  return __nv_uint2float_rz(__a);
}
__DEVICE__ float __uint_as_float(unsigned int __a) {
  return __nv_uint_as_float(__a);
}
__DEVICE__ double __ull2double_rd(unsigned long long __a) {
  return __nv_ull2double_rd(__a);
}
__DEVICE__ double __ull2double_rn(unsigned long long __a) {
  return __nv_ull2double_rn(__a);
}
__DEVICE__ double __ull2double_ru(unsigned long long __a) {
  return __nv_ull2double_ru(__a);
}
__DEVICE__ double __ull2double_rz(unsigned long long __a) {
  return __nv_ull2double_rz(__a);
}
__DEVICE__ float __ull2float_rd(unsigned long long __a) {
  return __nv_ull2float_rd(__a);
}
__DEVICE__ float __ull2float_rn(unsigned long long __a) {
  return __nv_ull2float_rn(__a);
}
__DEVICE__ float __ull2float_ru(unsigned long long __a) {
  return __nv_ull2float_ru(__a);
}
__DEVICE__ float __ull2float_rz(unsigned long long __a) {
  return __nv_ull2float_rz(__a);
}
__DEVICE__ unsigned long long __ullAtomicAdd(unsigned long long *__p,
                                             unsigned long long __v) {
  return __nvvm_atom_add_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicAdd_block(unsigned long long *__p,
                                                   unsigned long long __v) {
  return __nvvm_atom_cta_add_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicAdd_system(unsigned long long *__p,
                                                    unsigned long long __v) {
  return __nvvm_atom_sys_add_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicAnd(unsigned long long *__p,
                                             unsigned long long __v) {
  return __nvvm_atom_and_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicAnd_block(unsigned long long *__p,
                                                   unsigned long long __v) {
  return __nvvm_atom_cta_and_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicAnd_system(unsigned long long *__p,
                                                    unsigned long long __v) {
  return __nvvm_atom_sys_and_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicCAS(unsigned long long *__p,
                                             unsigned long long __cmp,
                                             unsigned long long __v) {
  return __nvvm_atom_cas_gen_ll((long long *)__p, __cmp, __v);
}
__DEVICE__ unsigned long long __ullAtomicCAS_block(unsigned long long *__p,
                                                   unsigned long long __cmp,
                                                   unsigned long long __v) {
  return __nvvm_atom_cta_cas_gen_ll((long long *)__p, __cmp, __v);
}
__DEVICE__ unsigned long long __ullAtomicCAS_system(unsigned long long *__p,
                                                    unsigned long long __cmp,
                                                    unsigned long long __v) {
  return __nvvm_atom_sys_cas_gen_ll((long long *)__p, __cmp, __v);
}
__DEVICE__ unsigned long long __ullAtomicExch(unsigned long long *__p,
                                              unsigned long long __v) {
  return __nvvm_atom_xchg_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicExch_block(unsigned long long *__p,
                                                    unsigned long long __v) {
  return __nvvm_atom_cta_xchg_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicExch_system(unsigned long long *__p,
                                                     unsigned long long __v) {
  return __nvvm_atom_sys_xchg_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicMax(unsigned long long *__p,
                                             unsigned long long __v) {
  return __nvvm_atom_max_gen_ull(__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicMax_block(unsigned long long *__p,
                                                   unsigned long long __v) {
  return __nvvm_atom_cta_max_gen_ull(__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicMax_system(unsigned long long *__p,
                                                    unsigned long long __v) {
  return __nvvm_atom_sys_max_gen_ull(__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicMin(unsigned long long *__p,
                                             unsigned long long __v) {
  return __nvvm_atom_min_gen_ull(__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicMin_block(unsigned long long *__p,
                                                   unsigned long long __v) {
  return __nvvm_atom_cta_min_gen_ull(__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicMin_system(unsigned long long *__p,
                                                    unsigned long long __v) {
  return __nvvm_atom_sys_min_gen_ull(__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicOr(unsigned long long *__p,
                                            unsigned long long __v) {
  return __nvvm_atom_or_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicOr_block(unsigned long long *__p,
                                                  unsigned long long __v) {
  return __nvvm_atom_cta_or_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicOr_system(unsigned long long *__p,
                                                   unsigned long long __v) {
  return __nvvm_atom_sys_or_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicXor(unsigned long long *__p,
                                             unsigned long long __v) {
  return __nvvm_atom_xor_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicXor_block(unsigned long long *__p,
                                                   unsigned long long __v) {
  return __nvvm_atom_cta_xor_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned long long __ullAtomicXor_system(unsigned long long *__p,
                                                    unsigned long long __v) {
  return __nvvm_atom_sys_xor_gen_ll((long long *)__p, __v);
}
__DEVICE__ unsigned int __umul24(unsigned int __a, unsigned int __b) {
  return __nv_umul24(__a, __b);
}
__DEVICE__ unsigned long long __umul64hi(unsigned long long __a,
                                         unsigned long long __b) {
  return __nv_umul64hi(__a, __b);
}
__DEVICE__ unsigned int __umulhi(unsigned int __a, unsigned int __b) {
  return __nv_umulhi(__a, __b);
}
__DEVICE__ unsigned int __urhadd(unsigned int __a, unsigned int __b) {
  return __nv_urhadd(__a, __b);
}
__DEVICE__ unsigned int __usad(unsigned int __a, unsigned int __b,
                               unsigned int __c) {
  return __nv_usad(__a, __b, __c);
}

#if CUDA_VERSION >= 9000 && CUDA_VERSION < 9020
__DEVICE__ unsigned int __vabs2(unsigned int __a) { return __nv_vabs2(__a); }
__DEVICE__ unsigned int __vabs4(unsigned int __a) { return __nv_vabs4(__a); }
__DEVICE__ unsigned int __vabsdiffs2(unsigned int __a, unsigned int __b) {
  return __nv_vabsdiffs2(__a, __b);
}
__DEVICE__ unsigned int __vabsdiffs4(unsigned int __a, unsigned int __b) {
  return __nv_vabsdiffs4(__a, __b);
}
__DEVICE__ unsigned int __vabsdiffu2(unsigned int __a, unsigned int __b) {
  return __nv_vabsdiffu2(__a, __b);
}
__DEVICE__ unsigned int __vabsdiffu4(unsigned int __a, unsigned int __b) {
  return __nv_vabsdiffu4(__a, __b);
}
__DEVICE__ unsigned int __vabsss2(unsigned int __a) {
  return __nv_vabsss2(__a);
}
__DEVICE__ unsigned int __vabsss4(unsigned int __a) {
  return __nv_vabsss4(__a);
}
__DEVICE__ unsigned int __vadd2(unsigned int __a, unsigned int __b) {
  return __nv_vadd2(__a, __b);
}
__DEVICE__ unsigned int __vadd4(unsigned int __a, unsigned int __b) {
  return __nv_vadd4(__a, __b);
}
__DEVICE__ unsigned int __vaddss2(unsigned int __a, unsigned int __b) {
  return __nv_vaddss2(__a, __b);
}
__DEVICE__ unsigned int __vaddss4(unsigned int __a, unsigned int __b) {
  return __nv_vaddss4(__a, __b);
}
__DEVICE__ unsigned int __vaddus2(unsigned int __a, unsigned int __b) {
  return __nv_vaddus2(__a, __b);
}
__DEVICE__ unsigned int __vaddus4(unsigned int __a, unsigned int __b) {
  return __nv_vaddus4(__a, __b);
}
__DEVICE__ unsigned int __vavgs2(unsigned int __a, unsigned int __b) {
  return __nv_vavgs2(__a, __b);
}
__DEVICE__ unsigned int __vavgs4(unsigned int __a, unsigned int __b) {
  return __nv_vavgs4(__a, __b);
}
__DEVICE__ unsigned int __vavgu2(unsigned int __a, unsigned int __b) {
  return __nv_vavgu2(__a, __b);
}
__DEVICE__ unsigned int __vavgu4(unsigned int __a, unsigned int __b) {
  return __nv_vavgu4(__a, __b);
}
__DEVICE__ unsigned int __vcmpeq2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpeq2(__a, __b);
}
__DEVICE__ unsigned int __vcmpeq4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpeq4(__a, __b);
}
__DEVICE__ unsigned int __vcmpges2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpges2(__a, __b);
}
__DEVICE__ unsigned int __vcmpges4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpges4(__a, __b);
}
__DEVICE__ unsigned int __vcmpgeu2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpgeu2(__a, __b);
}
__DEVICE__ unsigned int __vcmpgeu4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpgeu4(__a, __b);
}
__DEVICE__ unsigned int __vcmpgts2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpgts2(__a, __b);
}
__DEVICE__ unsigned int __vcmpgts4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpgts4(__a, __b);
}
__DEVICE__ unsigned int __vcmpgtu2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpgtu2(__a, __b);
}
__DEVICE__ unsigned int __vcmpgtu4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpgtu4(__a, __b);
}
__DEVICE__ unsigned int __vcmples2(unsigned int __a, unsigned int __b) {
  return __nv_vcmples2(__a, __b);
}
__DEVICE__ unsigned int __vcmples4(unsigned int __a, unsigned int __b) {
  return __nv_vcmples4(__a, __b);
}
__DEVICE__ unsigned int __vcmpleu2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpleu2(__a, __b);
}
__DEVICE__ unsigned int __vcmpleu4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpleu4(__a, __b);
}
__DEVICE__ unsigned int __vcmplts2(unsigned int __a, unsigned int __b) {
  return __nv_vcmplts2(__a, __b);
}
__DEVICE__ unsigned int __vcmplts4(unsigned int __a, unsigned int __b) {
  return __nv_vcmplts4(__a, __b);
}
__DEVICE__ unsigned int __vcmpltu2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpltu2(__a, __b);
}
__DEVICE__ unsigned int __vcmpltu4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpltu4(__a, __b);
}
__DEVICE__ unsigned int __vcmpne2(unsigned int __a, unsigned int __b) {
  return __nv_vcmpne2(__a, __b);
}
__DEVICE__ unsigned int __vcmpne4(unsigned int __a, unsigned int __b) {
  return __nv_vcmpne4(__a, __b);
}
__DEVICE__ unsigned int __vhaddu2(unsigned int __a, unsigned int __b) {
  return __nv_vhaddu2(__a, __b);
}
__DEVICE__ unsigned int __vhaddu4(unsigned int __a, unsigned int __b) {
  return __nv_vhaddu4(__a, __b);
}
__DEVICE__ unsigned int __vmaxs2(unsigned int __a, unsigned int __b) {
  return __nv_vmaxs2(__a, __b);
}
__DEVICE__ unsigned int __vmaxs4(unsigned int __a, unsigned int __b) {
  return __nv_vmaxs4(__a, __b);
}
__DEVICE__ unsigned int __vmaxu2(unsigned int __a, unsigned int __b) {
  return __nv_vmaxu2(__a, __b);
}
__DEVICE__ unsigned int __vmaxu4(unsigned int __a, unsigned int __b) {
  return __nv_vmaxu4(__a, __b);
}
__DEVICE__ unsigned int __vmins2(unsigned int __a, unsigned int __b) {
  return __nv_vmins2(__a, __b);
}
__DEVICE__ unsigned int __vmins4(unsigned int __a, unsigned int __b) {
  return __nv_vmins4(__a, __b);
}
__DEVICE__ unsigned int __vminu2(unsigned int __a, unsigned int __b) {
  return __nv_vminu2(__a, __b);
}
__DEVICE__ unsigned int __vminu4(unsigned int __a, unsigned int __b) {
  return __nv_vminu4(__a, __b);
}
__DEVICE__ unsigned int __vneg2(unsigned int __a) { return __nv_vneg2(__a); }
__DEVICE__ unsigned int __vneg4(unsigned int __a) { return __nv_vneg4(__a); }
__DEVICE__ unsigned int __vnegss2(unsigned int __a) {
  return __nv_vnegss2(__a);
}
__DEVICE__ unsigned int __vnegss4(unsigned int __a) {
  return __nv_vnegss4(__a);
}
__DEVICE__ unsigned int __vsads2(unsigned int __a, unsigned int __b) {
  return __nv_vsads2(__a, __b);
}
__DEVICE__ unsigned int __vsads4(unsigned int __a, unsigned int __b) {
  return __nv_vsads4(__a, __b);
}
__DEVICE__ unsigned int __vsadu2(unsigned int __a, unsigned int __b) {
  return __nv_vsadu2(__a, __b);
}
__DEVICE__ unsigned int __vsadu4(unsigned int __a, unsigned int __b) {
  return __nv_vsadu4(__a, __b);
}
__DEVICE__ unsigned int __vseteq2(unsigned int __a, unsigned int __b) {
  return __nv_vseteq2(__a, __b);
}
__DEVICE__ unsigned int __vseteq4(unsigned int __a, unsigned int __b) {
  return __nv_vseteq4(__a, __b);
}
__DEVICE__ unsigned int __vsetges2(unsigned int __a, unsigned int __b) {
  return __nv_vsetges2(__a, __b);
}
__DEVICE__ unsigned int __vsetges4(unsigned int __a, unsigned int __b) {
  return __nv_vsetges4(__a, __b);
}
__DEVICE__ unsigned int __vsetgeu2(unsigned int __a, unsigned int __b) {
  return __nv_vsetgeu2(__a, __b);
}
__DEVICE__ unsigned int __vsetgeu4(unsigned int __a, unsigned int __b) {
  return __nv_vsetgeu4(__a, __b);
}
__DEVICE__ unsigned int __vsetgts2(unsigned int __a, unsigned int __b) {
  return __nv_vsetgts2(__a, __b);
}
__DEVICE__ unsigned int __vsetgts4(unsigned int __a, unsigned int __b) {
  return __nv_vsetgts4(__a, __b);
}
__DEVICE__ unsigned int __vsetgtu2(unsigned int __a, unsigned int __b) {
  return __nv_vsetgtu2(__a, __b);
}
__DEVICE__ unsigned int __vsetgtu4(unsigned int __a, unsigned int __b) {
  return __nv_vsetgtu4(__a, __b);
}
__DEVICE__ unsigned int __vsetles2(unsigned int __a, unsigned int __b) {
  return __nv_vsetles2(__a, __b);
}
__DEVICE__ unsigned int __vsetles4(unsigned int __a, unsigned int __b) {
  return __nv_vsetles4(__a, __b);
}
__DEVICE__ unsigned int __vsetleu2(unsigned int __a, unsigned int __b) {
  return __nv_vsetleu2(__a, __b);
}
__DEVICE__ unsigned int __vsetleu4(unsigned int __a, unsigned int __b) {
  return __nv_vsetleu4(__a, __b);
}
__DEVICE__ unsigned int __vsetlts2(unsigned int __a, unsigned int __b) {
  return __nv_vsetlts2(__a, __b);
}
__DEVICE__ unsigned int __vsetlts4(unsigned int __a, unsigned int __b) {
  return __nv_vsetlts4(__a, __b);
}
__DEVICE__ unsigned int __vsetltu2(unsigned int __a, unsigned int __b) {
  return __nv_vsetltu2(__a, __b);
}
__DEVICE__ unsigned int __vsetltu4(unsigned int __a, unsigned int __b) {
  return __nv_vsetltu4(__a, __b);
}
__DEVICE__ unsigned int __vsetne2(unsigned int __a, unsigned int __b) {
  return __nv_vsetne2(__a, __b);
}
__DEVICE__ unsigned int __vsetne4(unsigned int __a, unsigned int __b) {
  return __nv_vsetne4(__a, __b);
}
__DEVICE__ unsigned int __vsub2(unsigned int __a, unsigned int __b) {
  return __nv_vsub2(__a, __b);
}
__DEVICE__ unsigned int __vsub4(unsigned int __a, unsigned int __b) {
  return __nv_vsub4(__a, __b);
}
__DEVICE__ unsigned int __vsubss2(unsigned int __a, unsigned int __b) {
  return __nv_vsubss2(__a, __b);
}
__DEVICE__ unsigned int __vsubss4(unsigned int __a, unsigned int __b) {
  return __nv_vsubss4(__a, __b);
}
__DEVICE__ unsigned int __vsubus2(unsigned int __a, unsigned int __b) {
  return __nv_vsubus2(__a, __b);
}
__DEVICE__ unsigned int __vsubus4(unsigned int __a, unsigned int __b) {
  return __nv_vsubus4(__a, __b);
}
#else // CUDA_VERSION >= 9020
// CUDA no longer provides inline assembly (or bitcode) implementations of these
// functions, so we have to reimplement them. The implementation is naive and is
// not optimized for performance.

// Helper function to convert N-bit boolean subfields into all-0 or all-1.
// E.g. __bool2mask(0x01000100,8) -> 0xff00ff00
//      __bool2mask(0x00010000,16) -> 0xffff0000
__DEVICE__ unsigned int __bool2mask(unsigned int __a, int shift) {
  return (__a << shift) - __a;
}
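// The vset* helpers below return 0 or 1 per sub-element; __bool2mask widens
// that into the all-zeros/all-ones element masks the __vcmp* functions return,
// e.g. __vcmpeq2(__a, __b) == __bool2mask(__vseteq2(__a, __b), 16).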
__DEVICE__ unsigned int __vabs2(unsigned int __a) {
  unsigned int r;
  __asm__("vabsdiff2.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(0), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabs4(unsigned int __a) {
  unsigned int r;
  __asm__("vabsdiff4.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(0), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabsdiffs2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff2.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabsdiffs4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff4.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabsdiffu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff2.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabsdiffu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff4.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabsss2(unsigned int __a) {
  unsigned int r;
  __asm__("vabsdiff2.s32.s32.s32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(0), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vabsss4(unsigned int __a) {
  unsigned int r;
  __asm__("vabsdiff4.s32.s32.s32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(0), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vadd2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vadd2.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vadd4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vadd4.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vaddss2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vadd2.s32.s32.s32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vaddss4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vadd4.s32.s32.s32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vaddus2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vadd2.u32.u32.u32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vaddus4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vadd4.u32.u32.u32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vavgs2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vavrg2.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vavgs4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vavrg4.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vavgu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vavrg2.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vavgu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vavrg4.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vseteq2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.u32.u32.eq %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpeq2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vseteq2(__a, __b), 16);
}
__DEVICE__ unsigned int __vseteq4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.u32.u32.eq %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpeq4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vseteq4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetges2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.s32.s32.ge %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpges2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetges2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetges4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.s32.s32.ge %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpges4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetges4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetgeu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.u32.u32.ge %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpgeu2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetgeu2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetgeu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.u32.u32.ge %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpgeu4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetgeu4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetgts2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.s32.s32.gt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpgts2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetgts2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetgts4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.s32.s32.gt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpgts4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetgts4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetgtu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.u32.u32.gt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpgtu2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetgtu2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetgtu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.u32.u32.gt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpgtu4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetgtu4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetles2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.s32.s32.le %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmples2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetles2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetles4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.s32.s32.le %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmples4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetles4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetleu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.u32.u32.le %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpleu2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetleu2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetleu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.u32.u32.le %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpleu4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetleu4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetlts2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.s32.s32.lt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmplts2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetlts2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetlts4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.s32.s32.lt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmplts4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetlts4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetltu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.u32.u32.lt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpltu2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetltu2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetltu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.u32.u32.lt %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpltu4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetltu4(__a, __b), 8);
}
__DEVICE__ unsigned int __vsetne2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset2.u32.u32.ne %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpne2(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetne2(__a, __b), 16);
}
__DEVICE__ unsigned int __vsetne4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vset4.u32.u32.ne %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vcmpne4(unsigned int __a, unsigned int __b) {
  return __bool2mask(__vsetne4(__a, __b), 8);
}

// Based on ITEM 23 in AIM-239: http://dspace.mit.edu/handle/1721.1/6086
// (a & b) + (a | b) = a + b = (a ^ b) + 2 * (a & b) =>
// (a + b) / 2 = ((a ^ b) >> 1) + (a & b)
// To operate on multiple sub-elements we need to make sure to mask out bits
// that crossed over into adjacent elements during the shift.
__DEVICE__ unsigned int __vhaddu2(unsigned int __a, unsigned int __b) {
  return (((__a ^ __b) >> 1) & ~0x80008000u) + (__a & __b);
}
__DEVICE__ unsigned int __vhaddu4(unsigned int __a, unsigned int __b) {
  return (((__a ^ __b) >> 1) & ~0x80808080u) + (__a & __b);
}
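// For example, __vhaddu2(0x00070003, 0x00010005) == 0x00040004: per 16-bit
// element, (7 + 1) / 2 == 4 and (3 + 5) / 2 == 4, computed without forming the
// intermediate sums that could overflow the element width.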
__DEVICE__ unsigned int __vmaxs2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  if ((__a & 0x8000) && (__b & 0x8000)) {
    // Work around a bug in ptxas which produces invalid result if low element
    // is negative.
    unsigned mask = __vcmpgts2(__a, __b);
    r = (__a & mask) | (__b & ~mask);
  } else {
    __asm__("vmax2.s32.s32.s32 %0,%1,%2,%3;"
            : "=r"(r)
            : "r"(__a), "r"(__b), "r"(0));
  }
  return r;
}
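// The masked-select fallback above computes the same per-lane signed maximum:
// __vcmpgts2 yields 0xffff in each 16-bit lane where __a > __b, so
// (__a & mask) | (__b & ~mask) keeps __a in exactly those lanes and __b in the
// rest.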
__DEVICE__ unsigned int __vmaxs4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmax4.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vmaxu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmax2.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vmaxu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmax4.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vmins2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmin2.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vmins4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmin4.s32.s32.s32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vminu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmin2.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vminu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vmin4.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vsads2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff2.s32.s32.s32.add %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vsads4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff4.s32.s32.s32.add %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vsadu2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff2.u32.u32.u32.add %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vsadu4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vabsdiff4.u32.u32.u32.add %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}

__DEVICE__ unsigned int __vsub2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vsub2.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vneg2(unsigned int __a) { return __vsub2(0, __a); }

__DEVICE__ unsigned int __vsub4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vsub4.u32.u32.u32 %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vneg4(unsigned int __a) { return __vsub4(0, __a); }
__DEVICE__ unsigned int __vsubss2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vsub2.s32.s32.s32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vnegss2(unsigned int __a) {
  return __vsubss2(0, __a);
}
__DEVICE__ unsigned int __vsubss4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vsub4.s32.s32.s32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vnegss4(unsigned int __a) {
  return __vsubss4(0, __a);
}
__DEVICE__ unsigned int __vsubus2(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vsub2.u32.u32.u32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
__DEVICE__ unsigned int __vsubus4(unsigned int __a, unsigned int __b) {
  unsigned int r;
  __asm__("vsub4.u32.u32.u32.sat %0,%1,%2,%3;"
          : "=r"(r)
          : "r"(__a), "r"(__b), "r"(0));
  return r;
}
#endif // CUDA_VERSION >= 9020

// For OpenMP we require the user to include <time.h> as we need to know what
// clock_t is on the system.
#ifndef __OPENMP_NVPTX__
__DEVICE__ /* clock_t= */ int clock() { return __nvvm_read_ptx_sreg_clock(); }
#endif
__DEVICE__ long long clock64() { return __nvvm_read_ptx_sreg_clock64(); }

// These functions shouldn't be declared when this header is included only for
// math function resolution purposes.
#ifndef __OPENMP_NVPTX__
__DEVICE__ void *memcpy(void *__a, const void *__b, size_t __c) {
  return __builtin_memcpy(__a, __b, __c);
}
__DEVICE__ void *memset(void *__a, int __b, size_t __c) {
  return __builtin_memset(__a, __b, __c);
}
#endif

#pragma pop_macro("__DEVICE__")
#endif // __CLANG_CUDA_DEVICE_FUNCTIONS_H__