; RUN: llc < %s -march=nvptx64 -mcpu=sm_20 | FileCheck --check-prefix=SM20 %s
; RUN: llc < %s -march=nvptx64 -mcpu=sm_35 | FileCheck --check-prefix=SM35 %s
; RUN: %if ptxas %{ llc < %s -march=nvptx64 -mcpu=sm_20 | %ptxas-verify %}
; RUN: %if ptxas %{ llc < %s -march=nvptx64 -mcpu=sm_35 | %ptxas-verify %}
; Intrinsics under test: NVVM rotate-left (32- and 64-bit) and rotate-right (64-bit).
declare i32 @llvm.nvvm.rotate.b32(i32, i32)
declare i64 @llvm.nvvm.rotate.b64(i64, i32)
declare i64 @llvm.nvvm.rotate.right.b64(i64, i32)
; 32-bit rotate via @llvm.nvvm.rotate.b32.
; On sm_35 and later the backend is expected to emit the shf.l.wrap.b32
; instruction (see SM35 check below).
define i32 @rotate32(i32 %a, i32 %b) {
; SM35: shf.l.wrap.b32
  %val = tail call i32 @llvm.nvvm.rotate.b32(i32 %a, i32 %b)
  ret i32 %val
}
; 64-bit rotate via @llvm.nvvm.rotate.b64.
; On sm_35 and later the 64-bit rotate is expected to lower to two 32-bit
; shf.l.wrap.b32 instructions (see SM35 checks below).
define i64 @rotate64(i64 %a, i32 %b) {
; SM35: shf.l.wrap.b32
; SM35: shf.l.wrap.b32
  %val = tail call i64 @llvm.nvvm.rotate.b64(i64 %a, i32 %b)
  ret i64 %val
}
; 64-bit rotate-right via @llvm.nvvm.rotate.right.b64.
; On sm_35 and later this is expected to lower to two 32-bit
; shf.r.wrap.b32 instructions (see SM35 checks below).
define i64 @rotateright64(i64 %a, i32 %b) {
; SM35: shf.r.wrap.b32
; SM35: shf.r.wrap.b32
  %val = tail call i64 @llvm.nvvm.rotate.right.b64(i64 %a, i32 %b)
  ret i64 %val
}
51 define i32 @rotl0(i32 %x) {
55 ; SM35: shf.l.wrap.b32