// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// RUN: %clang_cc1 -triple riscv32 -target-feature +zbb -emit-llvm %s -o - \
// RUN:     -disable-O0-optnone | opt -S -passes=mem2reg \
// RUN:     | FileCheck %s -check-prefix=RV32ZBB
// RUN: %clang_cc1 -triple riscv64 -target-feature +zbb -emit-llvm %s -o - \
// RUN:     -disable-O0-optnone | opt -S -passes=mem2reg \
// RUN:     | FileCheck %s -check-prefix=RV64ZBB
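
// Zbb orc.b sets every byte of the result to 0xff when the corresponding
// byte of the input is nonzero, and to 0x00 otherwise.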
// RV32ZBB-LABEL: @orc_b_32(
// RV32ZBB-NEXT:  entry:
// RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.orc.b.i32(i32 [[A:%.*]])
// RV32ZBB-NEXT:    ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @orc_b_32(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.riscv.orc.b.i32(i32 [[A:%.*]])
// RV64ZBB-NEXT:    ret i32 [[TMP0]]
//
unsigned int orc_b_32(unsigned int a) {
  return __builtin_riscv_orc_b_32(a);
}

#if __riscv_xlen == 64
// RV64ZBB-LABEL: @orc_b_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.riscv.orc.b.i64(i64 [[A:%.*]])
// RV64ZBB-NEXT:    ret i64 [[TMP0]]
//
unsigned long orc_b_64(unsigned long a) {
  return __builtin_riscv_orc_b_64(a);
}
#endif
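
// clz counts leading zero bits. The `i1 false` argument to llvm.ctlz means a
// zero input is well defined (it yields the operand width). The 64-bit builtin
// still returns unsigned int, hence the trunc to i32 in the RV64 checks below.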
// RV32ZBB-LABEL: @clz_32(
// RV32ZBB-NEXT:  entry:
// RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.ctlz.i32(i32 [[A:%.*]], i1 false)
// RV32ZBB-NEXT:    ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @clz_32(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.ctlz.i32(i32 [[A:%.*]], i1 false)
// RV64ZBB-NEXT:    ret i32 [[TMP0]]
//
unsigned int clz_32(unsigned int a) {
  return __builtin_riscv_clz_32(a);
}

#if __riscv_xlen == 64
// RV64ZBB-LABEL: @clz_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false)
// RV64ZBB-NEXT:    [[CAST:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT:    ret i32 [[CAST]]
//
unsigned int clz_64(unsigned long a) {
  return __builtin_riscv_clz_64(a);
}
#endif
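
// ctz counts trailing zero bits; as with clz, a zero input to llvm.cttz with
// `i1 false` is well defined, and the 64-bit builtin truncates its result back
// to unsigned int.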
// RV32ZBB-LABEL: @ctz_32(
// RV32ZBB-NEXT:  entry:
// RV32ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.cttz.i32(i32 [[A:%.*]], i1 false)
// RV32ZBB-NEXT:    ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @ctz_32(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i32 @llvm.cttz.i32(i32 [[A:%.*]], i1 false)
// RV64ZBB-NEXT:    ret i32 [[TMP0]]
//
unsigned int ctz_32(unsigned int a) {
  return __builtin_riscv_ctz_32(a);
}

#if __riscv_xlen == 64
// RV64ZBB-LABEL: @ctz_64(
// RV64ZBB-NEXT:  entry:
// RV64ZBB-NEXT:    [[TMP0:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false)
// RV64ZBB-NEXT:    [[CAST:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT:    ret i32 [[CAST]]
//
unsigned int ctz_64(unsigned long a) {
  return __builtin_riscv_ctz_64(a);
}
#endif