// NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// RUN: %clang_cc1 -triple riscv32 -target-feature +zbb -emit-llvm %s -o - \
// RUN: -disable-O0-optnone | opt -S -passes=mem2reg \
// RUN: | FileCheck %s -check-prefix=RV32ZBB
// RUN: %clang_cc1 -triple riscv64 -target-feature +zbb -emit-llvm %s -o - \
// RUN: -disable-O0-optnone | opt -S -passes=mem2reg \
// RUN: | FileCheck %s -check-prefix=RV64ZBB

#include <riscv_bitmanip.h>
// RV32ZBB-LABEL: @orc_b_32(
// RV32ZBB-NEXT: entry:
// RV32ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.orc.b.i32(i32 [[A:%.*]])
// RV32ZBB-NEXT: ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @orc_b_32(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.riscv.orc.b.i32(i32 [[A:%.*]])
// RV64ZBB-NEXT: ret i32 [[TMP0]]
//
// Bitwise OR-combine within each byte: every byte of the result is 0xFF
// if the corresponding input byte is nonzero, 0x00 otherwise.
uint32_t orc_b_32(uint32_t a) {
  return __riscv_orc_b_32(a);
}
#if __riscv_xlen == 64
// RV64ZBB-LABEL: @orc_b_64(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i64 @llvm.riscv.orc.b.i64(i64 [[A:%.*]])
// RV64ZBB-NEXT: ret i64 [[TMP0]]
//
// 64-bit variant of orc.b; the intrinsic is only available when XLEN is 64,
// so the function is guarded to keep the RV32 run line compiling.
uint64_t orc_b_64(uint64_t a) {
  return __riscv_orc_b_64(a);
}
#endif
// RV32ZBB-LABEL: @clz_32(
// RV32ZBB-NEXT: entry:
// RV32ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.ctlz.i32(i32 [[A:%.*]], i1 false)
// RV32ZBB-NEXT: ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @clz_32(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.ctlz.i32(i32 [[A:%.*]], i1 false)
// RV64ZBB-NEXT: ret i32 [[TMP0]]
//
// Count leading zeros; lowers to llvm.ctlz with is_zero_poison=false,
// so clz_32(0) is defined (returns 32).
unsigned int clz_32(uint32_t a) {
  return __riscv_clz_32(a);
}
#if __riscv_xlen == 64
// RV64ZBB-LABEL: @clz_64(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i64 @llvm.ctlz.i64(i64 [[A:%.*]], i1 false)
// RV64ZBB-NEXT: [[CAST_I:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT: ret i32 [[CAST_I]]
//
// 64-bit count-leading-zeros; the i64 intrinsic result (<= 64) is truncated
// to the unsigned int return type. XLEN-64 only, hence the guard.
unsigned int clz_64(uint64_t a) {
  return __riscv_clz_64(a);
}
#endif
// RV32ZBB-LABEL: @ctz_32(
// RV32ZBB-NEXT: entry:
// RV32ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.cttz.i32(i32 [[A:%.*]], i1 false)
// RV32ZBB-NEXT: ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @ctz_32(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.cttz.i32(i32 [[A:%.*]], i1 false)
// RV64ZBB-NEXT: ret i32 [[TMP0]]
//
// Count trailing zeros; lowers to llvm.cttz with is_zero_poison=false,
// so ctz_32(0) is defined (returns 32).
unsigned int ctz_32(uint32_t a) {
  return __riscv_ctz_32(a);
}
#if __riscv_xlen == 64
// RV64ZBB-LABEL: @ctz_64(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i64 @llvm.cttz.i64(i64 [[A:%.*]], i1 false)
// RV64ZBB-NEXT: [[CAST_I:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT: ret i32 [[CAST_I]]
//
// 64-bit count-trailing-zeros; the i64 intrinsic result (<= 64) is truncated
// to the unsigned int return type. XLEN-64 only, hence the guard.
unsigned int ctz_64(uint64_t a) {
  return __riscv_ctz_64(a);
}
#endif
// RV32ZBB-LABEL: @cpop_32(
// RV32ZBB-NEXT: entry:
// RV32ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.ctpop.i32(i32 [[A:%.*]])
// RV32ZBB-NEXT: ret i32 [[TMP0]]
//
// RV64ZBB-LABEL: @cpop_32(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i32 @llvm.ctpop.i32(i32 [[A:%.*]])
// RV64ZBB-NEXT: ret i32 [[TMP0]]
//
// Population count (number of set bits); lowers to llvm.ctpop.i32.
unsigned int cpop_32(uint32_t a) {
  return __riscv_cpop_32(a);
}
#if __riscv_xlen == 64
// RV64ZBB-LABEL: @cpop_64(
// RV64ZBB-NEXT: entry:
// RV64ZBB-NEXT: [[TMP0:%.*]] = call i64 @llvm.ctpop.i64(i64 [[A:%.*]])
// RV64ZBB-NEXT: [[CAST_I:%.*]] = trunc i64 [[TMP0]] to i32
// RV64ZBB-NEXT: ret i32 [[CAST_I]]
//
// 64-bit population count; the i64 intrinsic result (<= 64) is truncated
// to the unsigned int return type. XLEN-64 only, hence the guard.
unsigned int cpop_64(uint64_t a) {
  return __riscv_cpop_64(a);
}
#endif