; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -S -instcombine < %s | FileCheck %s

target triple = "aarch64-unknown-linux-gnu"
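
; The SVE element-count intrinsics (cntb/cnth/cntw/cntd) take an immediate
; predicate pattern. InstCombine is expected to fold a fixed-VL pattern to a
; constant whenever the requested element count is guaranteed to fit in the
; minimum 128-bit SVE register, and to lower the ALL pattern (31) to a
; multiple of vscale.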
;
; CNTB
;

define i64 @cntb_vl1() {
; CHECK-LABEL: @cntb_vl1(
; CHECK-NEXT:    ret i64 1
;
  %out = call i64 @llvm.aarch64.sve.cntb(i32 1)
  ret i64 %out
}

define i64 @cntb_vl2() {
; CHECK-LABEL: @cntb_vl2(
; CHECK-NEXT:    ret i64 2
;
  %out = call i64 @llvm.aarch64.sve.cntb(i32 2)
  ret i64 %out
}

define i64 @cntb_vl4() {
; CHECK-LABEL: @cntb_vl4(
; CHECK-NEXT:    ret i64 4
;
  %out = call i64 @llvm.aarch64.sve.cntb(i32 4)
  ret i64 %out
}
define i64 @cntb_mul3() {
; CHECK-LABEL: @cntb_mul3(
; CHECK-NEXT:    ret i64 24
;
  %cnt = call i64 @llvm.aarch64.sve.cntb(i32 8)
  %out = mul i64 %cnt, 3
  ret i64 %out
}

define i64 @cntb_mul4() {
; CHECK-LABEL: @cntb_mul4(
; CHECK-NEXT:    ret i64 64
;
  %cnt = call i64 @llvm.aarch64.sve.cntb(i32 9)
  %out = mul i64 %cnt, 4
  ret i64 %out
}
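
; Pattern 31 (ALL) counts every element, so cntb is rewritten in terms of
; vscale: a 128-bit granule holds 16 bytes, hence the shift by 4 below.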
define i64 @cntb_all() {
; CHECK-LABEL: @cntb_all(
; CHECK-NEXT:    [[TMP1:%.*]] = call i64 @llvm.vscale.i64()
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[TMP1]], 4
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %out = call i64 @llvm.aarch64.sve.cntb(i32 31)
  ret i64 %out
}

;
; CNTH
;

define i64 @cnth_vl1() {
; CHECK-LABEL: @cnth_vl1(
; CHECK-NEXT:    ret i64 1
;
  %out = call i64 @llvm.aarch64.sve.cnth(i32 1)
  ret i64 %out
}

define i64 @cnth_vl2() {
; CHECK-LABEL: @cnth_vl2(
; CHECK-NEXT:    ret i64 2
;
  %out = call i64 @llvm.aarch64.sve.cnth(i32 2)
  ret i64 %out
}

define i64 @cnth_vl4() {
; CHECK-LABEL: @cnth_vl4(
; CHECK-NEXT:    ret i64 4
;
  %out = call i64 @llvm.aarch64.sve.cnth(i32 4)
  ret i64 %out
}

define i64 @cnth_mul3() {
; CHECK-LABEL: @cnth_mul3(
; CHECK-NEXT:    ret i64 24
;
  %cnt = call i64 @llvm.aarch64.sve.cnth(i32 8)
  %out = mul i64 %cnt, 3
  ret i64 %out
}
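
; Pattern 9 (VL16) asks for more halfwords than the 8 guaranteed by a 128-bit
; register, so the intrinsic is not folded; only the multiply is
; strength-reduced to a shift.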
define i64 @cnth_mul4() {
; CHECK-LABEL: @cnth_mul4(
; CHECK-NEXT:    [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cnth(i32 9)
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[CNT]], 2
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %cnt = call i64 @llvm.aarch64.sve.cnth(i32 9)
  %out = mul i64 %cnt, 4
  ret i64 %out
}

define i64 @cnth_all() {
; CHECK-LABEL: @cnth_all(
; CHECK-NEXT:    [[TMP1:%.*]] = call i64 @llvm.vscale.i64()
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[TMP1]], 3
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %out = call i64 @llvm.aarch64.sve.cnth(i32 31)
  ret i64 %out
}

;
; CNTW
;

define i64 @cntw_vl1() {
; CHECK-LABEL: @cntw_vl1(
; CHECK-NEXT:    ret i64 1
;
  %out = call i64 @llvm.aarch64.sve.cntw(i32 1)
  ret i64 %out
}

define i64 @cntw_vl2() {
; CHECK-LABEL: @cntw_vl2(
; CHECK-NEXT:    ret i64 2
;
  %out = call i64 @llvm.aarch64.sve.cntw(i32 2)
  ret i64 %out
}

define i64 @cntw_vl4() {
; CHECK-LABEL: @cntw_vl4(
; CHECK-NEXT:    ret i64 4
;
  %out = call i64 @llvm.aarch64.sve.cntw(i32 4)
  ret i64 %out
}
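
; Pattern 8 (VL8) exceeds the 4 words guaranteed by a 128-bit register, so
; neither the intrinsic nor the multiply by 3 can be simplified here.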
define i64 @cntw_mul3() {
; CHECK-LABEL: @cntw_mul3(
; CHECK-NEXT:    [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntw(i32 8)
; CHECK-NEXT:    [[OUT:%.*]] = mul i64 [[CNT]], 3
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %cnt = call i64 @llvm.aarch64.sve.cntw(i32 8)
  %out = mul i64 %cnt, 3
  ret i64 %out
}

define i64 @cntw_mul4() {
; CHECK-LABEL: @cntw_mul4(
; CHECK-NEXT:    [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntw(i32 9)
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[CNT]], 2
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %cnt = call i64 @llvm.aarch64.sve.cntw(i32 9)
  %out = mul i64 %cnt, 4
  ret i64 %out
}

define i64 @cntw_all() {
; CHECK-LABEL: @cntw_all(
; CHECK-NEXT:    [[TMP1:%.*]] = call i64 @llvm.vscale.i64()
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[TMP1]], 2
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %out = call i64 @llvm.aarch64.sve.cntw(i32 31)
  ret i64 %out
}

;
; CNTD
;

define i64 @cntd_vl1() {
; CHECK-LABEL: @cntd_vl1(
; CHECK-NEXT:    ret i64 1
;
  %out = call i64 @llvm.aarch64.sve.cntd(i32 1)
  ret i64 %out
}

define i64 @cntd_vl2() {
; CHECK-LABEL: @cntd_vl2(
; CHECK-NEXT:    ret i64 2
;
  %out = call i64 @llvm.aarch64.sve.cntd(i32 2)
  ret i64 %out
}
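
; A 128-bit register only guarantees 2 doublewords, so a VL4 request on cntd
; is not known to be satisfiable and the call is left untouched.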
define i64 @cntd_vl4() {
; CHECK-LABEL: @cntd_vl4(
; CHECK-NEXT:    [[OUT:%.*]] = call i64 @llvm.aarch64.sve.cntd(i32 4)
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %out = call i64 @llvm.aarch64.sve.cntd(i32 4)
  ret i64 %out
}

define i64 @cntd_mul3() {
; CHECK-LABEL: @cntd_mul3(
; CHECK-NEXT:    [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntd(i32 8)
; CHECK-NEXT:    [[OUT:%.*]] = mul i64 [[CNT]], 3
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %cnt = call i64 @llvm.aarch64.sve.cntd(i32 8)
  %out = mul i64 %cnt, 3
  ret i64 %out
}

define i64 @cntd_mul4() {
; CHECK-LABEL: @cntd_mul4(
; CHECK-NEXT:    [[CNT:%.*]] = call i64 @llvm.aarch64.sve.cntd(i32 9)
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[CNT]], 2
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %cnt = call i64 @llvm.aarch64.sve.cntd(i32 9)
  %out = mul i64 %cnt, 4
  ret i64 %out
}

define i64 @cntd_all() {
; CHECK-LABEL: @cntd_all(
; CHECK-NEXT:    [[TMP1:%.*]] = call i64 @llvm.vscale.i64()
; CHECK-NEXT:    [[OUT:%.*]] = shl i64 [[TMP1]], 1
; CHECK-NEXT:    ret i64 [[OUT]]
;
  %out = call i64 @llvm.aarch64.sve.cntd(i32 31)
  ret i64 %out
}

declare i64 @llvm.aarch64.sve.cntb(i32 %pattern)
declare i64 @llvm.aarch64.sve.cnth(i32 %pattern)
declare i64 @llvm.aarch64.sve.cntw(i32 %pattern)
declare i64 @llvm.aarch64.sve.cntd(i32 %pattern)