1 ; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
2 ; RUN: opt -instcombine -S < %s | FileCheck %s
4 declare double @llvm.powi.f64.i32(double, i32)
5 declare double @llvm.powi.f64.i64(double, i64)
6 declare double @llvm.fabs.f64(double)
7 declare double @llvm.copysign.f64(double, double)
8 declare void @use(double)
; powi(-x, 4): an even power makes the sign of the base irrelevant, so the
; fneg is removed and powi is called on %x directly (see CHECK lines).
10 define double @powi_fneg_even_int(double %x) {
11 ; CHECK-LABEL: @powi_fneg_even_int(
13 ; CHECK-NEXT: [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 4)
14 ; CHECK-NEXT: ret double [[R]]
17 %fneg = fneg double %x
18 %r = tail call double @llvm.powi.f64.i32(double %fneg, i32 4)
; powi(fabs(x), 4): an even power ignores the base's sign, so the fabs is
; redundant and dropped (CHECK lines call powi on %x directly).
22 define double @powi_fabs_even_int(double %x) {
23 ; CHECK-LABEL: @powi_fabs_even_int(
25 ; CHECK-NEXT: [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 4)
26 ; CHECK-NEXT: ret double [[R]]
29 %f = tail call double @llvm.fabs.f64(double %x)
30 %r = tail call double @llvm.powi.f64.i32(double %f, i32 4)
; powi(copysign(x, y), 4): the sign injected by copysign cannot affect an
; even power, so the copysign is removed (CHECK lines use %x directly).
34 define double @powi_copysign_even_int(double %x, double %y) {
35 ; CHECK-LABEL: @powi_copysign_even_int(
37 ; CHECK-NEXT: [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 4)
38 ; CHECK-NEXT: ret double [[R]]
41 %cs = tail call double @llvm.copysign.f64(double %x, double %y)
42 %r = tail call double @llvm.powi.f64.i32(double %cs, i32 4)
; Negative test: with an odd power the result depends on the base's sign,
; so the fneg must be preserved (CHECK lines keep both instructions).
46 define double @powi_fneg_odd_int(double %x) {
47 ; CHECK-LABEL: @powi_fneg_odd_int(
49 ; CHECK-NEXT: [[FNEG:%.*]] = fneg double [[X:%.*]]
50 ; CHECK-NEXT: [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[FNEG]], i32 5)
51 ; CHECK-NEXT: ret double [[R]]
54 %fneg = fneg double %x
55 %r = tail call double @llvm.powi.f64.i32(double %fneg, i32 5)
; Negative test: fabs(x)^5 differs from x^5 for negative x, so the fabs
; cannot be removed for an odd power (CHECK lines keep both instructions).
59 define double @powi_fabs_odd_int(double %x) {
60 ; CHECK-LABEL: @powi_fabs_odd_int(
62 ; CHECK-NEXT: [[F:%.*]] = tail call double @llvm.fabs.f64(double [[X:%.*]])
63 ; CHECK-NEXT: [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[F]], i32 5)
64 ; CHECK-NEXT: ret double [[R]]
67 %f = tail call double @llvm.fabs.f64(double %x)
68 %r = tail call double @llvm.powi.f64.i32(double %f, i32 5)
; Negative test: an odd power is sign-sensitive, so the copysign must stay
; (CHECK lines keep both instructions).
72 define double @powi_copysign_odd_int(double %x, double %y) {
73 ; CHECK-LABEL: @powi_copysign_odd_int(
75 ; CHECK-NEXT: [[CS:%.*]] = tail call double @llvm.copysign.f64(double [[X:%.*]], double [[Y:%.*]])
76 ; CHECK-NEXT: [[R:%.*]] = tail call double @llvm.powi.f64.i32(double [[CS]], i32 5)
77 ; CHECK-NEXT: ret double [[R]]
80 %cs = tail call double @llvm.copysign.f64(double %x, double %y)
81 %r = tail call double @llvm.powi.f64.i32(double %cs, i32 5)
; Negative test: powi(x, i) * x without reassoc on the fmul must not be
; combined (CHECK lines keep the separate powi and fmul).
85 define double @powi_fmul_arg0_no_reassoc(double %x, i32 %i) {
86 ; CHECK-LABEL: @powi_fmul_arg0_no_reassoc(
88 ; CHECK-NEXT: [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]])
89 ; CHECK-NEXT: [[MUL:%.*]] = fmul double [[POW]], [[X]]
90 ; CHECK-NEXT: ret double [[MUL]]
93 %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i)
94 %mul = fmul double %pow, %x
; powi(x, i) * x with reassoc on the fmul: the CHECK lines show it is
; currently left unchanged (the reassoc fmul survives).
99 define double @powi_fmul_arg0(double %x, i32 %i) {
100 ; CHECK-LABEL: @powi_fmul_arg0(
102 ; CHECK-NEXT: [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]])
103 ; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[POW]], [[X]]
104 ; CHECK-NEXT: ret double [[MUL]]
107 %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i)
108 %mul = fmul reassoc double %pow, %x
; Same as powi_fmul_arg0 but the powi result has an extra use via @use;
; CHECK lines show everything is kept as-is.
112 define double @powi_fmul_arg0_use(double %x, i32 %i) {
113 ; CHECK-LABEL: @powi_fmul_arg0_use(
115 ; CHECK-NEXT: [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]])
116 ; CHECK-NEXT: tail call void @use(double [[POW]])
117 ; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[POW]], [[X]]
118 ; CHECK-NEXT: ret double [[MUL]]
121 %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i)
122 tail call void @use(double %pow)
123 %mul = fmul reassoc double %pow, %x
; Negative test: powi(x, y) * powi(x, z) must not be merged without
; reassoc on the fmul (CHECK lines keep both calls and the multiply).
127 define double @powi_fmul_powi_no_reassoc(double %x, i32 %y, i32 %z) {
128 ; CHECK-LABEL: @powi_fmul_powi_no_reassoc(
130 ; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
131 ; CHECK-NEXT: [[P2:%.*]] = tail call double @llvm.powi.f64.i32(double [[X]], i32 [[Z:%.*]])
132 ; CHECK-NEXT: [[MUL:%.*]] = fmul double [[P2]], [[P1]]
133 ; CHECK-NEXT: ret double [[MUL]]
136 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
137 %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
138 %mul = fmul double %p2, %p1
; With reassoc on the fmul: powi(x, y) * powi(x, z) -> powi(x, y + z)
; (CHECK lines show the exponents added and a single reassoc powi call).
143 define double @powi_fmul_powi(double %x, i32 %y, i32 %z) {
144 ; CHECK-LABEL: @powi_fmul_powi(
146 ; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Z:%.*]], [[Y:%.*]]
147 ; CHECK-NEXT: [[TMP1:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]])
148 ; CHECK-NEXT: ret double [[TMP1]]
151 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
152 %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
153 %mul = fmul reassoc double %p2, %p1
; fast on the fmul alone is sufficient to merge the calls; the combined
; powi inherits the fast flag (see CHECK lines).
157 define double @powi_fmul_powi_fast_on_fmul(double %x, i32 %y, i32 %z) {
158 ; CHECK-LABEL: @powi_fmul_powi_fast_on_fmul(
160 ; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Z:%.*]], [[Y:%.*]]
161 ; CHECK-NEXT: [[TMP1:%.*]] = call fast double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]])
162 ; CHECK-NEXT: ret double [[TMP1]]
165 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
166 %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
167 %mul = fmul fast double %p2, %p1
; Negative test: fast only on the powi calls while the fmul has no flags;
; the merge keys off the fmul, so nothing is combined (see CHECK lines).
171 define double @powi_fmul_powi_fast_on_powi(double %x, i32 %y, i32 %z) {
172 ; CHECK-LABEL: @powi_fmul_powi_fast_on_powi(
174 ; CHECK-NEXT: [[P1:%.*]] = tail call fast double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
175 ; CHECK-NEXT: [[P2:%.*]] = tail call fast double @llvm.powi.f64.i32(double [[X]], i32 [[Z:%.*]])
176 ; CHECK-NEXT: [[MUL:%.*]] = fmul double [[P2]], [[P1]]
177 ; CHECK-NEXT: ret double [[MUL]]
180 %p1 = tail call fast double @llvm.powi.f64.i32(double %x, i32 %y)
181 %p2 = tail call fast double @llvm.powi.f64.i32(double %x, i32 %z)
182 %mul = fmul double %p2, %p1
; Both operands are powi(x, y): merged to powi(x, 2*y), with the doubling
; emitted as a shl by 1 (see CHECK lines).
186 define double @powi_fmul_powi_same_power(double %x, i32 %y, i32 %z) {
187 ; CHECK-LABEL: @powi_fmul_powi_same_power(
189 ; CHECK-NEXT: [[TMP0:%.*]] = shl i32 [[Y:%.*]], 1
190 ; CHECK-NEXT: [[TMP1:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]])
191 ; CHECK-NEXT: ret double [[TMP1]]
194 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
195 %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
196 %mul = fmul reassoc double %p2, %p1
; %p1 has an extra use via @use, so that call is kept, but the reassoc
; multiply is still merged into powi(x, y + z) (see CHECK lines).
200 define double @powi_fmul_powi_use_first(double %x, i32 %y, i32 %z) {
201 ; CHECK-LABEL: @powi_fmul_powi_use_first(
203 ; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
204 ; CHECK-NEXT: tail call void @use(double [[P1]])
205 ; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Y]], [[Z:%.*]]
206 ; CHECK-NEXT: [[TMP1:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X]], i32 [[TMP0]])
207 ; CHECK-NEXT: ret double [[TMP1]]
210 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
211 tail call void @use(double %p1)
212 %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
213 %mul = fmul reassoc double %p1, %p2
; As above, but the extra use is on the second multiplied operand; the
; merge into powi(x, y + z) still happens (see CHECK lines).
217 define double @powi_fmul_powi_use_second(double %x, i32 %y, i32 %z) {
218 ; CHECK-LABEL: @powi_fmul_powi_use_second(
220 ; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Z:%.*]])
221 ; CHECK-NEXT: tail call void @use(double [[P1]])
222 ; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Y:%.*]], [[Z]]
223 ; CHECK-NEXT: [[TMP1:%.*]] = call reassoc double @llvm.powi.f64.i32(double [[X]], i32 [[TMP0]])
224 ; CHECK-NEXT: ret double [[TMP1]]
227 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %z)
228 tail call void @use(double %p1)
229 %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
230 %mul = fmul reassoc double %p2, %p1
; Negative test: the two powi calls have different bases (%x vs %m), so
; the exponents cannot be added (CHECK lines keep everything).
234 define double @powi_fmul_different_base(double %x, double %m, i32 %y, i32 %z) {
235 ; CHECK-LABEL: @powi_fmul_different_base(
237 ; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
238 ; CHECK-NEXT: [[P2:%.*]] = tail call double @llvm.powi.f64.i32(double [[M:%.*]], i32 [[Z:%.*]])
239 ; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[P2]], [[P1]]
240 ; CHECK-NEXT: ret double [[MUL]]
243 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
244 %p2 = tail call double @llvm.powi.f64.i32(double %m, i32 %z)
245 %mul = fmul reassoc double %p2, %p1
; Negative test: same base but mismatched exponent types (i32 vs i64),
; so the calls are not merged (CHECK lines keep both intrinsics).
249 define double @different_types_powi(double %x, i32 %y, i64 %z) {
250 ; CHECK-LABEL: @different_types_powi(
251 ; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]])
252 ; CHECK-NEXT: [[P2:%.*]] = tail call double @llvm.powi.f64.i64(double [[X]], i64 [[Z:%.*]])
253 ; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[P2]], [[P1]]
254 ; CHECK-NEXT: ret double [[MUL]]
256 %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y)
257 %p2 = tail call double @llvm.powi.f64.i64(double %x, i64 %z)
258 %mul = fmul reassoc double %p2, %p1