; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv64 -mattr=+d -verify-machineinstrs -target-abi=lp64d \
; RUN:   -disable-strictnode-mutation < %s | FileCheck %s -check-prefix=RV64ID
; RUN: llc -mtriple=riscv64 -mattr=+zdinx -verify-machineinstrs -target-abi=lp64 \
; RUN:   -disable-strictnode-mutation < %s | FileCheck %s -check-prefix=RV64IDINX

; This file exhaustively checks double<->i32 conversions. In general,
; fcvt.l[u].d can be selected instead of fcvt.w[u].d because poison is
; generated for an fpto[s|u]i conversion if the result doesn't fit in the
; destination type.
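;
; A minimal sketch of that reasoning (illustrative only, not one of the
; checked tests; %d and %x are hypothetical names), using the plain rather
; than the constrained form of the conversion:
;
;   %x = fptoui double %d to i32
;
; Any %d whose value does not fit in i32 makes %x poison, so selecting
; fcvt.lu.d, whose result is already zero-extended for in-range inputs, is
; as good as fcvt.wu.d followed by an explicit zero-extension; this is
; consistent with the fcvt.lu.d selection checked in @zext_fptoui below.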
define i32 @aext_fptosi(double %a) nounwind strictfp {
; RV64ID-LABEL: aext_fptosi:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.w.d a0, fa0, rtz
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: aext_fptosi:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.w.d a0, a0, rtz
; RV64IDINX-NEXT:    ret
  %1 = call i32 @llvm.experimental.constrained.fptosi.i32.f64(double %a, metadata !"fpexcept.strict") strictfp
  ret i32 %1
}
declare i32 @llvm.experimental.constrained.fptosi.i32.f64(double, metadata)

define signext i32 @sext_fptosi(double %a) nounwind strictfp {
; RV64ID-LABEL: sext_fptosi:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.w.d a0, fa0, rtz
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: sext_fptosi:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.w.d a0, a0, rtz
; RV64IDINX-NEXT:    ret
  %1 = call i32 @llvm.experimental.constrained.fptosi.i32.f64(double %a, metadata !"fpexcept.strict") strictfp
  ret i32 %1
}

define zeroext i32 @zext_fptosi(double %a) nounwind strictfp {
; RV64ID-LABEL: zext_fptosi:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.w.d a0, fa0, rtz
; RV64ID-NEXT:    slli a0, a0, 32
; RV64ID-NEXT:    srli a0, a0, 32
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: zext_fptosi:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.w.d a0, a0, rtz
; RV64IDINX-NEXT:    slli a0, a0, 32
; RV64IDINX-NEXT:    srli a0, a0, 32
; RV64IDINX-NEXT:    ret
  %1 = call i32 @llvm.experimental.constrained.fptosi.i32.f64(double %a, metadata !"fpexcept.strict") strictfp
  ret i32 %1
}

define i32 @aext_fptoui(double %a) nounwind strictfp {
; RV64ID-LABEL: aext_fptoui:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.wu.d a0, fa0, rtz
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: aext_fptoui:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.wu.d a0, a0, rtz
; RV64IDINX-NEXT:    ret
  %1 = call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %a, metadata !"fpexcept.strict") strictfp
  ret i32 %1
}
declare i32 @llvm.experimental.constrained.fptoui.i32.f64(double, metadata)

define signext i32 @sext_fptoui(double %a) nounwind strictfp {
; RV64ID-LABEL: sext_fptoui:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.wu.d a0, fa0, rtz
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: sext_fptoui:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.wu.d a0, a0, rtz
; RV64IDINX-NEXT:    ret
  %1 = call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %a, metadata !"fpexcept.strict") strictfp
  ret i32 %1
}

define zeroext i32 @zext_fptoui(double %a) nounwind strictfp {
; RV64ID-LABEL: zext_fptoui:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.lu.d a0, fa0, rtz
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: zext_fptoui:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.lu.d a0, a0, rtz
; RV64IDINX-NEXT:    ret
  %1 = call i32 @llvm.experimental.constrained.fptoui.i32.f64(double %a, metadata !"fpexcept.strict") strictfp
  ret i32 %1
}

define double @uitofp_aext_i32_to_f64(i32 %a) nounwind strictfp {
; RV64ID-LABEL: uitofp_aext_i32_to_f64:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.d.wu fa0, a0
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: uitofp_aext_i32_to_f64:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.d.wu a0, a0
; RV64IDINX-NEXT:    ret
  %1 = call double @llvm.experimental.constrained.uitofp.f64.i32(i32 %a, metadata !"round.dynamic", metadata !"fpexcept.strict") strictfp
  ret double %1
}
declare double @llvm.experimental.constrained.uitofp.f64.i32(i32 %a, metadata, metadata)

define double @uitofp_sext_i32_to_f64(i32 signext %a) nounwind strictfp {
; RV64ID-LABEL: uitofp_sext_i32_to_f64:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.d.wu fa0, a0
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: uitofp_sext_i32_to_f64:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.d.wu a0, a0
; RV64IDINX-NEXT:    ret
  %1 = call double @llvm.experimental.constrained.uitofp.f64.i32(i32 %a, metadata !"round.dynamic", metadata !"fpexcept.strict") strictfp
  ret double %1
}

define double @uitofp_zext_i32_to_f64(i32 zeroext %a) nounwind strictfp {
; RV64ID-LABEL: uitofp_zext_i32_to_f64:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.d.wu fa0, a0
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: uitofp_zext_i32_to_f64:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.d.wu a0, a0
; RV64IDINX-NEXT:    ret
  %1 = call double @llvm.experimental.constrained.uitofp.f64.i32(i32 %a, metadata !"round.dynamic", metadata !"fpexcept.strict") strictfp
  ret double %1
}

define double @sitofp_aext_i32_to_f64(i32 %a) nounwind strictfp {
; RV64ID-LABEL: sitofp_aext_i32_to_f64:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.d.w fa0, a0
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: sitofp_aext_i32_to_f64:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.d.w a0, a0
; RV64IDINX-NEXT:    ret
  %1 = call double @llvm.experimental.constrained.sitofp.f64.i32(i32 %a, metadata !"round.dynamic", metadata !"fpexcept.strict") strictfp
  ret double %1
}
declare double @llvm.experimental.constrained.sitofp.f64.i32(i32 %a, metadata, metadata)

define double @sitofp_sext_i32_to_f64(i32 signext %a) nounwind strictfp {
; RV64ID-LABEL: sitofp_sext_i32_to_f64:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.d.w fa0, a0
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: sitofp_sext_i32_to_f64:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.d.w a0, a0
; RV64IDINX-NEXT:    ret
  %1 = call double @llvm.experimental.constrained.sitofp.f64.i32(i32 %a, metadata !"round.dynamic", metadata !"fpexcept.strict") strictfp
  ret double %1
}

define double @sitofp_zext_i32_to_f64(i32 zeroext %a) nounwind strictfp {
; RV64ID-LABEL: sitofp_zext_i32_to_f64:
; RV64ID:       # %bb.0:
; RV64ID-NEXT:    fcvt.d.w fa0, a0
; RV64ID-NEXT:    ret
;
; RV64IDINX-LABEL: sitofp_zext_i32_to_f64:
; RV64IDINX:       # %bb.0:
; RV64IDINX-NEXT:    fcvt.d.w a0, a0
; RV64IDINX-NEXT:    ret
  %1 = call double @llvm.experimental.constrained.sitofp.f64.i32(i32 %a, metadata !"round.dynamic", metadata !"fpexcept.strict") strictfp
  ret double %1
}