1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=riscv32 -mattr=+d -verify-machineinstrs < %s \
3 ; RUN: -target-abi=ilp32d | FileCheck -check-prefixes=CHECKIFD,RV32IFD %s
4 ; RUN: llc -mtriple=riscv64 -mattr=+d -verify-machineinstrs < %s \
5 ; RUN: -target-abi=lp64d | FileCheck -check-prefixes=CHECKIFD,RV64IFD %s
6 ; RUN: llc -mtriple=riscv32 -mattr=+zdinx -verify-machineinstrs < %s \
7 ; RUN: -target-abi=ilp32 | FileCheck -check-prefixes=RV32IZFINXZDINX %s
8 ; RUN: llc -mtriple=riscv64 -mattr=+zdinx -verify-machineinstrs < %s \
9 ; RUN: -target-abi=lp64 | FileCheck -check-prefixes=RV64IZFINXZDINX %s
11 define signext i8 @test_floor_si8(double %x) {
12 ; RV32IFD-LABEL: test_floor_si8:
14 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rdn
17 ; RV64IFD-LABEL: test_floor_si8:
19 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rdn
22 ; RV32IZFINXZDINX-LABEL: test_floor_si8:
23 ; RV32IZFINXZDINX: # %bb.0:
24 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rdn
25 ; RV32IZFINXZDINX-NEXT: ret
27 ; RV64IZFINXZDINX-LABEL: test_floor_si8:
28 ; RV64IZFINXZDINX: # %bb.0:
29 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rdn
30 ; RV64IZFINXZDINX-NEXT: ret
31 %a = call double @llvm.floor.f64(double %x)
32 %b = fptosi double %a to i8
36 define signext i16 @test_floor_si16(double %x) {
37 ; RV32IFD-LABEL: test_floor_si16:
39 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rdn
42 ; RV64IFD-LABEL: test_floor_si16:
44 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rdn
47 ; RV32IZFINXZDINX-LABEL: test_floor_si16:
48 ; RV32IZFINXZDINX: # %bb.0:
49 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rdn
50 ; RV32IZFINXZDINX-NEXT: ret
52 ; RV64IZFINXZDINX-LABEL: test_floor_si16:
53 ; RV64IZFINXZDINX: # %bb.0:
54 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rdn
55 ; RV64IZFINXZDINX-NEXT: ret
56 %a = call double @llvm.floor.f64(double %x)
57 %b = fptosi double %a to i16
61 define signext i32 @test_floor_si32(double %x) {
62 ; CHECKIFD-LABEL: test_floor_si32:
64 ; CHECKIFD-NEXT: fcvt.w.d a0, fa0, rdn
67 ; RV32IZFINXZDINX-LABEL: test_floor_si32:
68 ; RV32IZFINXZDINX: # %bb.0:
69 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rdn
70 ; RV32IZFINXZDINX-NEXT: ret
72 ; RV64IZFINXZDINX-LABEL: test_floor_si32:
73 ; RV64IZFINXZDINX: # %bb.0:
74 ; RV64IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rdn
75 ; RV64IZFINXZDINX-NEXT: ret
76 %a = call double @llvm.floor.f64(double %x)
77 %b = fptosi double %a to i32
81 define i64 @test_floor_si64(double %x) {
82 ; RV32IFD-LABEL: test_floor_si64:
84 ; RV32IFD-NEXT: addi sp, sp, -16
85 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
86 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
87 ; RV32IFD-NEXT: .cfi_offset ra, -4
88 ; RV32IFD-NEXT: call floor
89 ; RV32IFD-NEXT: call __fixdfdi
90 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
91 ; RV32IFD-NEXT: .cfi_restore ra
92 ; RV32IFD-NEXT: addi sp, sp, 16
93 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
96 ; RV64IFD-LABEL: test_floor_si64:
98 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rdn
101 ; RV32IZFINXZDINX-LABEL: test_floor_si64:
102 ; RV32IZFINXZDINX: # %bb.0:
103 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
104 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
105 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
106 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
107 ; RV32IZFINXZDINX-NEXT: call floor
108 ; RV32IZFINXZDINX-NEXT: call __fixdfdi
109 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
110 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
111 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
112 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
113 ; RV32IZFINXZDINX-NEXT: ret
115 ; RV64IZFINXZDINX-LABEL: test_floor_si64:
116 ; RV64IZFINXZDINX: # %bb.0:
117 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rdn
118 ; RV64IZFINXZDINX-NEXT: ret
119 %a = call double @llvm.floor.f64(double %x)
120 %b = fptosi double %a to i64
124 define zeroext i8 @test_floor_ui8(double %x) {
125 ; RV32IFD-LABEL: test_floor_ui8:
127 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rdn
130 ; RV64IFD-LABEL: test_floor_ui8:
132 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rdn
135 ; RV32IZFINXZDINX-LABEL: test_floor_ui8:
136 ; RV32IZFINXZDINX: # %bb.0:
137 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rdn
138 ; RV32IZFINXZDINX-NEXT: ret
140 ; RV64IZFINXZDINX-LABEL: test_floor_ui8:
141 ; RV64IZFINXZDINX: # %bb.0:
142 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rdn
143 ; RV64IZFINXZDINX-NEXT: ret
144 %a = call double @llvm.floor.f64(double %x)
145 %b = fptoui double %a to i8
149 define zeroext i16 @test_floor_ui16(double %x) {
150 ; RV32IFD-LABEL: test_floor_ui16:
152 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rdn
155 ; RV64IFD-LABEL: test_floor_ui16:
157 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rdn
160 ; RV32IZFINXZDINX-LABEL: test_floor_ui16:
161 ; RV32IZFINXZDINX: # %bb.0:
162 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rdn
163 ; RV32IZFINXZDINX-NEXT: ret
165 ; RV64IZFINXZDINX-LABEL: test_floor_ui16:
166 ; RV64IZFINXZDINX: # %bb.0:
167 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rdn
168 ; RV64IZFINXZDINX-NEXT: ret
169 %a = call double @llvm.floor.f64(double %x)
170 %b = fptoui double %a to i16
174 define signext i32 @test_floor_ui32(double %x) {
175 ; CHECKIFD-LABEL: test_floor_ui32:
177 ; CHECKIFD-NEXT: fcvt.wu.d a0, fa0, rdn
180 ; RV32IZFINXZDINX-LABEL: test_floor_ui32:
181 ; RV32IZFINXZDINX: # %bb.0:
182 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rdn
183 ; RV32IZFINXZDINX-NEXT: ret
185 ; RV64IZFINXZDINX-LABEL: test_floor_ui32:
186 ; RV64IZFINXZDINX: # %bb.0:
187 ; RV64IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rdn
188 ; RV64IZFINXZDINX-NEXT: ret
189 %a = call double @llvm.floor.f64(double %x)
190 %b = fptoui double %a to i32
194 define i64 @test_floor_ui64(double %x) {
195 ; RV32IFD-LABEL: test_floor_ui64:
197 ; RV32IFD-NEXT: addi sp, sp, -16
198 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
199 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
200 ; RV32IFD-NEXT: .cfi_offset ra, -4
201 ; RV32IFD-NEXT: call floor
202 ; RV32IFD-NEXT: call __fixunsdfdi
203 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
204 ; RV32IFD-NEXT: .cfi_restore ra
205 ; RV32IFD-NEXT: addi sp, sp, 16
206 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
209 ; RV64IFD-LABEL: test_floor_ui64:
211 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rdn
214 ; RV32IZFINXZDINX-LABEL: test_floor_ui64:
215 ; RV32IZFINXZDINX: # %bb.0:
216 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
217 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
218 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
219 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
220 ; RV32IZFINXZDINX-NEXT: call floor
221 ; RV32IZFINXZDINX-NEXT: call __fixunsdfdi
222 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
223 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
224 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
225 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
226 ; RV32IZFINXZDINX-NEXT: ret
228 ; RV64IZFINXZDINX-LABEL: test_floor_ui64:
229 ; RV64IZFINXZDINX: # %bb.0:
230 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rdn
231 ; RV64IZFINXZDINX-NEXT: ret
232 %a = call double @llvm.floor.f64(double %x)
233 %b = fptoui double %a to i64
237 define signext i8 @test_ceil_si8(double %x) {
238 ; RV32IFD-LABEL: test_ceil_si8:
240 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rup
243 ; RV64IFD-LABEL: test_ceil_si8:
245 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rup
248 ; RV32IZFINXZDINX-LABEL: test_ceil_si8:
249 ; RV32IZFINXZDINX: # %bb.0:
250 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rup
251 ; RV32IZFINXZDINX-NEXT: ret
253 ; RV64IZFINXZDINX-LABEL: test_ceil_si8:
254 ; RV64IZFINXZDINX: # %bb.0:
255 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rup
256 ; RV64IZFINXZDINX-NEXT: ret
257 %a = call double @llvm.ceil.f64(double %x)
258 %b = fptosi double %a to i8
262 define signext i16 @test_ceil_si16(double %x) {
263 ; RV32IFD-LABEL: test_ceil_si16:
265 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rup
268 ; RV64IFD-LABEL: test_ceil_si16:
270 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rup
273 ; RV32IZFINXZDINX-LABEL: test_ceil_si16:
274 ; RV32IZFINXZDINX: # %bb.0:
275 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rup
276 ; RV32IZFINXZDINX-NEXT: ret
278 ; RV64IZFINXZDINX-LABEL: test_ceil_si16:
279 ; RV64IZFINXZDINX: # %bb.0:
280 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rup
281 ; RV64IZFINXZDINX-NEXT: ret
282 %a = call double @llvm.ceil.f64(double %x)
283 %b = fptosi double %a to i16
287 define signext i32 @test_ceil_si32(double %x) {
288 ; CHECKIFD-LABEL: test_ceil_si32:
290 ; CHECKIFD-NEXT: fcvt.w.d a0, fa0, rup
293 ; RV32IZFINXZDINX-LABEL: test_ceil_si32:
294 ; RV32IZFINXZDINX: # %bb.0:
295 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rup
296 ; RV32IZFINXZDINX-NEXT: ret
298 ; RV64IZFINXZDINX-LABEL: test_ceil_si32:
299 ; RV64IZFINXZDINX: # %bb.0:
300 ; RV64IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rup
301 ; RV64IZFINXZDINX-NEXT: ret
302 %a = call double @llvm.ceil.f64(double %x)
303 %b = fptosi double %a to i32
307 define i64 @test_ceil_si64(double %x) {
308 ; RV32IFD-LABEL: test_ceil_si64:
310 ; RV32IFD-NEXT: addi sp, sp, -16
311 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
312 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
313 ; RV32IFD-NEXT: .cfi_offset ra, -4
314 ; RV32IFD-NEXT: call ceil
315 ; RV32IFD-NEXT: call __fixdfdi
316 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
317 ; RV32IFD-NEXT: .cfi_restore ra
318 ; RV32IFD-NEXT: addi sp, sp, 16
319 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
322 ; RV64IFD-LABEL: test_ceil_si64:
324 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rup
327 ; RV32IZFINXZDINX-LABEL: test_ceil_si64:
328 ; RV32IZFINXZDINX: # %bb.0:
329 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
330 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
331 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
332 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
333 ; RV32IZFINXZDINX-NEXT: call ceil
334 ; RV32IZFINXZDINX-NEXT: call __fixdfdi
335 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
336 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
337 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
338 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
339 ; RV32IZFINXZDINX-NEXT: ret
341 ; RV64IZFINXZDINX-LABEL: test_ceil_si64:
342 ; RV64IZFINXZDINX: # %bb.0:
343 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rup
344 ; RV64IZFINXZDINX-NEXT: ret
345 %a = call double @llvm.ceil.f64(double %x)
346 %b = fptosi double %a to i64
350 define zeroext i8 @test_ceil_ui8(double %x) {
351 ; RV32IFD-LABEL: test_ceil_ui8:
353 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rup
356 ; RV64IFD-LABEL: test_ceil_ui8:
358 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rup
361 ; RV32IZFINXZDINX-LABEL: test_ceil_ui8:
362 ; RV32IZFINXZDINX: # %bb.0:
363 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rup
364 ; RV32IZFINXZDINX-NEXT: ret
366 ; RV64IZFINXZDINX-LABEL: test_ceil_ui8:
367 ; RV64IZFINXZDINX: # %bb.0:
368 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rup
369 ; RV64IZFINXZDINX-NEXT: ret
370 %a = call double @llvm.ceil.f64(double %x)
371 %b = fptoui double %a to i8
375 define zeroext i16 @test_ceil_ui16(double %x) {
376 ; RV32IFD-LABEL: test_ceil_ui16:
378 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rup
381 ; RV64IFD-LABEL: test_ceil_ui16:
383 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rup
386 ; RV32IZFINXZDINX-LABEL: test_ceil_ui16:
387 ; RV32IZFINXZDINX: # %bb.0:
388 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rup
389 ; RV32IZFINXZDINX-NEXT: ret
391 ; RV64IZFINXZDINX-LABEL: test_ceil_ui16:
392 ; RV64IZFINXZDINX: # %bb.0:
393 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rup
394 ; RV64IZFINXZDINX-NEXT: ret
395 %a = call double @llvm.ceil.f64(double %x)
396 %b = fptoui double %a to i16
400 define signext i32 @test_ceil_ui32(double %x) {
401 ; CHECKIFD-LABEL: test_ceil_ui32:
403 ; CHECKIFD-NEXT: fcvt.wu.d a0, fa0, rup
406 ; RV32IZFINXZDINX-LABEL: test_ceil_ui32:
407 ; RV32IZFINXZDINX: # %bb.0:
408 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rup
409 ; RV32IZFINXZDINX-NEXT: ret
411 ; RV64IZFINXZDINX-LABEL: test_ceil_ui32:
412 ; RV64IZFINXZDINX: # %bb.0:
413 ; RV64IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rup
414 ; RV64IZFINXZDINX-NEXT: ret
415 %a = call double @llvm.ceil.f64(double %x)
416 %b = fptoui double %a to i32
420 define i64 @test_ceil_ui64(double %x) {
421 ; RV32IFD-LABEL: test_ceil_ui64:
423 ; RV32IFD-NEXT: addi sp, sp, -16
424 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
425 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
426 ; RV32IFD-NEXT: .cfi_offset ra, -4
427 ; RV32IFD-NEXT: call ceil
428 ; RV32IFD-NEXT: call __fixunsdfdi
429 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
430 ; RV32IFD-NEXT: .cfi_restore ra
431 ; RV32IFD-NEXT: addi sp, sp, 16
432 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
435 ; RV64IFD-LABEL: test_ceil_ui64:
437 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rup
440 ; RV32IZFINXZDINX-LABEL: test_ceil_ui64:
441 ; RV32IZFINXZDINX: # %bb.0:
442 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
443 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
444 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
445 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
446 ; RV32IZFINXZDINX-NEXT: call ceil
447 ; RV32IZFINXZDINX-NEXT: call __fixunsdfdi
448 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
449 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
450 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
451 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
452 ; RV32IZFINXZDINX-NEXT: ret
454 ; RV64IZFINXZDINX-LABEL: test_ceil_ui64:
455 ; RV64IZFINXZDINX: # %bb.0:
456 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rup
457 ; RV64IZFINXZDINX-NEXT: ret
458 %a = call double @llvm.ceil.f64(double %x)
459 %b = fptoui double %a to i64
463 define signext i8 @test_trunc_si8(double %x) {
464 ; RV32IFD-LABEL: test_trunc_si8:
466 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rtz
469 ; RV64IFD-LABEL: test_trunc_si8:
471 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rtz
474 ; RV32IZFINXZDINX-LABEL: test_trunc_si8:
475 ; RV32IZFINXZDINX: # %bb.0:
476 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rtz
477 ; RV32IZFINXZDINX-NEXT: ret
479 ; RV64IZFINXZDINX-LABEL: test_trunc_si8:
480 ; RV64IZFINXZDINX: # %bb.0:
481 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rtz
482 ; RV64IZFINXZDINX-NEXT: ret
483 %a = call double @llvm.trunc.f64(double %x)
484 %b = fptosi double %a to i8
488 define signext i16 @test_trunc_si16(double %x) {
489 ; RV32IFD-LABEL: test_trunc_si16:
491 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rtz
494 ; RV64IFD-LABEL: test_trunc_si16:
496 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rtz
499 ; RV32IZFINXZDINX-LABEL: test_trunc_si16:
500 ; RV32IZFINXZDINX: # %bb.0:
501 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rtz
502 ; RV32IZFINXZDINX-NEXT: ret
504 ; RV64IZFINXZDINX-LABEL: test_trunc_si16:
505 ; RV64IZFINXZDINX: # %bb.0:
506 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rtz
507 ; RV64IZFINXZDINX-NEXT: ret
508 %a = call double @llvm.trunc.f64(double %x)
509 %b = fptosi double %a to i16
513 define signext i32 @test_trunc_si32(double %x) {
514 ; CHECKIFD-LABEL: test_trunc_si32:
516 ; CHECKIFD-NEXT: fcvt.w.d a0, fa0, rtz
519 ; RV32IZFINXZDINX-LABEL: test_trunc_si32:
520 ; RV32IZFINXZDINX: # %bb.0:
521 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rtz
522 ; RV32IZFINXZDINX-NEXT: ret
524 ; RV64IZFINXZDINX-LABEL: test_trunc_si32:
525 ; RV64IZFINXZDINX: # %bb.0:
526 ; RV64IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rtz
527 ; RV64IZFINXZDINX-NEXT: ret
528 %a = call double @llvm.trunc.f64(double %x)
529 %b = fptosi double %a to i32
533 define i64 @test_trunc_si64(double %x) {
534 ; RV32IFD-LABEL: test_trunc_si64:
536 ; RV32IFD-NEXT: addi sp, sp, -16
537 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
538 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
539 ; RV32IFD-NEXT: .cfi_offset ra, -4
540 ; RV32IFD-NEXT: call trunc
541 ; RV32IFD-NEXT: call __fixdfdi
542 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
543 ; RV32IFD-NEXT: .cfi_restore ra
544 ; RV32IFD-NEXT: addi sp, sp, 16
545 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
548 ; RV64IFD-LABEL: test_trunc_si64:
550 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rtz
553 ; RV32IZFINXZDINX-LABEL: test_trunc_si64:
554 ; RV32IZFINXZDINX: # %bb.0:
555 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
556 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
557 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
558 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
559 ; RV32IZFINXZDINX-NEXT: call trunc
560 ; RV32IZFINXZDINX-NEXT: call __fixdfdi
561 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
562 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
563 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
564 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
565 ; RV32IZFINXZDINX-NEXT: ret
567 ; RV64IZFINXZDINX-LABEL: test_trunc_si64:
568 ; RV64IZFINXZDINX: # %bb.0:
569 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rtz
570 ; RV64IZFINXZDINX-NEXT: ret
571 %a = call double @llvm.trunc.f64(double %x)
572 %b = fptosi double %a to i64
576 define zeroext i8 @test_trunc_ui8(double %x) {
577 ; RV32IFD-LABEL: test_trunc_ui8:
579 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rtz
582 ; RV64IFD-LABEL: test_trunc_ui8:
584 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rtz
587 ; RV32IZFINXZDINX-LABEL: test_trunc_ui8:
588 ; RV32IZFINXZDINX: # %bb.0:
589 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rtz
590 ; RV32IZFINXZDINX-NEXT: ret
592 ; RV64IZFINXZDINX-LABEL: test_trunc_ui8:
593 ; RV64IZFINXZDINX: # %bb.0:
594 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rtz
595 ; RV64IZFINXZDINX-NEXT: ret
596 %a = call double @llvm.trunc.f64(double %x)
597 %b = fptoui double %a to i8
601 define zeroext i16 @test_trunc_ui16(double %x) {
602 ; RV32IFD-LABEL: test_trunc_ui16:
604 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rtz
607 ; RV64IFD-LABEL: test_trunc_ui16:
609 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rtz
612 ; RV32IZFINXZDINX-LABEL: test_trunc_ui16:
613 ; RV32IZFINXZDINX: # %bb.0:
614 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rtz
615 ; RV32IZFINXZDINX-NEXT: ret
617 ; RV64IZFINXZDINX-LABEL: test_trunc_ui16:
618 ; RV64IZFINXZDINX: # %bb.0:
619 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rtz
620 ; RV64IZFINXZDINX-NEXT: ret
621 %a = call double @llvm.trunc.f64(double %x)
622 %b = fptoui double %a to i16
626 define signext i32 @test_trunc_ui32(double %x) {
627 ; CHECKIFD-LABEL: test_trunc_ui32:
629 ; CHECKIFD-NEXT: fcvt.wu.d a0, fa0, rtz
632 ; RV32IZFINXZDINX-LABEL: test_trunc_ui32:
633 ; RV32IZFINXZDINX: # %bb.0:
634 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rtz
635 ; RV32IZFINXZDINX-NEXT: ret
637 ; RV64IZFINXZDINX-LABEL: test_trunc_ui32:
638 ; RV64IZFINXZDINX: # %bb.0:
639 ; RV64IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rtz
640 ; RV64IZFINXZDINX-NEXT: ret
641 %a = call double @llvm.trunc.f64(double %x)
642 %b = fptoui double %a to i32
646 define i64 @test_trunc_ui64(double %x) {
647 ; RV32IFD-LABEL: test_trunc_ui64:
649 ; RV32IFD-NEXT: addi sp, sp, -16
650 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
651 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
652 ; RV32IFD-NEXT: .cfi_offset ra, -4
653 ; RV32IFD-NEXT: call trunc
654 ; RV32IFD-NEXT: call __fixunsdfdi
655 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
656 ; RV32IFD-NEXT: .cfi_restore ra
657 ; RV32IFD-NEXT: addi sp, sp, 16
658 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
661 ; RV64IFD-LABEL: test_trunc_ui64:
663 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rtz
666 ; RV32IZFINXZDINX-LABEL: test_trunc_ui64:
667 ; RV32IZFINXZDINX: # %bb.0:
668 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
669 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
670 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
671 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
672 ; RV32IZFINXZDINX-NEXT: call trunc
673 ; RV32IZFINXZDINX-NEXT: call __fixunsdfdi
674 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
675 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
676 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
677 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
678 ; RV32IZFINXZDINX-NEXT: ret
680 ; RV64IZFINXZDINX-LABEL: test_trunc_ui64:
681 ; RV64IZFINXZDINX: # %bb.0:
682 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rtz
683 ; RV64IZFINXZDINX-NEXT: ret
684 %a = call double @llvm.trunc.f64(double %x)
685 %b = fptoui double %a to i64
689 define signext i8 @test_round_si8(double %x) {
690 ; RV32IFD-LABEL: test_round_si8:
692 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rmm
695 ; RV64IFD-LABEL: test_round_si8:
697 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rmm
700 ; RV32IZFINXZDINX-LABEL: test_round_si8:
701 ; RV32IZFINXZDINX: # %bb.0:
702 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rmm
703 ; RV32IZFINXZDINX-NEXT: ret
705 ; RV64IZFINXZDINX-LABEL: test_round_si8:
706 ; RV64IZFINXZDINX: # %bb.0:
707 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rmm
708 ; RV64IZFINXZDINX-NEXT: ret
709 %a = call double @llvm.round.f64(double %x)
710 %b = fptosi double %a to i8
714 define signext i16 @test_round_si16(double %x) {
715 ; RV32IFD-LABEL: test_round_si16:
717 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rmm
720 ; RV64IFD-LABEL: test_round_si16:
722 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rmm
725 ; RV32IZFINXZDINX-LABEL: test_round_si16:
726 ; RV32IZFINXZDINX: # %bb.0:
727 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rmm
728 ; RV32IZFINXZDINX-NEXT: ret
730 ; RV64IZFINXZDINX-LABEL: test_round_si16:
731 ; RV64IZFINXZDINX: # %bb.0:
732 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rmm
733 ; RV64IZFINXZDINX-NEXT: ret
734 %a = call double @llvm.round.f64(double %x)
735 %b = fptosi double %a to i16
739 define signext i32 @test_round_si32(double %x) {
740 ; CHECKIFD-LABEL: test_round_si32:
742 ; CHECKIFD-NEXT: fcvt.w.d a0, fa0, rmm
745 ; RV32IZFINXZDINX-LABEL: test_round_si32:
746 ; RV32IZFINXZDINX: # %bb.0:
747 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rmm
748 ; RV32IZFINXZDINX-NEXT: ret
750 ; RV64IZFINXZDINX-LABEL: test_round_si32:
751 ; RV64IZFINXZDINX: # %bb.0:
752 ; RV64IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rmm
753 ; RV64IZFINXZDINX-NEXT: ret
754 %a = call double @llvm.round.f64(double %x)
755 %b = fptosi double %a to i32
759 define i64 @test_round_si64(double %x) {
760 ; RV32IFD-LABEL: test_round_si64:
762 ; RV32IFD-NEXT: addi sp, sp, -16
763 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
764 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
765 ; RV32IFD-NEXT: .cfi_offset ra, -4
766 ; RV32IFD-NEXT: call round
767 ; RV32IFD-NEXT: call __fixdfdi
768 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
769 ; RV32IFD-NEXT: .cfi_restore ra
770 ; RV32IFD-NEXT: addi sp, sp, 16
771 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
774 ; RV64IFD-LABEL: test_round_si64:
776 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rmm
779 ; RV32IZFINXZDINX-LABEL: test_round_si64:
780 ; RV32IZFINXZDINX: # %bb.0:
781 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
782 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
783 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
784 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
785 ; RV32IZFINXZDINX-NEXT: call round
786 ; RV32IZFINXZDINX-NEXT: call __fixdfdi
787 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
788 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
789 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
790 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
791 ; RV32IZFINXZDINX-NEXT: ret
793 ; RV64IZFINXZDINX-LABEL: test_round_si64:
794 ; RV64IZFINXZDINX: # %bb.0:
795 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rmm
796 ; RV64IZFINXZDINX-NEXT: ret
797 %a = call double @llvm.round.f64(double %x)
798 %b = fptosi double %a to i64
802 define zeroext i8 @test_round_ui8(double %x) {
803 ; RV32IFD-LABEL: test_round_ui8:
805 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rmm
808 ; RV64IFD-LABEL: test_round_ui8:
810 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rmm
813 ; RV32IZFINXZDINX-LABEL: test_round_ui8:
814 ; RV32IZFINXZDINX: # %bb.0:
815 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rmm
816 ; RV32IZFINXZDINX-NEXT: ret
818 ; RV64IZFINXZDINX-LABEL: test_round_ui8:
819 ; RV64IZFINXZDINX: # %bb.0:
820 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rmm
821 ; RV64IZFINXZDINX-NEXT: ret
822 %a = call double @llvm.round.f64(double %x)
823 %b = fptoui double %a to i8
827 define zeroext i16 @test_round_ui16(double %x) {
828 ; RV32IFD-LABEL: test_round_ui16:
830 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rmm
833 ; RV64IFD-LABEL: test_round_ui16:
835 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rmm
838 ; RV32IZFINXZDINX-LABEL: test_round_ui16:
839 ; RV32IZFINXZDINX: # %bb.0:
840 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rmm
841 ; RV32IZFINXZDINX-NEXT: ret
843 ; RV64IZFINXZDINX-LABEL: test_round_ui16:
844 ; RV64IZFINXZDINX: # %bb.0:
845 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rmm
846 ; RV64IZFINXZDINX-NEXT: ret
847 %a = call double @llvm.round.f64(double %x)
848 %b = fptoui double %a to i16
852 define signext i32 @test_round_ui32(double %x) {
853 ; CHECKIFD-LABEL: test_round_ui32:
855 ; CHECKIFD-NEXT: fcvt.wu.d a0, fa0, rmm
858 ; RV32IZFINXZDINX-LABEL: test_round_ui32:
859 ; RV32IZFINXZDINX: # %bb.0:
860 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rmm
861 ; RV32IZFINXZDINX-NEXT: ret
863 ; RV64IZFINXZDINX-LABEL: test_round_ui32:
864 ; RV64IZFINXZDINX: # %bb.0:
865 ; RV64IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rmm
866 ; RV64IZFINXZDINX-NEXT: ret
867 %a = call double @llvm.round.f64(double %x)
868 %b = fptoui double %a to i32
872 define i64 @test_round_ui64(double %x) {
873 ; RV32IFD-LABEL: test_round_ui64:
875 ; RV32IFD-NEXT: addi sp, sp, -16
876 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
877 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
878 ; RV32IFD-NEXT: .cfi_offset ra, -4
879 ; RV32IFD-NEXT: call round
880 ; RV32IFD-NEXT: call __fixunsdfdi
881 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
882 ; RV32IFD-NEXT: .cfi_restore ra
883 ; RV32IFD-NEXT: addi sp, sp, 16
884 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
887 ; RV64IFD-LABEL: test_round_ui64:
889 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rmm
892 ; RV32IZFINXZDINX-LABEL: test_round_ui64:
893 ; RV32IZFINXZDINX: # %bb.0:
894 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
895 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
896 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
897 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
898 ; RV32IZFINXZDINX-NEXT: call round
899 ; RV32IZFINXZDINX-NEXT: call __fixunsdfdi
900 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
901 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
902 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
903 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
904 ; RV32IZFINXZDINX-NEXT: ret
906 ; RV64IZFINXZDINX-LABEL: test_round_ui64:
907 ; RV64IZFINXZDINX: # %bb.0:
908 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rmm
909 ; RV64IZFINXZDINX-NEXT: ret
910 %a = call double @llvm.round.f64(double %x)
911 %b = fptoui double %a to i64
915 define signext i8 @test_roundeven_si8(double %x) {
916 ; RV32IFD-LABEL: test_roundeven_si8:
918 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rne
921 ; RV64IFD-LABEL: test_roundeven_si8:
923 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rne
926 ; RV32IZFINXZDINX-LABEL: test_roundeven_si8:
927 ; RV32IZFINXZDINX: # %bb.0:
928 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rne
929 ; RV32IZFINXZDINX-NEXT: ret
931 ; RV64IZFINXZDINX-LABEL: test_roundeven_si8:
932 ; RV64IZFINXZDINX: # %bb.0:
933 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rne
934 ; RV64IZFINXZDINX-NEXT: ret
935 %a = call double @llvm.roundeven.f64(double %x)
936 %b = fptosi double %a to i8
940 define signext i16 @test_roundeven_si16(double %x) {
941 ; RV32IFD-LABEL: test_roundeven_si16:
943 ; RV32IFD-NEXT: fcvt.w.d a0, fa0, rne
946 ; RV64IFD-LABEL: test_roundeven_si16:
948 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rne
951 ; RV32IZFINXZDINX-LABEL: test_roundeven_si16:
952 ; RV32IZFINXZDINX: # %bb.0:
953 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rne
954 ; RV32IZFINXZDINX-NEXT: ret
956 ; RV64IZFINXZDINX-LABEL: test_roundeven_si16:
957 ; RV64IZFINXZDINX: # %bb.0:
958 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rne
959 ; RV64IZFINXZDINX-NEXT: ret
960 %a = call double @llvm.roundeven.f64(double %x)
961 %b = fptosi double %a to i16
965 define signext i32 @test_roundeven_si32(double %x) {
966 ; CHECKIFD-LABEL: test_roundeven_si32:
968 ; CHECKIFD-NEXT: fcvt.w.d a0, fa0, rne
971 ; RV32IZFINXZDINX-LABEL: test_roundeven_si32:
972 ; RV32IZFINXZDINX: # %bb.0:
973 ; RV32IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rne
974 ; RV32IZFINXZDINX-NEXT: ret
976 ; RV64IZFINXZDINX-LABEL: test_roundeven_si32:
977 ; RV64IZFINXZDINX: # %bb.0:
978 ; RV64IZFINXZDINX-NEXT: fcvt.w.d a0, a0, rne
979 ; RV64IZFINXZDINX-NEXT: ret
980 %a = call double @llvm.roundeven.f64(double %x)
981 %b = fptosi double %a to i32
985 define i64 @test_roundeven_si64(double %x) {
986 ; RV32IFD-LABEL: test_roundeven_si64:
988 ; RV32IFD-NEXT: addi sp, sp, -16
989 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
990 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
991 ; RV32IFD-NEXT: .cfi_offset ra, -4
992 ; RV32IFD-NEXT: call roundeven
993 ; RV32IFD-NEXT: call __fixdfdi
994 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
995 ; RV32IFD-NEXT: .cfi_restore ra
996 ; RV32IFD-NEXT: addi sp, sp, 16
997 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
1000 ; RV64IFD-LABEL: test_roundeven_si64:
1002 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rne
1005 ; RV32IZFINXZDINX-LABEL: test_roundeven_si64:
1006 ; RV32IZFINXZDINX: # %bb.0:
1007 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1008 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1009 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1010 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1011 ; RV32IZFINXZDINX-NEXT: call roundeven
1012 ; RV32IZFINXZDINX-NEXT: call __fixdfdi
1013 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1014 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1015 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1016 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1017 ; RV32IZFINXZDINX-NEXT: ret
1019 ; RV64IZFINXZDINX-LABEL: test_roundeven_si64:
1020 ; RV64IZFINXZDINX: # %bb.0:
1021 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a0, a0, rne
1022 ; RV64IZFINXZDINX-NEXT: ret
1023 %a = call double @llvm.roundeven.f64(double %x)
1024 %b = fptosi double %a to i64
1028 define zeroext i8 @test_roundeven_ui8(double %x) {
1029 ; RV32IFD-LABEL: test_roundeven_ui8:
1031 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rne
1034 ; RV64IFD-LABEL: test_roundeven_ui8:
1036 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rne
1039 ; RV32IZFINXZDINX-LABEL: test_roundeven_ui8:
1040 ; RV32IZFINXZDINX: # %bb.0:
1041 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rne
1042 ; RV32IZFINXZDINX-NEXT: ret
1044 ; RV64IZFINXZDINX-LABEL: test_roundeven_ui8:
1045 ; RV64IZFINXZDINX: # %bb.0:
1046 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rne
1047 ; RV64IZFINXZDINX-NEXT: ret
1048 %a = call double @llvm.roundeven.f64(double %x)
1049 %b = fptoui double %a to i8
1053 define zeroext i16 @test_roundeven_ui16(double %x) {
1054 ; RV32IFD-LABEL: test_roundeven_ui16:
1056 ; RV32IFD-NEXT: fcvt.wu.d a0, fa0, rne
1059 ; RV64IFD-LABEL: test_roundeven_ui16:
1061 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rne
1064 ; RV32IZFINXZDINX-LABEL: test_roundeven_ui16:
1065 ; RV32IZFINXZDINX: # %bb.0:
1066 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rne
1067 ; RV32IZFINXZDINX-NEXT: ret
1069 ; RV64IZFINXZDINX-LABEL: test_roundeven_ui16:
1070 ; RV64IZFINXZDINX: # %bb.0:
1071 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rne
1072 ; RV64IZFINXZDINX-NEXT: ret
1073 %a = call double @llvm.roundeven.f64(double %x)
1074 %b = fptoui double %a to i16
1078 define signext i32 @test_roundeven_ui32(double %x) {
1079 ; CHECKIFD-LABEL: test_roundeven_ui32:
1080 ; CHECKIFD: # %bb.0:
1081 ; CHECKIFD-NEXT: fcvt.wu.d a0, fa0, rne
1082 ; CHECKIFD-NEXT: ret
1084 ; RV32IZFINXZDINX-LABEL: test_roundeven_ui32:
1085 ; RV32IZFINXZDINX: # %bb.0:
1086 ; RV32IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rne
1087 ; RV32IZFINXZDINX-NEXT: ret
1089 ; RV64IZFINXZDINX-LABEL: test_roundeven_ui32:
1090 ; RV64IZFINXZDINX: # %bb.0:
1091 ; RV64IZFINXZDINX-NEXT: fcvt.wu.d a0, a0, rne
1092 ; RV64IZFINXZDINX-NEXT: ret
1093 %a = call double @llvm.roundeven.f64(double %x)
1094 %b = fptoui double %a to i32
1098 define i64 @test_roundeven_ui64(double %x) {
1099 ; RV32IFD-LABEL: test_roundeven_ui64:
1101 ; RV32IFD-NEXT: addi sp, sp, -16
1102 ; RV32IFD-NEXT: .cfi_def_cfa_offset 16
1103 ; RV32IFD-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1104 ; RV32IFD-NEXT: .cfi_offset ra, -4
1105 ; RV32IFD-NEXT: call roundeven
1106 ; RV32IFD-NEXT: call __fixunsdfdi
1107 ; RV32IFD-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1108 ; RV32IFD-NEXT: .cfi_restore ra
1109 ; RV32IFD-NEXT: addi sp, sp, 16
1110 ; RV32IFD-NEXT: .cfi_def_cfa_offset 0
1113 ; RV64IFD-LABEL: test_roundeven_ui64:
1115 ; RV64IFD-NEXT: fcvt.lu.d a0, fa0, rne
1118 ; RV32IZFINXZDINX-LABEL: test_roundeven_ui64:
1119 ; RV32IZFINXZDINX: # %bb.0:
1120 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1121 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1122 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1123 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1124 ; RV32IZFINXZDINX-NEXT: call roundeven
1125 ; RV32IZFINXZDINX-NEXT: call __fixunsdfdi
1126 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1127 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1128 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1129 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1130 ; RV32IZFINXZDINX-NEXT: ret
1132 ; RV64IZFINXZDINX-LABEL: test_roundeven_ui64:
1133 ; RV64IZFINXZDINX: # %bb.0:
1134 ; RV64IZFINXZDINX-NEXT: fcvt.lu.d a0, a0, rne
1135 ; RV64IZFINXZDINX-NEXT: ret
1136 %a = call double @llvm.roundeven.f64(double %x)
1137 %b = fptoui double %a to i64
1141 define double @test_floor_double(double %x) {
1142 ; RV32IFD-LABEL: test_floor_double:
1144 ; RV32IFD-NEXT: tail floor
1146 ; RV64IFD-LABEL: test_floor_double:
1148 ; RV64IFD-NEXT: lui a0, %hi(.LCPI40_0)
1149 ; RV64IFD-NEXT: fld fa5, %lo(.LCPI40_0)(a0)
1150 ; RV64IFD-NEXT: fabs.d fa4, fa0
1151 ; RV64IFD-NEXT: flt.d a0, fa4, fa5
1152 ; RV64IFD-NEXT: beqz a0, .LBB40_2
1153 ; RV64IFD-NEXT: # %bb.1:
1154 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rdn
1155 ; RV64IFD-NEXT: fcvt.d.l fa5, a0, rdn
1156 ; RV64IFD-NEXT: fsgnj.d fa0, fa5, fa0
1157 ; RV64IFD-NEXT: .LBB40_2:
1160 ; RV32IZFINXZDINX-LABEL: test_floor_double:
1161 ; RV32IZFINXZDINX: # %bb.0:
1162 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1163 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1164 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1165 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1166 ; RV32IZFINXZDINX-NEXT: call floor
1167 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1168 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1169 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1170 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1171 ; RV32IZFINXZDINX-NEXT: ret
1173 ; RV64IZFINXZDINX-LABEL: test_floor_double:
1174 ; RV64IZFINXZDINX: # %bb.0:
1175 ; RV64IZFINXZDINX-NEXT: li a1, 1075
1176 ; RV64IZFINXZDINX-NEXT: slli a1, a1, 52
1177 ; RV64IZFINXZDINX-NEXT: fabs.d a2, a0
1178 ; RV64IZFINXZDINX-NEXT: flt.d a1, a2, a1
1179 ; RV64IZFINXZDINX-NEXT: beqz a1, .LBB40_2
1180 ; RV64IZFINXZDINX-NEXT: # %bb.1:
1181 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a1, a0, rdn
1182 ; RV64IZFINXZDINX-NEXT: fcvt.d.l a1, a1, rdn
1183 ; RV64IZFINXZDINX-NEXT: fsgnj.d a0, a1, a0
1184 ; RV64IZFINXZDINX-NEXT: .LBB40_2:
1185 ; RV64IZFINXZDINX-NEXT: ret
1186 %a = call double @llvm.floor.f64(double %x)
1190 define double @test_ceil_double(double %x) {
1191 ; RV32IFD-LABEL: test_ceil_double:
1193 ; RV32IFD-NEXT: tail ceil
1195 ; RV64IFD-LABEL: test_ceil_double:
1197 ; RV64IFD-NEXT: lui a0, %hi(.LCPI41_0)
1198 ; RV64IFD-NEXT: fld fa5, %lo(.LCPI41_0)(a0)
1199 ; RV64IFD-NEXT: fabs.d fa4, fa0
1200 ; RV64IFD-NEXT: flt.d a0, fa4, fa5
1201 ; RV64IFD-NEXT: beqz a0, .LBB41_2
1202 ; RV64IFD-NEXT: # %bb.1:
1203 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rup
1204 ; RV64IFD-NEXT: fcvt.d.l fa5, a0, rup
1205 ; RV64IFD-NEXT: fsgnj.d fa0, fa5, fa0
1206 ; RV64IFD-NEXT: .LBB41_2:
1209 ; RV32IZFINXZDINX-LABEL: test_ceil_double:
1210 ; RV32IZFINXZDINX: # %bb.0:
1211 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1212 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1213 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1214 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1215 ; RV32IZFINXZDINX-NEXT: call ceil
1216 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1217 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1218 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1219 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1220 ; RV32IZFINXZDINX-NEXT: ret
1222 ; RV64IZFINXZDINX-LABEL: test_ceil_double:
1223 ; RV64IZFINXZDINX: # %bb.0:
1224 ; RV64IZFINXZDINX-NEXT: li a1, 1075
1225 ; RV64IZFINXZDINX-NEXT: slli a1, a1, 52
1226 ; RV64IZFINXZDINX-NEXT: fabs.d a2, a0
1227 ; RV64IZFINXZDINX-NEXT: flt.d a1, a2, a1
1228 ; RV64IZFINXZDINX-NEXT: beqz a1, .LBB41_2
1229 ; RV64IZFINXZDINX-NEXT: # %bb.1:
1230 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a1, a0, rup
1231 ; RV64IZFINXZDINX-NEXT: fcvt.d.l a1, a1, rup
1232 ; RV64IZFINXZDINX-NEXT: fsgnj.d a0, a1, a0
1233 ; RV64IZFINXZDINX-NEXT: .LBB41_2:
1234 ; RV64IZFINXZDINX-NEXT: ret
1235 %a = call double @llvm.ceil.f64(double %x)
1239 define double @test_trunc_double(double %x) {
1240 ; RV32IFD-LABEL: test_trunc_double:
1242 ; RV32IFD-NEXT: tail trunc
1244 ; RV64IFD-LABEL: test_trunc_double:
1246 ; RV64IFD-NEXT: lui a0, %hi(.LCPI42_0)
1247 ; RV64IFD-NEXT: fld fa5, %lo(.LCPI42_0)(a0)
1248 ; RV64IFD-NEXT: fabs.d fa4, fa0
1249 ; RV64IFD-NEXT: flt.d a0, fa4, fa5
1250 ; RV64IFD-NEXT: beqz a0, .LBB42_2
1251 ; RV64IFD-NEXT: # %bb.1:
1252 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rtz
1253 ; RV64IFD-NEXT: fcvt.d.l fa5, a0, rtz
1254 ; RV64IFD-NEXT: fsgnj.d fa0, fa5, fa0
1255 ; RV64IFD-NEXT: .LBB42_2:
1258 ; RV32IZFINXZDINX-LABEL: test_trunc_double:
1259 ; RV32IZFINXZDINX: # %bb.0:
1260 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1261 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1262 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1263 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1264 ; RV32IZFINXZDINX-NEXT: call trunc
1265 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1266 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1267 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1268 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1269 ; RV32IZFINXZDINX-NEXT: ret
1271 ; RV64IZFINXZDINX-LABEL: test_trunc_double:
1272 ; RV64IZFINXZDINX: # %bb.0:
1273 ; RV64IZFINXZDINX-NEXT: li a1, 1075
1274 ; RV64IZFINXZDINX-NEXT: slli a1, a1, 52
1275 ; RV64IZFINXZDINX-NEXT: fabs.d a2, a0
1276 ; RV64IZFINXZDINX-NEXT: flt.d a1, a2, a1
1277 ; RV64IZFINXZDINX-NEXT: beqz a1, .LBB42_2
1278 ; RV64IZFINXZDINX-NEXT: # %bb.1:
1279 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a1, a0, rtz
1280 ; RV64IZFINXZDINX-NEXT: fcvt.d.l a1, a1, rtz
1281 ; RV64IZFINXZDINX-NEXT: fsgnj.d a0, a1, a0
1282 ; RV64IZFINXZDINX-NEXT: .LBB42_2:
1283 ; RV64IZFINXZDINX-NEXT: ret
1284 %a = call double @llvm.trunc.f64(double %x)
1288 define double @test_round_double(double %x) {
1289 ; RV32IFD-LABEL: test_round_double:
1291 ; RV32IFD-NEXT: tail round
1293 ; RV64IFD-LABEL: test_round_double:
1295 ; RV64IFD-NEXT: lui a0, %hi(.LCPI43_0)
1296 ; RV64IFD-NEXT: fld fa5, %lo(.LCPI43_0)(a0)
1297 ; RV64IFD-NEXT: fabs.d fa4, fa0
1298 ; RV64IFD-NEXT: flt.d a0, fa4, fa5
1299 ; RV64IFD-NEXT: beqz a0, .LBB43_2
1300 ; RV64IFD-NEXT: # %bb.1:
1301 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rmm
1302 ; RV64IFD-NEXT: fcvt.d.l fa5, a0, rmm
1303 ; RV64IFD-NEXT: fsgnj.d fa0, fa5, fa0
1304 ; RV64IFD-NEXT: .LBB43_2:
1307 ; RV32IZFINXZDINX-LABEL: test_round_double:
1308 ; RV32IZFINXZDINX: # %bb.0:
1309 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1310 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1311 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1312 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1313 ; RV32IZFINXZDINX-NEXT: call round
1314 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1315 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1316 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1317 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1318 ; RV32IZFINXZDINX-NEXT: ret
1320 ; RV64IZFINXZDINX-LABEL: test_round_double:
1321 ; RV64IZFINXZDINX: # %bb.0:
1322 ; RV64IZFINXZDINX-NEXT: li a1, 1075
1323 ; RV64IZFINXZDINX-NEXT: slli a1, a1, 52
1324 ; RV64IZFINXZDINX-NEXT: fabs.d a2, a0
1325 ; RV64IZFINXZDINX-NEXT: flt.d a1, a2, a1
1326 ; RV64IZFINXZDINX-NEXT: beqz a1, .LBB43_2
1327 ; RV64IZFINXZDINX-NEXT: # %bb.1:
1328 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a1, a0, rmm
1329 ; RV64IZFINXZDINX-NEXT: fcvt.d.l a1, a1, rmm
1330 ; RV64IZFINXZDINX-NEXT: fsgnj.d a0, a1, a0
1331 ; RV64IZFINXZDINX-NEXT: .LBB43_2:
1332 ; RV64IZFINXZDINX-NEXT: ret
1333 %a = call double @llvm.round.f64(double %x)
1337 define double @test_roundeven_double(double %x) {
1338 ; RV32IFD-LABEL: test_roundeven_double:
1340 ; RV32IFD-NEXT: tail roundeven
1342 ; RV64IFD-LABEL: test_roundeven_double:
1344 ; RV64IFD-NEXT: lui a0, %hi(.LCPI44_0)
1345 ; RV64IFD-NEXT: fld fa5, %lo(.LCPI44_0)(a0)
1346 ; RV64IFD-NEXT: fabs.d fa4, fa0
1347 ; RV64IFD-NEXT: flt.d a0, fa4, fa5
1348 ; RV64IFD-NEXT: beqz a0, .LBB44_2
1349 ; RV64IFD-NEXT: # %bb.1:
1350 ; RV64IFD-NEXT: fcvt.l.d a0, fa0, rne
1351 ; RV64IFD-NEXT: fcvt.d.l fa5, a0, rne
1352 ; RV64IFD-NEXT: fsgnj.d fa0, fa5, fa0
1353 ; RV64IFD-NEXT: .LBB44_2:
1356 ; RV32IZFINXZDINX-LABEL: test_roundeven_double:
1357 ; RV32IZFINXZDINX: # %bb.0:
1358 ; RV32IZFINXZDINX-NEXT: addi sp, sp, -16
1359 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 16
1360 ; RV32IZFINXZDINX-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1361 ; RV32IZFINXZDINX-NEXT: .cfi_offset ra, -4
1362 ; RV32IZFINXZDINX-NEXT: call roundeven
1363 ; RV32IZFINXZDINX-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1364 ; RV32IZFINXZDINX-NEXT: .cfi_restore ra
1365 ; RV32IZFINXZDINX-NEXT: addi sp, sp, 16
1366 ; RV32IZFINXZDINX-NEXT: .cfi_def_cfa_offset 0
1367 ; RV32IZFINXZDINX-NEXT: ret
1369 ; RV64IZFINXZDINX-LABEL: test_roundeven_double:
1370 ; RV64IZFINXZDINX: # %bb.0:
1371 ; RV64IZFINXZDINX-NEXT: li a1, 1075
1372 ; RV64IZFINXZDINX-NEXT: slli a1, a1, 52
1373 ; RV64IZFINXZDINX-NEXT: fabs.d a2, a0
1374 ; RV64IZFINXZDINX-NEXT: flt.d a1, a2, a1
1375 ; RV64IZFINXZDINX-NEXT: beqz a1, .LBB44_2
1376 ; RV64IZFINXZDINX-NEXT: # %bb.1:
1377 ; RV64IZFINXZDINX-NEXT: fcvt.l.d a1, a0, rne
1378 ; RV64IZFINXZDINX-NEXT: fcvt.d.l a1, a1, rne
1379 ; RV64IZFINXZDINX-NEXT: fsgnj.d a0, a1, a0
1380 ; RV64IZFINXZDINX-NEXT: .LBB44_2:
1381 ; RV64IZFINXZDINX-NEXT: ret
1382 %a = call double @llvm.roundeven.f64(double %x)
1386 ; Declarations of the f64 rounding intrinsics exercised by the tests above.
1386 ; Each takes and returns a double; the integer conversions in the callers
1386 ; are separate fptosi/fptoui operations folded into fcvt.*.d by llc.
1386 declare double @llvm.floor.f64(double)
1387 declare double @llvm.ceil.f64(double)
1388 declare double @llvm.trunc.f64(double)
1389 declare double @llvm.round.f64(double)
1390 declare double @llvm.roundeven.f64(double)