1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple powerpc64le < %s | FileCheck %s
4 ; Check constrained ops converted to call
; @test: loops over 255 doubles at %cast, replacing each element with its
; constrained cos. Under strictfp, cos has no native PPC instruction, so the
; autogenerated CHECK lines below expect a libcall-style loop: callee-saved
; r29/r30 are spilled, r30 walks the array (lfdu/stfd with update), and r29
; holds an explicit 255-down-to-0 counter (cmpldi/bc) instead of the CTR.
; NOTE(review): some interior lines (e.g. the %root label, call to the cos
; libcall, ret, closing brace) appear elided in this view — confirm against
; the full file before editing further.
4 define void @test(ptr %cast) strictfp {
7 ; CHECK: # %bb.0: # %root
9 ; CHECK-NEXT: .cfi_def_cfa_offset 64
10 ; CHECK-NEXT: .cfi_offset lr, 16
11 ; CHECK-NEXT: .cfi_offset r29, -24
12 ; CHECK-NEXT: .cfi_offset r30, -16
13 ; CHECK-NEXT: std 29, -24(1) # 8-byte Folded Spill
14 ; CHECK-NEXT: std 30, -16(1) # 8-byte Folded Spill
15 ; CHECK-NEXT: stdu 1, -64(1)
16 ; CHECK-NEXT: addi 30, 3, -8
17 ; CHECK-NEXT: li 29, 255
18 ; CHECK-NEXT: std 0, 80(1)
19 ; CHECK-NEXT: .p2align 5
20 ; CHECK-NEXT: .LBB0_1: # %for.body
22 ; CHECK-NEXT: lfdu 1, 8(30)
25 ; CHECK-NEXT: addi 29, 29, -1
26 ; CHECK-NEXT: stfd 1, 0(30)
27 ; CHECK-NEXT: cmpldi 29, 0
28 ; CHECK-NEXT: bc 12, 1, .LBB0_1
29 ; CHECK-NEXT: # %bb.2: # %exit
30 ; CHECK-NEXT: addi 1, 1, 64
31 ; CHECK-NEXT: ld 0, 16(1)
32 ; CHECK-NEXT: ld 30, -16(1) # 8-byte Folded Reload
33 ; CHECK-NEXT: ld 29, -24(1) # 8-byte Folded Reload
; for.body: load element %i, take constrained cos (dynamic rounding, strict
; exception semantics), store it back, then loop until %next reaches 255.
43 %i = phi i64 [ 0, %root ], [ %next, %for.body ]
44 %idx = getelementptr inbounds double, ptr %cast, i64 %i
45 %val = load double, ptr %idx
46 %cos = tail call nnan ninf nsz arcp double @llvm.experimental.constrained.cos.f64(double %val, metadata !"round.dynamic", metadata !"fpexcept.strict")
47 store double %cos, ptr %idx, align 8
48 %next = add nuw nsw i64 %i, 1
49 %cond = icmp eq i64 %next, 255
50 br i1 %cond, label %exit, label %for.body
53 ; Check constrained ops converted to native instruction
; @test2: same 255-element loop shape as @test, but with constrained sqrt.
; sqrt DOES have a native PPC instruction, so the CHECK lines expect an
; inline xssqrtdp in a tight hardware-counted loop (bdnz on the CTR) with no
; call, no spills, and no stack frame.
; NOTE(review): interior lines (entry block, mtctr setup, ret, closing
; brace) appear elided in this view — confirm against the full file.
54 define void @test2(ptr %cast) strictfp {
56 ; CHECK: # %bb.0: # %entry
57 ; CHECK-NEXT: li 4, 255
58 ; CHECK-NEXT: addi 3, 3, -8
60 ; CHECK-NEXT: .p2align 4
61 ; CHECK-NEXT: .LBB1_1: # %for.body
63 ; CHECK-NEXT: lfdu 0, 8(3)
64 ; CHECK-NEXT: xssqrtdp 0, 0
65 ; CHECK-NEXT: stfd 0, 0(3)
66 ; CHECK-NEXT: bdnz .LBB1_1
67 ; CHECK-NEXT: # %bb.2: # %exit
; for.body: element-wise constrained sqrt (note the result is named %cos —
; presumably copy/paste from @test; harmless but slightly misleading).
73 %i = phi i64 [ 0, %entry ], [ %next, %for.body ]
74 %idx = getelementptr inbounds double, ptr %cast, i64 %i
75 %val = load double, ptr %idx
76 %cos = tail call nnan ninf nsz arcp double @llvm.experimental.constrained.sqrt.f64(double %val, metadata !"round.dynamic", metadata !"fpexcept.strict")
77 store double %cos, ptr %idx, align 8
78 %next = add nuw nsw i64 %i, 1
79 %cond = icmp eq i64 %next, 255
80 br i1 %cond, label %exit, label %for.body
86 ; Check constrained ops converted to call
; @testTan: identical structure to @test but with constrained tan, which
; likewise has no native PPC instruction — the CHECK lines expect the same
; libcall-style loop (r29/r30 spilled, manual r29 countdown, cmpldi/bc).
; NOTE(review): interior lines (the %root label, the tan libcall, ret,
; closing brace) appear elided in this view — confirm against the full file.
87 define void @testTan(ptr %cast) strictfp {
88 ; CHECK-LABEL: testTan:
89 ; CHECK: # %bb.0: # %root
91 ; CHECK-NEXT: .cfi_def_cfa_offset 64
92 ; CHECK-NEXT: .cfi_offset lr, 16
93 ; CHECK-NEXT: .cfi_offset r29, -24
94 ; CHECK-NEXT: .cfi_offset r30, -16
95 ; CHECK-NEXT: std 29, -24(1) # 8-byte Folded Spill
96 ; CHECK-NEXT: std 30, -16(1) # 8-byte Folded Spill
97 ; CHECK-NEXT: stdu 1, -64(1)
98 ; CHECK-NEXT: addi 30, 3, -8
99 ; CHECK-NEXT: li 29, 255
100 ; CHECK-NEXT: std 0, 80(1)
101 ; CHECK-NEXT: .p2align 5
102 ; CHECK-NEXT: .LBB2_1: # %for.body
104 ; CHECK-NEXT: lfdu 1, 8(30)
107 ; CHECK-NEXT: addi 29, 29, -1
108 ; CHECK-NEXT: stfd 1, 0(30)
109 ; CHECK-NEXT: cmpldi 29, 0
110 ; CHECK-NEXT: bc 12, 1, .LBB2_1
111 ; CHECK-NEXT: # %bb.2: # %exit
112 ; CHECK-NEXT: addi 1, 1, 64
113 ; CHECK-NEXT: ld 0, 16(1)
114 ; CHECK-NEXT: ld 30, -16(1) # 8-byte Folded Reload
115 ; CHECK-NEXT: ld 29, -24(1) # 8-byte Folded Reload
; for.body: element-wise constrained tan, then loop until %next reaches 255.
125 %i = phi i64 [ 0, %root ], [ %next, %for.body ]
126 %idx = getelementptr inbounds double, ptr %cast, i64 %i
127 %val = load double, ptr %idx
128 %tan = tail call nnan ninf nsz arcp double @llvm.experimental.constrained.tan.f64(double %val, metadata !"round.dynamic", metadata !"fpexcept.strict")
129 store double %tan, ptr %idx, align 8
130 %next = add nuw nsw i64 %i, 1
131 %cond = icmp eq i64 %next, 255
132 br i1 %cond, label %exit, label %for.body
; Declarations of the constrained FP intrinsics exercised by the tests above.
135 declare double @llvm.experimental.constrained.cos.f64(double, metadata, metadata)
136 declare double @llvm.experimental.constrained.tan.f64(double, metadata, metadata)
137 declare double @llvm.experimental.constrained.sqrt.f64(double, metadata, metadata)