# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=riscv64 -run-pass=legalizer %s -o - | FileCheck %s --check-prefixes=RV64I
# RUN: llc -mtriple=riscv64 -mattr=+zbb -run-pass=legalizer %s -o - \
# RUN:   | FileCheck %s --check-prefixes=RV64ZBB
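
# umin_i8: the RV64I checks expect the s8 G_UMIN to be lowered to an unsigned
# compare and select on the operands masked with 255; the RV64ZBB checks
# expect a single widened s64 G_UMIN on the masked operands.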
---
name:            umin_i8
body:             |
  bb.0.entry:
    ; RV64I-LABEL: name: umin_i8
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umin_i8
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[AND]], [[AND1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s8) = G_TRUNC %0(s64)
    %3:_(s8) = G_TRUNC %1(s64)
    %4:_(s8) = G_UMIN %2, %3
    %5:_(s64) = G_ZEXT %4(s8)
    $x10 = COPY %5(s64)
    PseudoRET implicit $x10

...
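
# umin_i16: same lowering as umin_i8, with the operands masked with 65535
# instead of 255.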
---
name:            umin_i16
body:             |
  bb.0.entry:
    ; RV64I-LABEL: name: umin_i16
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
    ; RV64I-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64I-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[AND]](s64), [[AND1]]
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
    ; RV64I-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[SELECT]](s32)
    ; RV64I-NEXT: [[AND2:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; RV64I-NEXT: $x10 = COPY [[AND2]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umin_i16
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[COPY]], [[C]]
    ; RV64ZBB-NEXT: [[AND1:%[0-9]+]]:_(s64) = G_AND [[COPY1]], [[C]]
    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[AND]], [[AND1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s16) = G_TRUNC %0(s64)
    %3:_(s16) = G_TRUNC %1(s64)
    %4:_(s16) = G_UMIN %2, %3
    %5:_(s64) = G_ZEXT %4(s16)
    $x10 = COPY %5(s64)
    PseudoRET implicit $x10

...
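
# umin_i32: the RV64I checks expect the compare to be done on the
# sign-extended (G_SEXT_INREG 32) operands feeding an s32 select that is then
# zero-extended; the RV64ZBB checks expect an s64 G_UMIN on the sign-extended
# operands, masked back to 32 bits with 0xffffffff.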
---
name:            umin_i32
body:             |
  bb.0.entry:
    ; RV64I-LABEL: name: umin_i32
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
    ; RV64I-NEXT: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
    ; RV64I-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
    ; RV64I-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[SEXT_INREG]](s64), [[SEXT_INREG1]]
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s32) = G_SELECT [[ICMP]](s64), [[TRUNC]], [[TRUNC1]]
    ; RV64I-NEXT: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT [[SELECT]](s32)
    ; RV64I-NEXT: $x10 = COPY [[ZEXT]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umin_i32
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[SEXT_INREG:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY]], 32
    ; RV64ZBB-NEXT: [[SEXT_INREG1:%[0-9]+]]:_(s64) = G_SEXT_INREG [[COPY1]], 32
    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[SEXT_INREG]], [[SEXT_INREG1]]
    ; RV64ZBB-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 4294967295
    ; RV64ZBB-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[UMIN]], [[C]]
    ; RV64ZBB-NEXT: $x10 = COPY [[AND]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s32) = G_TRUNC %0(s64)
    %3:_(s32) = G_TRUNC %1(s64)
    %4:_(s32) = G_UMIN %2, %3
    %5:_(s64) = G_ZEXT %4(s32)
    $x10 = COPY %5(s64)
    PseudoRET implicit $x10

...
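
# umin_i64: s64 G_UMIN is already the legal width, so RV64I lowers it to an
# icmp ult plus select and RV64ZBB leaves the G_UMIN untouched.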
---
name:            umin_i64
body:             |
  bb.0.entry:
    ; RV64I-LABEL: name: umin_i64
    ; RV64I: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64I-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64I-NEXT: [[ICMP:%[0-9]+]]:_(s64) = G_ICMP intpred(ult), [[COPY]](s64), [[COPY1]]
    ; RV64I-NEXT: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT [[ICMP]](s64), [[COPY]], [[COPY1]]
    ; RV64I-NEXT: $x10 = COPY [[SELECT]](s64)
    ; RV64I-NEXT: PseudoRET implicit $x10
    ;
    ; RV64ZBB-LABEL: name: umin_i64
    ; RV64ZBB: [[COPY:%[0-9]+]]:_(s64) = COPY $x10
    ; RV64ZBB-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x11
    ; RV64ZBB-NEXT: [[UMIN:%[0-9]+]]:_(s64) = G_UMIN [[COPY]], [[COPY1]]
    ; RV64ZBB-NEXT: $x10 = COPY [[UMIN]](s64)
    ; RV64ZBB-NEXT: PseudoRET implicit $x10
    %0:_(s64) = COPY $x10
    %1:_(s64) = COPY $x11
    %2:_(s64) = G_UMIN %0, %1
    $x10 = COPY %2(s64)
    PseudoRET implicit $x10

...