; RUN: llc -verify-machineinstrs < %s -mtriple=aarch64-none-linux-gnu | FileCheck %s

@var32 = dso_local global i32 0
@var64 = dso_local global i64 0

define dso_local void @test_extendb32(i8 %var) {
; CHECK-LABEL: test_extendb32:

  %sxt32 = sext i8 %var to i32
  store volatile i32 %sxt32, ptr @var32
; CHECK: sxtb {{w[0-9]+}}, {{w[0-9]+}}

; N.b. this doesn't actually produce a bitfield instruction at the
; moment, but it's still a good test to have and the semantics are
; correct.
  %uxt32 = zext i8 %var to i32
  store volatile i32 %uxt32, ptr @var32
; CHECK: and {{w[0-9]+}}, {{w[0-9]+}}, #0xff
  ret void
}
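
; N.b. (editorial) sxtb is itself the aliased spelling of a bitfield move:
; "sxtb wd, wn" is defined as "sbfm wd, wn, #0, #7" (wd/wn generic here).
; The matching unsigned alias would be "ubfm wd, wn, #0, #7" (uxtb), but as
; noted above the zero-extension is currently emitted as the equivalent
; and-immediate instead.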

define dso_local void @test_extendb64(i8 %var) {
; CHECK-LABEL: test_extendb64:

  %sxt64 = sext i8 %var to i64
  store volatile i64 %sxt64, ptr @var64
; CHECK: sxtb {{x[0-9]+}}, {{w[0-9]+}}

; N.b. this doesn't actually produce a bitfield instruction at the
; moment, but it's still a good test to have and the semantics are
; correct.
  %uxt64 = zext i8 %var to i64
  store volatile i64 %uxt64, ptr @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xff
  ret void
}

define dso_local void @test_extendh32(i16 %var) {
; CHECK-LABEL: test_extendh32:

  %sxt32 = sext i16 %var to i32
  store volatile i32 %sxt32, ptr @var32
; CHECK: sxth {{w[0-9]+}}, {{w[0-9]+}}

; N.b. this doesn't actually produce a bitfield instruction at the
; moment, but it's still a good test to have and the semantics are
; correct.
  %uxt32 = zext i16 %var to i32
  store volatile i32 %uxt32, ptr @var32
; CHECK: and {{w[0-9]+}}, {{w[0-9]+}}, #0xffff
  ret void
}

define dso_local void @test_extendh64(i16 %var) {
; CHECK-LABEL: test_extendh64:

  %sxt64 = sext i16 %var to i64
  store volatile i64 %sxt64, ptr @var64
; CHECK: sxth {{x[0-9]+}}, {{w[0-9]+}}

; N.b. this doesn't actually produce a bitfield instruction at the
; moment, but it's still a good test to have and the semantics are
; correct.
  %uxt64 = zext i16 %var to i64
  store volatile i64 %uxt64, ptr @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffff
  ret void
}

define dso_local void @test_extendw(i32 %var) {
; CHECK-LABEL: test_extendw:

  %sxt64 = sext i32 %var to i64
  store volatile i64 %sxt64, ptr @var64
; CHECK: sxtw {{x[0-9]+}}, {{w[0-9]+}}

  %uxt64 = zext i32 %var to i64
  store volatile i64 %uxt64, ptr @var64
; CHECK: mov {{w[0-9]+}}, w0
  ret void
}
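
; N.b. (editorial) there is no uxtw alias to look for here: any instruction
; that writes a W register implicitly zeroes bits [63:32] of the
; corresponding X register, so the w-to-w mov above already performs the
; whole i32 -> i64 zero-extension on its own.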

define dso_local void @test_shifts(i32 %val32, i64 %val64) {
; CHECK-LABEL: test_shifts:

  %shift1 = ashr i32 %val32, 31
  store volatile i32 %shift1, ptr @var32
; CHECK: asr {{w[0-9]+}}, {{w[0-9]+}}, #31

  %shift2 = lshr i32 %val32, 8
  store volatile i32 %shift2, ptr @var32
; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, #8

  %shift3 = shl i32 %val32, 1
  store volatile i32 %shift3, ptr @var32
; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, #1

  %shift4 = ashr i64 %val64, 31
  store volatile i64 %shift4, ptr @var64
; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, #31

  %shift5 = lshr i64 %val64, 8
  store volatile i64 %shift5, ptr @var64
; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, #8

  %shift6 = shl i64 %val64, 63
  store volatile i64 %shift6, ptr @var64
; CHECK: lsl {{x[0-9]+}}, {{x[0-9]+}}, #63

  %shift7 = ashr i64 %val64, 63
  store volatile i64 %shift7, ptr @var64
; CHECK: asr {{x[0-9]+}}, {{x[0-9]+}}, #63

  %shift8 = lshr i64 %val64, 63
  store volatile i64 %shift8, ptr @var64
; CHECK: lsr {{x[0-9]+}}, {{x[0-9]+}}, #63

  %shift9 = lshr i32 %val32, 31
  store volatile i32 %shift9, ptr @var32
; CHECK: lsr {{w[0-9]+}}, {{w[0-9]+}}, #31

  %shift10 = shl i32 %val32, 31
  store volatile i32 %shift10, ptr @var32
; CHECK: lsl {{w[0-9]+}}, {{w[0-9]+}}, #31
  ret void
}
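
; N.b. (editorial) the immediate-shift mnemonics are also bitfield-move
; aliases in the ARM ARM; for 32-bit operands:
;   lsr wd, wn, #s  ==  ubfm wd, wn, #s, #31
;   asr wd, wn, #s  ==  sbfm wd, wn, #s, #31
;   lsl wd, wn, #s  ==  ubfm wd, wn, #(32 - s), #(31 - s)   (for s > 0)
; so the checks above exercise the same underlying instructions.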

; LLVM can produce in-register extensions taking place entirely with
; 64-bit registers too.
define dso_local void @test_sext_inreg_64(i64 %in) {
; CHECK-LABEL: test_sext_inreg_64:

; i1 doesn't have an official alias, but crops up and is handled by
; the bitfield ops.
  %trunc_i1 = trunc i64 %in to i1
  %sext_i1 = sext i1 %trunc_i1 to i64
  store volatile i64 %sext_i1, ptr @var64
; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #1

  %trunc_i8 = trunc i64 %in to i8
  %sext_i8 = sext i8 %trunc_i8 to i64
  store volatile i64 %sext_i8, ptr @var64
; CHECK: sxtb {{x[0-9]+}}, {{w[0-9]+}}

  %trunc_i16 = trunc i64 %in to i16
  %sext_i16 = sext i16 %trunc_i16 to i64
  store volatile i64 %sext_i16, ptr @var64
; CHECK: sxth {{x[0-9]+}}, {{w[0-9]+}}

  %trunc_i32 = trunc i64 %in to i32
  %sext_i32 = sext i32 %trunc_i32 to i64
  store volatile i64 %sext_i32, ptr @var64
; CHECK: sxtw {{x[0-9]+}}, {{w[0-9]+}}
  ret void
}
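
; N.b. (editorial) the i1 case has no alias, so the raw sbfx is printed:
; "sbfx xd, xn, #0, #1" replicates bit 0 into every bit of the register,
; producing 0 or -1, which is exactly the semantics of "sext i1 ... to i64".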

; These instructions don't actually select to official bitfield
; operations, but it's important that we select them somehow:
define dso_local void @test_zext_inreg_64(i64 %in) {
; CHECK-LABEL: test_zext_inreg_64:

  %trunc_i8 = trunc i64 %in to i8
  %zext_i8 = zext i8 %trunc_i8 to i64
  store volatile i64 %zext_i8, ptr @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xff

  %trunc_i16 = trunc i64 %in to i16
  %zext_i16 = zext i16 %trunc_i16 to i64
  store volatile i64 %zext_i16, ptr @var64
; CHECK: and {{x[0-9]+}}, {{x[0-9]+}}, #0xffff

  %trunc_i32 = trunc i64 %in to i32
  %zext_i32 = zext i32 %trunc_i32 to i64
  store volatile i64 %zext_i32, ptr @var64
; CHECK: mov {{w[0-9]+}}, {{w[0-9]+}}
  ret void
}
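
; N.b. (editorial) #0xff and #0xffff are encodable logical immediates, so
; the and-immediate form costs no more than the ubfx it stands in for; the
; i32 case needs no masking at all because the 32-bit mov implicitly clears
; bits [63:32] of its destination.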

define dso_local i64 @test_sext_inreg_from_32(i32 %in) {
; CHECK-LABEL: test_sext_inreg_from_32:

  %small = trunc i32 %in to i1
  %ext = sext i1 %small to i64

  ; Different registers are, of course, possible, though suboptimal. This is
  ; making sure that a 64-bit "(sext_inreg (anyext GPR32), i1)" uses the
  ; 64-bit sbfx rather than just 32 bits.
; CHECK: sbfx x0, x0, #0, #1
  ret i64 %ext
}
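
; N.b. (editorial) the 32-bit form "sbfx w0, w0, #0, #1" would zero bits
; [63:32] of x0 rather than sign-extend into them, yielding 0xffffffff
; instead of -1 when the low bit is set; hence the X-register sbfx above.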

define dso_local i32 @test_ubfx32(ptr %addr) {
; CHECK-LABEL: test_ubfx32:
; CHECK: ubfx {{w[0-9]+}}, {{w[0-9]+}}, #23, #3

  %fields = load i32, ptr %addr
  %shifted = lshr i32 %fields, 23
  %masked = and i32 %shifted, 7
  ret i32 %masked
}
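
; N.b. (editorial) the ubfx operands fall straight out of the IR: lsb = 23
; is the lshr amount, and width = 3 because the mask 7 is 0b111. In general
; "(x >> lsb) & ((1 << width) - 1)" selects to "ubfx dst, src, #lsb, #width".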

define dso_local i64 @test_ubfx64(ptr %addr) {
; CHECK-LABEL: test_ubfx64:
; CHECK: ubfx {{x[0-9]+}}, {{x[0-9]+}}, #25, #10

  %fields = load i64, ptr %addr
  %shifted = lshr i64 %fields, 25
  %masked = and i64 %shifted, 1023
  ret i64 %masked
}

define dso_local i32 @test_sbfx32(ptr %addr) {
; CHECK-LABEL: test_sbfx32:
; CHECK: sbfx {{w[0-9]+}}, {{w[0-9]+}}, #6, #3

  %fields = load i32, ptr %addr
  %shifted = shl i32 %fields, 23
  %extended = ashr i32 %shifted, 29
  ret i32 %extended
}
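
; N.b. (editorial) for a shl/ashr pair the field is recovered as
; lsb = 29 - 23 = 6 and width = 32 - 29 = 3: arithmetically shifting
; "(x << a)" right by b extracts a sign-extended (32 - b)-bit field that
; starts at bit (b - a), matching the "#6, #3" expected above.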

define dso_local i64 @test_sbfx64(ptr %addr) {
; CHECK-LABEL: test_sbfx64:
; CHECK: sbfx {{x[0-9]+}}, {{x[0-9]+}}, #0, #63

  %fields = load i64, ptr %addr
  %shifted = shl i64 %fields, 1
  %extended = ashr i64 %shifted, 1
  ret i64 %extended
}

define i32 @test_ubfx_mask(i32 %lhs, i32 %rhs) {
; CHECK-LABEL: test_ubfx_mask:
; CHECK: lsr w0, w1, #20
  %mask = and i32 %lhs, 20
  %i7 = add i32 %mask, 1
  %i8 = xor i32 %lhs, 20
  %i9 = xor i32 %i8, %i7
  %i10 = and i32 %i9, 20
  %shift = lshr i32 %rhs, %i10
  %shift.masked = and i32 %shift, 65535
  ret i32 %shift.masked
}
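
; N.b. (editorial) %i10 always evaluates to 20: bit 0 of %mask is clear, so
; the "+ 1" never carries into mask bits 2 and 4, and
; (%i8 ^ %i7) & 20 == ((%lhs ^ 20) ^ (%lhs & 20)) & 20 == 20.
; The shift amount therefore folds to a constant, and because "lsr #20"
; leaves only 12 live bits the 0xffff mask is redundant: a plain lsr, not a
; ubfx, is the correct output.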