; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
; RUN: llc -mtriple=riscv32 -mattr=+v,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck -check-prefixes=CHECK,RV32 %s
; RUN: llc -mtriple=riscv64 -mattr=+v,+zfh,+zvfh -verify-machineinstrs < %s | FileCheck -check-prefixes=CHECK,RV64 %s
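
; The tests below appear to exercise lowering of loads and stores of small
; fixed-length vectors: accesses that fit in a scalar GPR are expected to
; become plain scalar loads/stores, while wider, under-aligned, or volatile
; accesses are expected to use vle/vse from the V extension.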
define void @v2i8(ptr %p, ptr %q) {
; CHECK-LABEL: v2i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lh a0, 0(a0)
; CHECK-NEXT:    sh a0, 0(a1)
; CHECK-NEXT:    ret
  %v = load <2 x i8>, ptr %p
  store <2 x i8> %v, ptr %q
  ret void
}

define void @v2i16(ptr %p, ptr %q) {
; CHECK-LABEL: v2i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lw a0, 0(a0)
; CHECK-NEXT:    sw a0, 0(a1)
; CHECK-NEXT:    ret
  %v = load <2 x i16>, ptr %p
  store <2 x i16> %v, ptr %q
  ret void
}

define void @v2i32(ptr %p, ptr %q) {
; RV32-LABEL: v2i32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vle32.v v8, (a0)
; RV32-NEXT:    vse32.v v8, (a1)
; RV32-NEXT:    ret
;
; RV64-LABEL: v2i32:
; RV64:       # %bb.0:
; RV64-NEXT:    ld a0, 0(a0)
; RV64-NEXT:    sd a0, 0(a1)
; RV64-NEXT:    ret
  %v = load <2 x i32>, ptr %p
  store <2 x i32> %v, ptr %q
  ret void
}

define void @v2i64(ptr %p, ptr %q) {
; CHECK-LABEL: v2i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle64.v v8, (a0)
; CHECK-NEXT:    vse64.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <2 x i64>, ptr %p
  store <2 x i64> %v, ptr %q
  ret void
}

define void @v2f16(ptr %p, ptr %q) {
; CHECK-LABEL: v2f16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lw a0, 0(a0)
; CHECK-NEXT:    sw a0, 0(a1)
; CHECK-NEXT:    ret
  %v = load <2 x half>, ptr %p
  store <2 x half> %v, ptr %q
  ret void
}

define void @v2f32(ptr %p, ptr %q) {
; RV32-LABEL: v2f32:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 2, e32, mf2, ta, ma
; RV32-NEXT:    vle32.v v8, (a0)
; RV32-NEXT:    vse32.v v8, (a1)
; RV32-NEXT:    ret
;
; RV64-LABEL: v2f32:
; RV64:       # %bb.0:
; RV64-NEXT:    ld a0, 0(a0)
; RV64-NEXT:    sd a0, 0(a1)
; RV64-NEXT:    ret
  %v = load <2 x float>, ptr %p
  store <2 x float> %v, ptr %q
  ret void
}

define void @v2f64(ptr %p, ptr %q) {
; CHECK-LABEL: v2f64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e64, m1, ta, ma
; CHECK-NEXT:    vle64.v v8, (a0)
; CHECK-NEXT:    vse64.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <2 x double>, ptr %p
  store <2 x double> %v, ptr %q
  ret void
}

define void @v4i8(ptr %p, ptr %q) {
; CHECK-LABEL: v4i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lw a0, 0(a0)
; CHECK-NEXT:    sw a0, 0(a1)
; CHECK-NEXT:    ret
  %v = load <4 x i8>, ptr %p
  store <4 x i8> %v, ptr %q
  ret void
}

define void @v4i16(ptr %p, ptr %q) {
; RV32-LABEL: v4i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; RV32-NEXT:    vle16.v v8, (a0)
; RV32-NEXT:    vse16.v v8, (a1)
; RV32-NEXT:    ret
;
; RV64-LABEL: v4i16:
; RV64:       # %bb.0:
; RV64-NEXT:    ld a0, 0(a0)
; RV64-NEXT:    sd a0, 0(a1)
; RV64-NEXT:    ret
  %v = load <4 x i16>, ptr %p
  store <4 x i16> %v, ptr %q
  ret void
}

define void @v4i32(ptr %p, ptr %q) {
; CHECK-LABEL: v4i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vle32.v v8, (a0)
; CHECK-NEXT:    vse32.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <4 x i32>, ptr %p
  store <4 x i32> %v, ptr %q
  ret void
}

define void @v4i64(ptr %p, ptr %q) {
; CHECK-LABEL: v4i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle64.v v8, (a0)
; CHECK-NEXT:    vse64.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <4 x i64>, ptr %p
  store <4 x i64> %v, ptr %q
  ret void
}

define void @v4f16(ptr %p, ptr %q) {
; RV32-LABEL: v4f16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 4, e16, mf2, ta, ma
; RV32-NEXT:    vle16.v v8, (a0)
; RV32-NEXT:    vse16.v v8, (a1)
; RV32-NEXT:    ret
;
; RV64-LABEL: v4f16:
; RV64:       # %bb.0:
; RV64-NEXT:    ld a0, 0(a0)
; RV64-NEXT:    sd a0, 0(a1)
; RV64-NEXT:    ret
  %v = load <4 x half>, ptr %p
  store <4 x half> %v, ptr %q
  ret void
}

define void @v4f32(ptr %p, ptr %q) {
; CHECK-LABEL: v4f32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e32, m1, ta, ma
; CHECK-NEXT:    vle32.v v8, (a0)
; CHECK-NEXT:    vse32.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <4 x float>, ptr %p
  store <4 x float> %v, ptr %q
  ret void
}

define void @v4f64(ptr %p, ptr %q) {
; CHECK-LABEL: v4f64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 4, e64, m2, ta, ma
; CHECK-NEXT:    vle64.v v8, (a0)
; CHECK-NEXT:    vse64.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <4 x double>, ptr %p
  store <4 x double> %v, ptr %q
  ret void
}

define void @v8i8(ptr %p, ptr %q) {
; RV32-LABEL: v8i8:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetivli zero, 8, e8, mf2, ta, ma
; RV32-NEXT:    vle8.v v8, (a0)
; RV32-NEXT:    vse8.v v8, (a1)
; RV32-NEXT:    ret
;
; RV64-LABEL: v8i8:
; RV64:       # %bb.0:
; RV64-NEXT:    ld a0, 0(a0)
; RV64-NEXT:    sd a0, 0(a1)
; RV64-NEXT:    ret
  %v = load <8 x i8>, ptr %p
  store <8 x i8> %v, ptr %q
  ret void
}

define void @v8i16(ptr %p, ptr %q) {
; CHECK-LABEL: v8i16:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e16, m1, ta, ma
; CHECK-NEXT:    vle16.v v8, (a0)
; CHECK-NEXT:    vse16.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <8 x i16>, ptr %p
  store <8 x i16> %v, ptr %q
  ret void
}

define void @v8i32(ptr %p, ptr %q) {
; CHECK-LABEL: v8i32:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e32, m2, ta, ma
; CHECK-NEXT:    vle32.v v8, (a0)
; CHECK-NEXT:    vse32.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <8 x i32>, ptr %p
  store <8 x i32> %v, ptr %q
  ret void
}

define void @v8i64(ptr %p, ptr %q) {
; CHECK-LABEL: v8i64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 8, e64, m4, ta, ma
; CHECK-NEXT:    vle64.v v8, (a0)
; CHECK-NEXT:    vse64.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <8 x i64>, ptr %p
  store <8 x i64> %v, ptr %q
  ret void
}

define void @v2i8_align1(ptr %p, ptr %q) {
; CHECK-LABEL: v2i8_align1:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vle8.v v8, (a0)
; CHECK-NEXT:    vse8.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <2 x i8>, ptr %p, align 1
  store <2 x i8> %v, ptr %q
  ret void
}

define void @v2i8_align2(ptr %p, ptr %q) {
; CHECK-LABEL: v2i8_align2:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lh a0, 0(a0)
; CHECK-NEXT:    sh a0, 0(a1)
; CHECK-NEXT:    ret
  %v = load <2 x i8>, ptr %p, align 2
  store <2 x i8> %v, ptr %q
  ret void
}

define void @v2i8_align4(ptr %p, ptr %q) {
; CHECK-LABEL: v2i8_align4:
; CHECK:       # %bb.0:
; CHECK-NEXT:    lh a0, 0(a0)
; CHECK-NEXT:    sh a0, 0(a1)
; CHECK-NEXT:    ret
  %v = load <2 x i8>, ptr %p, align 4
  store <2 x i8> %v, ptr %q
  ret void
}

define void @v2i8_volatile_load(ptr %p, ptr %q) {
; CHECK-LABEL: v2i8_volatile_load:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vle8.v v8, (a0)
; CHECK-NEXT:    vse8.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load volatile <2 x i8>, ptr %p
  store <2 x i8> %v, ptr %q
  ret void
}

define void @v2i8_volatile_store(ptr %p, ptr %q) {
; CHECK-LABEL: v2i8_volatile_store:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetivli zero, 2, e8, mf8, ta, ma
; CHECK-NEXT:    vle8.v v8, (a0)
; CHECK-NEXT:    vse8.v v8, (a1)
; CHECK-NEXT:    ret
  %v = load <2 x i8>, ptr %p
  store volatile <2 x i8> %v, ptr %q
  ret void
}