; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve < %s | FileCheck %s

;
; ST1B, ST1W, ST1H, ST1D: base + 32-bit unscaled offset, sign (sxtw) or zero
; (uxtw) extended to 64 bits.
;   e.g. st1h { z0.d }, p0, [x0, z1.d, uxtw]
;
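
; Note on the pattern below: sub-word scatter stores take their data in a
; wider vector, so the IR truncates %data first and the truncation is
; expected to fold into the st1b/st1h instruction itself. For orientation
; only (not exercised by this test), the uxtw/sxtw forms line up with ACLE
; intrinsics such as svst1b_scatter_u32offset_s32 and
; svst1b_scatter_s32offset_s32.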

; ST1B
define void @sst1b_s_uxtw(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, i8* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1b_s_uxtw:
; CHECK: st1b { z0.s }, p0, [x0, z1.s, uxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8> %data_trunc,
                                                      <vscale x 4 x i1> %pg,
                                                      i8* %base,
                                                      <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1b_s_sxtw(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, i8* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1b_s_sxtw:
; CHECK: st1b { z0.s }, p0, [x0, z1.s, sxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 4 x i32> %data to <vscale x 4 x i8>
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8> %data_trunc,
                                                      <vscale x 4 x i1> %pg,
                                                      i8* %base,
                                                      <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1b_d_uxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i8* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1b_d_uxtw:
; CHECK: st1b { z0.d }, p0, [x0, z1.d, uxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i8(<vscale x 2 x i8> %data_trunc,
                                                      <vscale x 2 x i1> %pg,
                                                      i8* %base,
                                                      <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1b_d_sxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i8* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1b_d_sxtw:
; CHECK: st1b { z0.d }, p0, [x0, z1.d, sxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i8>
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i8(<vscale x 2 x i8> %data_trunc,
                                                      <vscale x 2 x i1> %pg,
                                                      i8* %base,
                                                      <vscale x 2 x i32> %offsets)
  ret void
}
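
; Note: the .d variants hold one element per 64-bit lane, so they take half
; as many offsets (<vscale x 2 x i32>) and truncate from i64 data.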

; ST1H
define void @sst1h_s_uxtw(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, i16* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1h_s_uxtw:
; CHECK: st1h { z0.s }, p0, [x0, z1.s, uxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i16(<vscale x 4 x i16> %data_trunc,
                                                       <vscale x 4 x i1> %pg,
                                                       i16* %base,
                                                       <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1h_s_sxtw(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, i16* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1h_s_sxtw:
; CHECK: st1h { z0.s }, p0, [x0, z1.s, sxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i16(<vscale x 4 x i16> %data_trunc,
                                                       <vscale x 4 x i1> %pg,
                                                       i16* %base,
                                                       <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1h_d_uxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i16* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1h_d_uxtw:
; CHECK: st1h { z0.d }, p0, [x0, z1.d, uxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i16(<vscale x 2 x i16> %data_trunc,
                                                       <vscale x 2 x i1> %pg,
                                                       i16* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1h_d_sxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i16* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1h_d_sxtw:
; CHECK: st1h { z0.d }, p0, [x0, z1.d, sxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i16(<vscale x 2 x i16> %data_trunc,
                                                       <vscale x 2 x i1> %pg,
                                                       i16* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

; ST1W
define void @sst1w_s_uxtw(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, i32* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1w_s_uxtw:
; CHECK: st1w { z0.s }, p0, [x0, z1.s, uxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i32(<vscale x 4 x i32> %data,
                                                       <vscale x 4 x i1> %pg,
                                                       i32* %base,
                                                       <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1w_s_sxtw(<vscale x 4 x i32> %data, <vscale x 4 x i1> %pg, i32* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1w_s_sxtw:
; CHECK: st1w { z0.s }, p0, [x0, z1.s, sxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i32(<vscale x 4 x i32> %data,
                                                       <vscale x 4 x i1> %pg,
                                                       i32* %base,
                                                       <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1w_d_uxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i32* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1w_d_uxtw:
; CHECK: st1w { z0.d }, p0, [x0, z1.d, uxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i32(<vscale x 2 x i32> %data_trunc,
                                                       <vscale x 2 x i1> %pg,
                                                       i32* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1w_d_sxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i32* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1w_d_sxtw:
; CHECK: st1w { z0.d }, p0, [x0, z1.d, sxtw]
; CHECK-NEXT: ret
  %data_trunc = trunc <vscale x 2 x i64> %data to <vscale x 2 x i32>
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i32(<vscale x 2 x i32> %data_trunc,
                                                       <vscale x 2 x i1> %pg,
                                                       i32* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1w_s_uxtw_float(<vscale x 4 x float> %data, <vscale x 4 x i1> %pg, float* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1w_s_uxtw_float:
; CHECK: st1w { z0.s }, p0, [x0, z1.s, uxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4f32(<vscale x 4 x float> %data,
                                                       <vscale x 4 x i1> %pg,
                                                       float* %base,
                                                       <vscale x 4 x i32> %offsets)
  ret void
}

define void @sst1w_s_sxtw_float(<vscale x 4 x float> %data, <vscale x 4 x i1> %pg, float* %base, <vscale x 4 x i32> %offsets) {
; CHECK-LABEL: sst1w_s_sxtw_float:
; CHECK: st1w { z0.s }, p0, [x0, z1.s, sxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4f32(<vscale x 4 x float> %data,
                                                       <vscale x 4 x i1> %pg,
                                                       float* %base,
                                                       <vscale x 4 x i32> %offsets)
  ret void
}
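
; Floating-point data reuses the integer encodings: the f32 scatters above
; select the same st1w as their i32 counterparts, and the f64 cases below
; select st1d.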

; ST1D
define void @sst1d_d_uxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i64* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1d_d_uxtw:
; CHECK: st1d { z0.d }, p0, [x0, z1.d, uxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i64(<vscale x 2 x i64> %data,
                                                       <vscale x 2 x i1> %pg,
                                                       i64* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1d_d_sxtw(<vscale x 2 x i64> %data, <vscale x 2 x i1> %pg, i64* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1d_d_sxtw:
; CHECK: st1d { z0.d }, p0, [x0, z1.d, sxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i64(<vscale x 2 x i64> %data,
                                                       <vscale x 2 x i1> %pg,
                                                       i64* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1d_d_uxtw_double(<vscale x 2 x double> %data, <vscale x 2 x i1> %pg, double* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1d_d_uxtw_double:
; CHECK: st1d { z0.d }, p0, [x0, z1.d, uxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2f64(<vscale x 2 x double> %data,
                                                       <vscale x 2 x i1> %pg,
                                                       double* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}

define void @sst1d_d_sxtw_double(<vscale x 2 x double> %data, <vscale x 2 x i1> %pg, double* %base, <vscale x 2 x i32> %offsets) {
; CHECK-LABEL: sst1d_d_sxtw_double:
; CHECK: st1d { z0.d }, p0, [x0, z1.d, sxtw]
; CHECK-NEXT: ret
  call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2f64(<vscale x 2 x double> %data,
                                                       <vscale x 2 x i1> %pg,
                                                       double* %base,
                                                       <vscale x 2 x i32> %offsets)
  ret void
}
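
; The declarations below pin the intrinsic signatures: data, governing
; predicate, a scalar base pointer (materialized in x0 above), and the
; 32-bit vector of offsets.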

; ST1B
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i8(<vscale x 4 x i8>, <vscale x 4 x i1>, i8*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i8(<vscale x 2 x i8>, <vscale x 2 x i1>, i8*, <vscale x 2 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i8(<vscale x 4 x i8>, <vscale x 4 x i1>, i8*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i8(<vscale x 2 x i8>, <vscale x 2 x i1>, i8*, <vscale x 2 x i32>)

; ST1H
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i16(<vscale x 4 x i16>, <vscale x 4 x i1>, i16*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i16(<vscale x 2 x i16>, <vscale x 2 x i1>, i16*, <vscale x 2 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i16(<vscale x 4 x i16>, <vscale x 4 x i1>, i16*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i16(<vscale x 2 x i16>, <vscale x 2 x i1>, i16*, <vscale x 2 x i32>)

; ST1W
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, i32*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i32(<vscale x 2 x i32>, <vscale x 2 x i1>, i32*, <vscale x 2 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, i32*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i32(<vscale x 2 x i32>, <vscale x 2 x i1>, i32*, <vscale x 2 x i32>)

declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4f32(<vscale x 4 x float>, <vscale x 4 x i1>, float*, <vscale x 4 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4f32(<vscale x 4 x float>, <vscale x 4 x i1>, float*, <vscale x 4 x i32>)

; ST1D
declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, i64*, <vscale x 2 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, i64*, <vscale x 2 x i32>)

declare void @llvm.aarch64.sve.st1.scatter.sxtw.nxv2f64(<vscale x 2 x double>, <vscale x 2 x i1>, double*, <vscale x 2 x i32>)
declare void @llvm.aarch64.sve.st1.scatter.uxtw.nxv2f64(<vscale x 2 x double>, <vscale x 2 x i1>, double*, <vscale x 2 x i32>)