; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve < %s | FileCheck %s
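
; Test that the llvm.aarch64.sve.[su]xt[bhw] intrinsics are lowered to the
; corresponding predicated SVE sign- and zero-extend instructions.

;
; SXTB
;
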
define <vscale x 8 x i16> @sxtb_i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b) {
; CHECK-LABEL: sxtb_i16:
; CHECK: sxtb z0.h, p0/m, z1.h
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.sxtb.nxv8i16(<vscale x 8 x i16> %a,
                                                                <vscale x 8 x i1> %pg,
                                                                <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @sxtb_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sxtb_i32:
; CHECK: sxtb z0.s, p0/m, z1.s
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sxtb.nxv4i32(<vscale x 4 x i32> %a,
                                                                <vscale x 4 x i1> %pg,
                                                                <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sxtb_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sxtb_i64:
; CHECK: sxtb z0.d, p0/m, z1.d
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sxtb.nxv2i64(<vscale x 2 x i64> %a,
                                                                <vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

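;
; SXTH
;
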
define <vscale x 4 x i32> @sxth_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: sxth_i32:
; CHECK: sxth z0.s, p0/m, z1.s
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sxth.nxv4i32(<vscale x 4 x i32> %a,
                                                                <vscale x 4 x i1> %pg,
                                                                <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @sxth_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sxth_i64:
; CHECK: sxth z0.d, p0/m, z1.d
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sxth.nxv2i64(<vscale x 2 x i64> %a,
                                                                <vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

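;
; SXTW
;
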
define <vscale x 2 x i64> @sxtw_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: sxtw_i64:
; CHECK: sxtw z0.d, p0/m, z1.d
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.sxtw.nxv2i64(<vscale x 2 x i64> %a,
                                                                <vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

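;
; UXTB
;
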
define <vscale x 8 x i16> @uxtb_i16(<vscale x 8 x i16> %a, <vscale x 8 x i1> %pg, <vscale x 8 x i16> %b) {
; CHECK-LABEL: uxtb_i16:
; CHECK: uxtb z0.h, p0/m, z1.h
  %out = call <vscale x 8 x i16> @llvm.aarch64.sve.uxtb.nxv8i16(<vscale x 8 x i16> %a,
                                                                <vscale x 8 x i1> %pg,
                                                                <vscale x 8 x i16> %b)
  ret <vscale x 8 x i16> %out
}

define <vscale x 4 x i32> @uxtb_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uxtb_i32:
; CHECK: uxtb z0.s, p0/m, z1.s
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uxtb.nxv4i32(<vscale x 4 x i32> %a,
                                                                <vscale x 4 x i1> %pg,
                                                                <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uxtb_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uxtb_i64:
; CHECK: uxtb z0.d, p0/m, z1.d
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uxtb.nxv2i64(<vscale x 2 x i64> %a,
                                                                <vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

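;
; UXTH
;
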
define <vscale x 4 x i32> @uxth_i32(<vscale x 4 x i32> %a, <vscale x 4 x i1> %pg, <vscale x 4 x i32> %b) {
; CHECK-LABEL: uxth_i32:
; CHECK: uxth z0.s, p0/m, z1.s
  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.uxth.nxv4i32(<vscale x 4 x i32> %a,
                                                                <vscale x 4 x i1> %pg,
                                                                <vscale x 4 x i32> %b)
  ret <vscale x 4 x i32> %out
}

define <vscale x 2 x i64> @uxth_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uxth_i64:
; CHECK: uxth z0.d, p0/m, z1.d
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uxth.nxv2i64(<vscale x 2 x i64> %a,
                                                                <vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

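;
; UXTW
;
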
define <vscale x 2 x i64> @uxtw_i64(<vscale x 2 x i64> %a, <vscale x 2 x i1> %pg, <vscale x 2 x i64> %b) {
; CHECK-LABEL: uxtw_i64:
; CHECK: uxtw z0.d, p0/m, z1.d
  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.uxtw.nxv2i64(<vscale x 2 x i64> %a,
                                                                <vscale x 2 x i1> %pg,
                                                                <vscale x 2 x i64> %b)
  ret <vscale x 2 x i64> %out
}

declare <vscale x 8 x i16> @llvm.aarch64.sve.sxtb.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sxtb.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sxtb.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.sxth.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sxth.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.sxtw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)

declare <vscale x 8 x i16> @llvm.aarch64.sve.uxtb.nxv8i16(<vscale x 8 x i16>, <vscale x 8 x i1>, <vscale x 8 x i16>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uxtb.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uxtb.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)
declare <vscale x 4 x i32> @llvm.aarch64.sve.uxth.nxv4i32(<vscale x 4 x i32>, <vscale x 4 x i1>, <vscale x 4 x i32>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uxth.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)
declare <vscale x 2 x i64> @llvm.aarch64.sve.uxtw.nxv2i64(<vscale x 2 x i64>, <vscale x 2 x i1>, <vscale x 2 x i64>)