/* Machine mode switch for RISC-V 'V' Extension for GNU compiler.
   Copyright (C) 2022-2024 Free Software Foundation, Inc.
   Contributed by Ju-Zhe Zhong (juzhe.zhong@rivai.ai), RiVAI Technologies Ltd.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file enables or disables the RVV modes according to '-march'.  */
/* According to rvv-intrinsic and RISC-V 'V' Extension ISA document:
   https://github.com/riscv-non-isa/rvv-intrinsic-doc/blob/master/rvv-intrinsic-rfc.md.
   https://github.com/riscv/riscv-v-spec/blob/master/v-spec.adoc.

   Encode SEW and LMUL into data types.
   We enforce the constraint LMUL >= SEW/ELEN in the implementation.
   There are the following data types for MIN_VLEN > 32.

   Note: N/A means the corresponding vector type is disabled.

   Encode SEW and LMUL into data types.
   We enforce the constraint LMUL >= SEW/ELEN in the implementation.
   There are the following data types for ELEN = 64.

   |Modes|LMUL=1 |LMUL=2 |LMUL=4 |LMUL=8 |LMUL=1/2|LMUL=1/4|LMUL=1/8|
   |DI   |RVVM1DI|RVVM2DI|RVVM4DI|RVVM8DI|N/A     |N/A     |N/A     |
   |SI   |RVVM1SI|RVVM2SI|RVVM4SI|RVVM8SI|RVVMF2SI|N/A     |N/A     |
   |HI   |RVVM1HI|RVVM2HI|RVVM4HI|RVVM8HI|RVVMF2HI|RVVMF4HI|N/A     |
   |QI   |RVVM1QI|RVVM2QI|RVVM4QI|RVVM8QI|RVVMF2QI|RVVMF4QI|RVVMF8QI|
   |DF   |RVVM1DF|RVVM2DF|RVVM4DF|RVVM8DF|N/A     |N/A     |N/A     |
   |SF   |RVVM1SF|RVVM2SF|RVVM4SF|RVVM8SF|RVVMF2SF|N/A     |N/A     |
   |HF   |RVVM1HF|RVVM2HF|RVVM4HF|RVVM8HF|RVVMF2HF|RVVMF4HF|N/A     |
   |BF   |RVVM1BF|RVVM2BF|RVVM4BF|RVVM8BF|RVVMF2BF|RVVMF4BF|N/A     |

   There are the following data types for ELEN = 32.

   |Modes|LMUL=1 |LMUL=2 |LMUL=4 |LMUL=8 |LMUL=1/2|LMUL=1/4|LMUL=1/8|
   |SI   |RVVM1SI|RVVM2SI|RVVM4SI|RVVM8SI|N/A     |N/A     |N/A     |
   |HI   |RVVM1HI|RVVM2HI|RVVM4HI|RVVM8HI|RVVMF2HI|N/A     |N/A     |
   |QI   |RVVM1QI|RVVM2QI|RVVM4QI|RVVM8QI|RVVMF2QI|RVVMF4QI|N/A     |
   |SF   |RVVM1SF|RVVM2SF|RVVM4SF|RVVM8SF|N/A     |N/A     |N/A     |
   |HF   |RVVM1HF|RVVM2HF|RVVM4HF|RVVM8HF|RVVMF2HF|N/A     |N/A     |
   |BF   |RVVM1BF|RVVM2BF|RVVM4BF|RVVM8BF|RVVMF2BF|N/A     |N/A     |

   Encode the ratio of SEW/LMUL into the mask types.
   There are the following mask types.

   |Modes| n = 1 | n = 2  | n = 4  | n = 8  | n = 16  | n = 32  | n = 64  |
   |BI   |RVVM1BI|RVVMF2BI|RVVMF4BI|RVVMF8BI|RVVMF16BI|RVVMF32BI|RVVMF64BI|  */
/* Return 'REQUIREMENT' for machine_mode 'MODE'.
   For example: 'MODE' = RVVMF64BImode needs TARGET_MIN_VLEN > 32.  */
/* Guard the fallback definition: the including file normally defines
   ENTRY itself; defining it unconditionally here would clobber the
   consumer's expansion with an empty one.  */
#ifndef ENTRY
#define ENTRY(MODE, REQUIREMENT, VLMUL, RATIO)
#endif
72 /* Disable modes if TARGET_MIN_VLEN
== 32.
*/
73 ENTRY (RVVMF64BI
, TARGET_MIN_VLEN
> 32, TARGET_XTHEADVECTOR ? LMUL_1
:LMUL_F8
, 64)
74 ENTRY (RVVMF32BI
, true
, TARGET_XTHEADVECTOR ? LMUL_1
:LMUL_F4
, 32)
75 ENTRY (RVVMF16BI
, true
, TARGET_XTHEADVECTOR ? LMUL_1
: LMUL_F2
, 16)
76 ENTRY (RVVMF8BI
, true
, LMUL_1
, 8)
77 ENTRY (RVVMF4BI
, true
, LMUL_2
, 4)
78 ENTRY (RVVMF2BI
, true
, LMUL_4
, 2)
79 ENTRY (RVVM1BI
, true
, LMUL_8
, 1)
/* QImode-element vector modes.
   Disable modes if TARGET_MIN_VLEN == 32.  */
ENTRY (RVVM8QI, true, LMUL_8, 1)
ENTRY (RVVM4QI, true, LMUL_4, 2)
ENTRY (RVVM2QI, true, LMUL_2, 4)
ENTRY (RVVM1QI, true, LMUL_1, 8)
ENTRY (RVVMF2QI, !TARGET_XTHEADVECTOR, LMUL_F2, 16)
ENTRY (RVVMF4QI, !TARGET_XTHEADVECTOR, LMUL_F4, 32)
ENTRY (RVVMF8QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, LMUL_F8, 64)
/* HImode-element vector modes.
   Disable modes if TARGET_MIN_VLEN == 32.  */
ENTRY (RVVM8HI, true, LMUL_8, 2)
ENTRY (RVVM4HI, true, LMUL_4, 4)
ENTRY (RVVM2HI, true, LMUL_2, 8)
ENTRY (RVVM1HI, true, LMUL_1, 16)
ENTRY (RVVMF2HI, !TARGET_XTHEADVECTOR, LMUL_F2, 32)
ENTRY (RVVMF4HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, LMUL_F4, 64)
/* BFmode-element vector modes.
   Disable modes if TARGET_MIN_VLEN == 32 or !TARGET_VECTOR_ELEN_BF_16.  */
ENTRY (RVVM8BF, TARGET_VECTOR_ELEN_BF_16, LMUL_8, 2)
ENTRY (RVVM4BF, TARGET_VECTOR_ELEN_BF_16, LMUL_4, 4)
ENTRY (RVVM2BF, TARGET_VECTOR_ELEN_BF_16, LMUL_2, 8)
ENTRY (RVVM1BF, TARGET_VECTOR_ELEN_BF_16, LMUL_1, 16)
ENTRY (RVVMF2BF, TARGET_VECTOR_ELEN_BF_16, LMUL_F2, 32)
ENTRY (RVVMF4BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, LMUL_F4, 64)
/* HFmode-element vector modes.
   Disable modes if TARGET_MIN_VLEN == 32 or !TARGET_VECTOR_ELEN_FP_16.  */
ENTRY (RVVM8HF, TARGET_VECTOR_ELEN_FP_16, LMUL_8, 2)
ENTRY (RVVM4HF, TARGET_VECTOR_ELEN_FP_16, LMUL_4, 4)
ENTRY (RVVM2HF, TARGET_VECTOR_ELEN_FP_16, LMUL_2, 8)
ENTRY (RVVM1HF, TARGET_VECTOR_ELEN_FP_16, LMUL_1, 16)
ENTRY (RVVMF2HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, LMUL_F2, 32)
ENTRY (RVVMF4HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, LMUL_F4, 64)
/* SImode-element vector modes.
   Disable modes if TARGET_MIN_VLEN == 32.  */
ENTRY (RVVM8SI, true, LMUL_8, 4)
ENTRY (RVVM4SI, true, LMUL_4, 8)
ENTRY (RVVM2SI, true, LMUL_2, 16)
ENTRY (RVVM1SI, true, LMUL_1, 32)
ENTRY (RVVMF2SI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, LMUL_F2, 64)
/* SFmode-element vector modes.
   Disable modes if TARGET_MIN_VLEN == 32 or !TARGET_VECTOR_ELEN_FP_32.  */
ENTRY (RVVM8SF, TARGET_VECTOR_ELEN_FP_32, LMUL_8, 4)
ENTRY (RVVM4SF, TARGET_VECTOR_ELEN_FP_32, LMUL_4, 8)
ENTRY (RVVM2SF, TARGET_VECTOR_ELEN_FP_32, LMUL_2, 16)
ENTRY (RVVM1SF, TARGET_VECTOR_ELEN_FP_32, LMUL_1, 32)
ENTRY (RVVMF2SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, LMUL_F2, 64)
/* DImode-element vector modes.
   Disable modes if !TARGET_VECTOR_ELEN_64.  */
ENTRY (RVVM8DI, TARGET_VECTOR_ELEN_64, LMUL_8, 8)
ENTRY (RVVM4DI, TARGET_VECTOR_ELEN_64, LMUL_4, 16)
ENTRY (RVVM2DI, TARGET_VECTOR_ELEN_64, LMUL_2, 32)
ENTRY (RVVM1DI, TARGET_VECTOR_ELEN_64, LMUL_1, 64)
/* DFmode-element vector modes.
   Disable modes if !TARGET_VECTOR_ELEN_FP_64.  */
ENTRY (RVVM8DF, TARGET_VECTOR_ELEN_FP_64, LMUL_8, 8)
ENTRY (RVVM4DF, TARGET_VECTOR_ELEN_FP_64, LMUL_4, 16)
ENTRY (RVVM2DF, TARGET_VECTOR_ELEN_FP_64, LMUL_2, 32)
ENTRY (RVVM1DF, TARGET_VECTOR_ELEN_FP_64, LMUL_1, 64)
/* Tuple modes for segment loads/stores according to NF.

   Tuple modes format: RVV<LMUL>x<NF><BASEMODE>

   When LMUL is MF8/MF4/MF2/M1, NF can be 2 ~ 8.
   When LMUL is M2, NF can be 2 ~ 4.
   When LMUL is M4, NF can be 2.  */
/* Guarded fallback so the consumer's TUPLE_ENTRY definition is not
   clobbered by this empty one.  */
#ifndef TUPLE_ENTRY
#define TUPLE_ENTRY(MODE, REQUIREMENT, SUBPART_MODE, NF, VLMUL, RATIO)
#endif
/* Tuple modes with QImode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8QI, true, RVVM1QI, 8, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x8QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 8, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x8QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 8, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x8QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 8, LMUL_F8, 64)
TUPLE_ENTRY (RVVM1x7QI, true, RVVM1QI, 7, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x7QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 7, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x7QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 7, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x7QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 7, LMUL_F8, 64)
TUPLE_ENTRY (RVVM1x6QI, true, RVVM1QI, 6, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x6QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 6, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x6QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 6, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x6QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 6, LMUL_F8, 64)
TUPLE_ENTRY (RVVM1x5QI, true, RVVM1QI, 5, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x5QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 5, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x5QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 5, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x5QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 5, LMUL_F8, 64)
TUPLE_ENTRY (RVVM2x4QI, true, RVVM2QI, 4, LMUL_2, 4)
TUPLE_ENTRY (RVVM1x4QI, true, RVVM1QI, 4, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x4QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 4, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x4QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 4, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x4QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 4, LMUL_F8, 64)
TUPLE_ENTRY (RVVM2x3QI, true, RVVM2QI, 3, LMUL_2, 4)
TUPLE_ENTRY (RVVM1x3QI, true, RVVM1QI, 3, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x3QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 3, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x3QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 3, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x3QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 3, LMUL_F8, 64)
TUPLE_ENTRY (RVVM4x2QI, true, RVVM4QI, 2, LMUL_4, 2)
TUPLE_ENTRY (RVVM2x2QI, true, RVVM2QI, 2, LMUL_2, 4)
TUPLE_ENTRY (RVVM1x2QI, true, RVVM1QI, 2, LMUL_1, 8)
TUPLE_ENTRY (RVVMF2x2QI, !TARGET_XTHEADVECTOR, RVVMF2QI, 2, LMUL_F2, 16)
TUPLE_ENTRY (RVVMF4x2QI, !TARGET_XTHEADVECTOR, RVVMF4QI, 2, LMUL_F4, 32)
TUPLE_ENTRY (RVVMF8x2QI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF8QI, 2, LMUL_F8, 64)
/* Tuple modes with HImode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8HI, true, RVVM1HI, 8, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x8HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 8, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x8HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 8, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x7HI, true, RVVM1HI, 7, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x7HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 7, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x7HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 7, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x6HI, true, RVVM1HI, 6, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x6HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 6, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x6HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 6, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x5HI, true, RVVM1HI, 5, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x5HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 5, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x5HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 5, LMUL_F4, 64)
TUPLE_ENTRY (RVVM2x4HI, true, RVVM2HI, 4, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x4HI, true, RVVM1HI, 4, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x4HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 4, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x4HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 4, LMUL_F4, 64)
TUPLE_ENTRY (RVVM2x3HI, true, RVVM2HI, 3, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x3HI, true, RVVM1HI, 3, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x3HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 3, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x3HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 3, LMUL_F4, 64)
TUPLE_ENTRY (RVVM4x2HI, true, RVVM4HI, 2, LMUL_4, 4)
TUPLE_ENTRY (RVVM2x2HI, true, RVVM2HI, 2, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x2HI, true, RVVM1HI, 2, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x2HI, !TARGET_XTHEADVECTOR, RVVMF2HI, 2, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x2HI, TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HI, 2, LMUL_F4, 64)
/* Tuple modes with BFmode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 8, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x8BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 8, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x8BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 8, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x7BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 7, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x7BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 7, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x7BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 7, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x6BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 6, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x6BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 6, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x6BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 6, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x5BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 5, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x5BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 5, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x5BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 5, LMUL_F4, 64)
TUPLE_ENTRY (RVVM2x4BF, TARGET_VECTOR_ELEN_BF_16, RVVM2BF, 4, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x4BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 4, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x4BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 4, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x4BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 4, LMUL_F4, 64)
TUPLE_ENTRY (RVVM2x3BF, TARGET_VECTOR_ELEN_BF_16, RVVM2BF, 3, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x3BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 3, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x3BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 3, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x3BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 3, LMUL_F4, 64)
TUPLE_ENTRY (RVVM4x2BF, TARGET_VECTOR_ELEN_BF_16, RVVM4BF, 2, LMUL_4, 4)
TUPLE_ENTRY (RVVM2x2BF, TARGET_VECTOR_ELEN_BF_16, RVVM2BF, 2, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x2BF, TARGET_VECTOR_ELEN_BF_16, RVVM1BF, 2, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x2BF, TARGET_VECTOR_ELEN_BF_16, RVVMF2BF, 2, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x2BF, TARGET_VECTOR_ELEN_BF_16 && TARGET_MIN_VLEN > 32, RVVMF4BF, 2, LMUL_F4, 64)
/* Tuple modes with HFmode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 8, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x8HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 8, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x8HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 8, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x7HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 7, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x7HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 7, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x7HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 7, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x6HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 6, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x6HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 6, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x6HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 6, LMUL_F4, 64)
TUPLE_ENTRY (RVVM1x5HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 5, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x5HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 5, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x5HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 5, LMUL_F4, 64)
TUPLE_ENTRY (RVVM2x4HF, TARGET_VECTOR_ELEN_FP_16, RVVM2HF, 4, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x4HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 4, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x4HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 4, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x4HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 4, LMUL_F4, 64)
TUPLE_ENTRY (RVVM2x3HF, TARGET_VECTOR_ELEN_FP_16, RVVM2HF, 3, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x3HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 3, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x3HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 3, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x3HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 3, LMUL_F4, 64)
TUPLE_ENTRY (RVVM4x2HF, TARGET_VECTOR_ELEN_FP_16, RVVM4HF, 2, LMUL_4, 4)
TUPLE_ENTRY (RVVM2x2HF, TARGET_VECTOR_ELEN_FP_16, RVVM2HF, 2, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x2HF, TARGET_VECTOR_ELEN_FP_16, RVVM1HF, 2, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x2HF, TARGET_VECTOR_ELEN_FP_16 && !TARGET_XTHEADVECTOR, RVVMF2HF, 2, LMUL_F2, 32)
TUPLE_ENTRY (RVVMF4x2HF, TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF4HF, 2, LMUL_F4, 64)
263 TUPLE_ENTRY (RVVM1x8SI
, true
, RVVM1SI
, 8, LMUL_1
, 16)
264 TUPLE_ENTRY (RVVMF2x8SI
, (TARGET_MIN_VLEN
> 32) && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 8, LMUL_F2
, 32)
265 TUPLE_ENTRY (RVVM1x7SI
, true
, RVVM1SI
, 7, LMUL_1
, 16)
266 TUPLE_ENTRY (RVVMF2x7SI
, (TARGET_MIN_VLEN
> 32) && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 7, LMUL_F2
, 32)
267 TUPLE_ENTRY (RVVM1x6SI
, true
, RVVM1SI
, 6, LMUL_1
, 16)
268 TUPLE_ENTRY (RVVMF2x6SI
, TARGET_MIN_VLEN
> 32 && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 6, LMUL_F2
, 32)
269 TUPLE_ENTRY (RVVM1x5SI
, true
, RVVM1SI
, 5, LMUL_1
, 16)
270 TUPLE_ENTRY (RVVMF2x5SI
, TARGET_MIN_VLEN
> 32 && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 5, LMUL_F2
, 32)
271 TUPLE_ENTRY (RVVM2x4SI
, true
, RVVM2SI
, 4, LMUL_2
, 8)
272 TUPLE_ENTRY (RVVM1x4SI
, true
, RVVM1SI
, 4, LMUL_1
, 16)
273 TUPLE_ENTRY (RVVMF2x4SI
, TARGET_MIN_VLEN
> 32 && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 4, LMUL_F2
, 32)
274 TUPLE_ENTRY (RVVM2x3SI
, true
, RVVM2SI
, 3, LMUL_2
, 8)
275 TUPLE_ENTRY (RVVM1x3SI
, true
, RVVM1SI
, 3, LMUL_1
, 16)
276 TUPLE_ENTRY (RVVMF2x3SI
, TARGET_MIN_VLEN
> 32 && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 3, LMUL_F2
, 32)
277 TUPLE_ENTRY (RVVM4x2SI
, true
, RVVM4SI
, 2, LMUL_4
, 4)
278 TUPLE_ENTRY (RVVM2x2SI
, true
, RVVM2SI
, 2, LMUL_2
, 8)
279 TUPLE_ENTRY (RVVM1x2SI
, true
, RVVM1SI
, 2, LMUL_1
, 16)
280 TUPLE_ENTRY (RVVMF2x2SI
, TARGET_MIN_VLEN
> 32 && !TARGET_XTHEADVECTOR
, RVVMF2SI
, 2, LMUL_F2
, 32)
/* Tuple modes with SFmode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 8, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x8SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 8, LMUL_F2, 32)
TUPLE_ENTRY (RVVM1x7SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 7, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x7SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 7, LMUL_F2, 32)
TUPLE_ENTRY (RVVM1x6SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 6, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x6SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 6, LMUL_F2, 32)
TUPLE_ENTRY (RVVM1x5SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 5, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x5SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 5, LMUL_F2, 32)
TUPLE_ENTRY (RVVM2x4SF, TARGET_VECTOR_ELEN_FP_32, RVVM2SF, 4, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x4SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 4, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x4SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 4, LMUL_F2, 32)
TUPLE_ENTRY (RVVM2x3SF, TARGET_VECTOR_ELEN_FP_32, RVVM2SF, 3, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x3SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 3, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x3SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 3, LMUL_F2, 32)
TUPLE_ENTRY (RVVM4x2SF, TARGET_VECTOR_ELEN_FP_32, RVVM4SF, 2, LMUL_4, 4)
TUPLE_ENTRY (RVVM2x2SF, TARGET_VECTOR_ELEN_FP_32, RVVM2SF, 2, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x2SF, TARGET_VECTOR_ELEN_FP_32, RVVM1SF, 2, LMUL_1, 16)
TUPLE_ENTRY (RVVMF2x2SF, TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN > 32 && !TARGET_XTHEADVECTOR, RVVMF2SF, 2, LMUL_F2, 32)
/* Tuple modes with DImode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 8, LMUL_1, 16)
TUPLE_ENTRY (RVVM1x7DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 7, LMUL_1, 16)
TUPLE_ENTRY (RVVM1x6DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 6, LMUL_1, 16)
TUPLE_ENTRY (RVVM1x5DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 5, LMUL_1, 16)
TUPLE_ENTRY (RVVM2x4DI, TARGET_VECTOR_ELEN_64, RVVM2DI, 4, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x4DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 4, LMUL_1, 16)
TUPLE_ENTRY (RVVM2x3DI, TARGET_VECTOR_ELEN_64, RVVM2DI, 3, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x3DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 3, LMUL_1, 16)
TUPLE_ENTRY (RVVM4x2DI, TARGET_VECTOR_ELEN_64, RVVM4DI, 2, LMUL_4, 4)
TUPLE_ENTRY (RVVM2x2DI, TARGET_VECTOR_ELEN_64, RVVM2DI, 2, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x2DI, TARGET_VECTOR_ELEN_64, RVVM1DI, 2, LMUL_1, 16)
/* Tuple modes with DFmode subparts (NF = 2 ~ 8).  */
TUPLE_ENTRY (RVVM1x8DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 8, LMUL_1, 16)
TUPLE_ENTRY (RVVM1x7DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 7, LMUL_1, 16)
TUPLE_ENTRY (RVVM1x6DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 6, LMUL_1, 16)
TUPLE_ENTRY (RVVM1x5DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 5, LMUL_1, 16)
TUPLE_ENTRY (RVVM2x4DF, TARGET_VECTOR_ELEN_FP_64, RVVM2DF, 4, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x4DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 4, LMUL_1, 16)
TUPLE_ENTRY (RVVM2x3DF, TARGET_VECTOR_ELEN_FP_64, RVVM2DF, 3, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x3DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 3, LMUL_1, 16)
TUPLE_ENTRY (RVVM4x2DF, TARGET_VECTOR_ELEN_FP_64, RVVM4DF, 2, LMUL_4, 4)
TUPLE_ENTRY (RVVM2x2DF, TARGET_VECTOR_ELEN_FP_64, RVVM2DF, 2, LMUL_2, 8)
TUPLE_ENTRY (RVVM1x2DF, TARGET_VECTOR_ELEN_FP_64, RVVM1DF, 2, LMUL_1, 16)
/* Guarded fallback so the consumer's VLS_ENTRY definition is not
   clobbered by this empty one.  */
#ifndef VLS_ENTRY
#define VLS_ENTRY(MODE, REQUIREMENT)
#endif

/* The following VLS modes should satisfy the constraint:
   GET_MODE_BITSIZE (MODE) <= TARGET_MIN_VLEN * 8.  */
/* VLS boolean (mask) modes.  */
VLS_ENTRY (V1BI, riscv_vector::vls_mode_valid_p (V1BImode))
VLS_ENTRY (V2BI, riscv_vector::vls_mode_valid_p (V2BImode))
VLS_ENTRY (V4BI, riscv_vector::vls_mode_valid_p (V4BImode))
VLS_ENTRY (V8BI, riscv_vector::vls_mode_valid_p (V8BImode))
VLS_ENTRY (V16BI, riscv_vector::vls_mode_valid_p (V16BImode))
VLS_ENTRY (V32BI, riscv_vector::vls_mode_valid_p (V32BImode))
VLS_ENTRY (V64BI, riscv_vector::vls_mode_valid_p (V64BImode) && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V128BI, riscv_vector::vls_mode_valid_p (V128BImode) && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V256BI, riscv_vector::vls_mode_valid_p (V256BImode) && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V512BI, riscv_vector::vls_mode_valid_p (V512BImode) && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V1024BI, riscv_vector::vls_mode_valid_p (V1024BImode) && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V2048BI, riscv_vector::vls_mode_valid_p (V2048BImode) && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V4096BI, riscv_vector::vls_mode_valid_p (V4096BImode) && TARGET_MIN_VLEN >= 4096)
/* VLS modes with QImode elements.  */
VLS_ENTRY (V1QI, riscv_vector::vls_mode_valid_p (V1QImode))
VLS_ENTRY (V2QI, riscv_vector::vls_mode_valid_p (V2QImode))
VLS_ENTRY (V4QI, riscv_vector::vls_mode_valid_p (V4QImode))
VLS_ENTRY (V8QI, riscv_vector::vls_mode_valid_p (V8QImode))
VLS_ENTRY (V16QI, riscv_vector::vls_mode_valid_p (V16QImode))
VLS_ENTRY (V32QI, riscv_vector::vls_mode_valid_p (V32QImode))
VLS_ENTRY (V64QI, riscv_vector::vls_mode_valid_p (V64QImode) && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V128QI, riscv_vector::vls_mode_valid_p (V128QImode) && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V256QI, riscv_vector::vls_mode_valid_p (V256QImode) && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V512QI, riscv_vector::vls_mode_valid_p (V512QImode) && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V1024QI, riscv_vector::vls_mode_valid_p (V1024QImode) && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V2048QI, riscv_vector::vls_mode_valid_p (V2048QImode) && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V4096QI, riscv_vector::vls_mode_valid_p (V4096QImode) && TARGET_MIN_VLEN >= 4096)
/* VLS modes with HImode elements.  */
VLS_ENTRY (V1HI, riscv_vector::vls_mode_valid_p (V1HImode))
VLS_ENTRY (V2HI, riscv_vector::vls_mode_valid_p (V2HImode))
VLS_ENTRY (V4HI, riscv_vector::vls_mode_valid_p (V4HImode))
VLS_ENTRY (V8HI, riscv_vector::vls_mode_valid_p (V8HImode))
VLS_ENTRY (V16HI, riscv_vector::vls_mode_valid_p (V16HImode))
VLS_ENTRY (V32HI, riscv_vector::vls_mode_valid_p (V32HImode) && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V64HI, riscv_vector::vls_mode_valid_p (V64HImode) && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V128HI, riscv_vector::vls_mode_valid_p (V128HImode) && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V256HI, riscv_vector::vls_mode_valid_p (V256HImode) && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V512HI, riscv_vector::vls_mode_valid_p (V512HImode) && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V1024HI, riscv_vector::vls_mode_valid_p (V1024HImode) && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V2048HI, riscv_vector::vls_mode_valid_p (V2048HImode) && TARGET_MIN_VLEN >= 4096)
/* VLS modes with SImode elements.  */
VLS_ENTRY (V1SI, riscv_vector::vls_mode_valid_p (V1SImode))
VLS_ENTRY (V2SI, riscv_vector::vls_mode_valid_p (V2SImode))
VLS_ENTRY (V4SI, riscv_vector::vls_mode_valid_p (V4SImode))
VLS_ENTRY (V8SI, riscv_vector::vls_mode_valid_p (V8SImode))
VLS_ENTRY (V16SI, riscv_vector::vls_mode_valid_p (V16SImode) && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V32SI, riscv_vector::vls_mode_valid_p (V32SImode) && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V64SI, riscv_vector::vls_mode_valid_p (V64SImode) && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V128SI, riscv_vector::vls_mode_valid_p (V128SImode) && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V256SI, riscv_vector::vls_mode_valid_p (V256SImode) && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V512SI, riscv_vector::vls_mode_valid_p (V512SImode) && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V1024SI, riscv_vector::vls_mode_valid_p (V1024SImode) && TARGET_MIN_VLEN >= 4096)
/* VLS modes with DImode elements; require TARGET_VECTOR_ELEN_64.  */
VLS_ENTRY (V1DI, riscv_vector::vls_mode_valid_p (V1DImode) && TARGET_VECTOR_ELEN_64)
VLS_ENTRY (V2DI, riscv_vector::vls_mode_valid_p (V2DImode) && TARGET_VECTOR_ELEN_64)
VLS_ENTRY (V4DI, riscv_vector::vls_mode_valid_p (V4DImode) && TARGET_VECTOR_ELEN_64)
VLS_ENTRY (V8DI, riscv_vector::vls_mode_valid_p (V8DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V16DI, riscv_vector::vls_mode_valid_p (V16DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V32DI, riscv_vector::vls_mode_valid_p (V32DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V64DI, riscv_vector::vls_mode_valid_p (V64DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V128DI, riscv_vector::vls_mode_valid_p (V128DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V256DI, riscv_vector::vls_mode_valid_p (V256DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V512DI, riscv_vector::vls_mode_valid_p (V512DImode) && TARGET_VECTOR_ELEN_64 && TARGET_MIN_VLEN >= 4096)
/* VLS modes with HFmode elements; require TARGET_VECTOR_ELEN_FP_16.  */
VLS_ENTRY (V1HF, riscv_vector::vls_mode_valid_p (V1HFmode) && TARGET_VECTOR_ELEN_FP_16)
VLS_ENTRY (V2HF, riscv_vector::vls_mode_valid_p (V2HFmode) && TARGET_VECTOR_ELEN_FP_16)
VLS_ENTRY (V4HF, riscv_vector::vls_mode_valid_p (V4HFmode) && TARGET_VECTOR_ELEN_FP_16)
VLS_ENTRY (V8HF, riscv_vector::vls_mode_valid_p (V8HFmode) && TARGET_VECTOR_ELEN_FP_16)
VLS_ENTRY (V16HF, riscv_vector::vls_mode_valid_p (V16HFmode) && TARGET_VECTOR_ELEN_FP_16)
VLS_ENTRY (V32HF, riscv_vector::vls_mode_valid_p (V32HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V64HF, riscv_vector::vls_mode_valid_p (V64HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V128HF, riscv_vector::vls_mode_valid_p (V128HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V256HF, riscv_vector::vls_mode_valid_p (V256HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V512HF, riscv_vector::vls_mode_valid_p (V512HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V1024HF, riscv_vector::vls_mode_valid_p (V1024HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V2048HF, riscv_vector::vls_mode_valid_p (V2048HFmode) && TARGET_VECTOR_ELEN_FP_16 && TARGET_MIN_VLEN >= 4096)
/* VLS modes with SFmode elements; require TARGET_VECTOR_ELEN_FP_32.  */
VLS_ENTRY (V1SF, riscv_vector::vls_mode_valid_p (V1SFmode) && TARGET_VECTOR_ELEN_FP_32)
VLS_ENTRY (V2SF, riscv_vector::vls_mode_valid_p (V2SFmode) && TARGET_VECTOR_ELEN_FP_32)
VLS_ENTRY (V4SF, riscv_vector::vls_mode_valid_p (V4SFmode) && TARGET_VECTOR_ELEN_FP_32)
VLS_ENTRY (V8SF, riscv_vector::vls_mode_valid_p (V8SFmode) && TARGET_VECTOR_ELEN_FP_32)
VLS_ENTRY (V16SF, riscv_vector::vls_mode_valid_p (V16SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V32SF, riscv_vector::vls_mode_valid_p (V32SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V64SF, riscv_vector::vls_mode_valid_p (V64SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V128SF, riscv_vector::vls_mode_valid_p (V128SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V256SF, riscv_vector::vls_mode_valid_p (V256SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V512SF, riscv_vector::vls_mode_valid_p (V512SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V1024SF, riscv_vector::vls_mode_valid_p (V1024SFmode) && TARGET_VECTOR_ELEN_FP_32 && TARGET_MIN_VLEN >= 4096)
/* VLS modes with DFmode elements; require TARGET_VECTOR_ELEN_FP_64.  */
VLS_ENTRY (V1DF, riscv_vector::vls_mode_valid_p (V1DFmode) && TARGET_VECTOR_ELEN_FP_64)
VLS_ENTRY (V2DF, riscv_vector::vls_mode_valid_p (V2DFmode) && TARGET_VECTOR_ELEN_FP_64)
VLS_ENTRY (V4DF, riscv_vector::vls_mode_valid_p (V4DFmode) && TARGET_VECTOR_ELEN_FP_64)
VLS_ENTRY (V8DF, riscv_vector::vls_mode_valid_p (V8DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 64)
VLS_ENTRY (V16DF, riscv_vector::vls_mode_valid_p (V16DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 128)
VLS_ENTRY (V32DF, riscv_vector::vls_mode_valid_p (V32DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 256)
VLS_ENTRY (V64DF, riscv_vector::vls_mode_valid_p (V64DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 512)
VLS_ENTRY (V128DF, riscv_vector::vls_mode_valid_p (V128DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 1024)
VLS_ENTRY (V256DF, riscv_vector::vls_mode_valid_p (V256DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 2048)
VLS_ENTRY (V512DF, riscv_vector::vls_mode_valid_p (V512DFmode) && TARGET_VECTOR_ELEN_FP_64 && TARGET_MIN_VLEN >= 4096)