//===- X86ManualFoldTables.def ----------------------------*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines all the entries in X86 memory folding tables that need
// special handling.
//===----------------------------------------------------------------------===//
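//
// Each NOFOLD(INSN) entry below names an opcode that must be excluded from
// memory-operand folding. The file is meant to be consumed with the usual
// .def X-macro pattern; the sketch below is illustrative only (the array name
// and macro body are assumptions, not the exact definitions used by the
// consumer, X86InstrFoldTables.cpp):
//
//   #define NOFOLD(INSN) X86::INSN,
//   static const unsigned NoFoldOpcodes[] = {  // hypothetical array name
//   #include "X86ManualFoldTables.def"
//   };
//   #undef NOFOLD
//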
NOFOLD(VCOMPRESSPDZ128rrk)
NOFOLD(VCOMPRESSPDZ256rrk)
NOFOLD(VCOMPRESSPDZrrk)
NOFOLD(VCOMPRESSPSZ128rrk)
NOFOLD(VCOMPRESSPSZ256rrk)
NOFOLD(VCOMPRESSPSZrrk)
NOFOLD(VCVTPS2PHZ128rrk)
NOFOLD(VCVTPS2PHZ256rrk)
NOFOLD(VEXTRACTF32X4Z256rrik)
NOFOLD(VEXTRACTF32X4Zrrik)
NOFOLD(VEXTRACTF32X8Zrrik)
NOFOLD(VEXTRACTF64X2Z256rrik)
NOFOLD(VEXTRACTF64X2Zrrik)
NOFOLD(VEXTRACTF64X4Zrrik)
NOFOLD(VEXTRACTI32X4Z256rrik)
NOFOLD(VEXTRACTI32X4Zrrik)
NOFOLD(VEXTRACTI32X8Zrrik)
NOFOLD(VEXTRACTI64X2Z256rrik)
NOFOLD(VEXTRACTI64X2Zrrik)
NOFOLD(VEXTRACTI64X4Zrrik)
NOFOLD(VMOVAPDZ128mrk)
NOFOLD(VMOVAPDZ256mrk)
NOFOLD(VMOVAPSZ128mrk)
NOFOLD(VMOVAPSZ256mrk)
NOFOLD(VMOVDQA32Z128mrk)
NOFOLD(VMOVDQA32Z256mrk)
NOFOLD(VMOVDQA64Z128mrk)
NOFOLD(VMOVDQA64Z256mrk)
NOFOLD(VMOVDQU16Z128mrk)
NOFOLD(VMOVDQU16Z256mrk)
NOFOLD(VMOVDQU32Z128mrk)
NOFOLD(VMOVDQU32Z256mrk)
NOFOLD(VMOVDQU64Z128mrk)
NOFOLD(VMOVDQU64Z256mrk)
NOFOLD(VMOVDQU8Z128mrk)
NOFOLD(VMOVDQU8Z256mrk)
NOFOLD(VMOVUPDZ128mrk)
NOFOLD(VMOVUPDZ256mrk)
NOFOLD(VMOVUPSZ128mrk)
NOFOLD(VMOVUPSZ256mrk)
NOFOLD(VPCOMPRESSBZ128rrk)
NOFOLD(VPCOMPRESSBZ256rrk)
NOFOLD(VPCOMPRESSBZrrk)
NOFOLD(VPCOMPRESSDZ128rrk)
NOFOLD(VPCOMPRESSDZ256rrk)
NOFOLD(VPCOMPRESSDZrrk)
NOFOLD(VPCOMPRESSQZ128rrk)
NOFOLD(VPCOMPRESSQZ256rrk)
NOFOLD(VPCOMPRESSQZrrk)
NOFOLD(VPCOMPRESSWZ128rrk)
NOFOLD(VPCOMPRESSWZ256rrk)
NOFOLD(VPCOMPRESSWZrrk)
NOFOLD(VPMOVDBZ128rrk)
NOFOLD(VPMOVDBZ256rrk)
NOFOLD(VPMOVDWZ128rrk)
NOFOLD(VPMOVDWZ256rrk)
NOFOLD(VPMOVQBZ128rrk)
NOFOLD(VPMOVQBZ256rrk)
NOFOLD(VPMOVQDZ128rrk)
NOFOLD(VPMOVQDZ256rrk)
NOFOLD(VPMOVQWZ128rrk)
NOFOLD(VPMOVQWZ256rrk)
NOFOLD(VPMOVSDBZ128rrk)
NOFOLD(VPMOVSDBZ256rrk)
NOFOLD(VPMOVSDWZ128rrk)
NOFOLD(VPMOVSDWZ256rrk)
NOFOLD(VPMOVSQBZ128rrk)
NOFOLD(VPMOVSQBZ256rrk)
NOFOLD(VPMOVSQDZ128rrk)
NOFOLD(VPMOVSQDZ256rrk)
NOFOLD(VPMOVSQWZ128rrk)
NOFOLD(VPMOVSQWZ256rrk)
NOFOLD(VPMOVSWBZ128rrk)
NOFOLD(VPMOVSWBZ256rrk)
NOFOLD(VPMOVUSDBZ128rrk)
NOFOLD(VPMOVUSDBZ256rrk)
NOFOLD(VPMOVUSDBZrrk)
NOFOLD(VPMOVUSDWZ128rrk)
NOFOLD(VPMOVUSDWZ256rrk)
NOFOLD(VPMOVUSDWZrrk)
NOFOLD(VPMOVUSQBZ128rrk)
NOFOLD(VPMOVUSQBZ256rrk)
NOFOLD(VPMOVUSQBZrrk)
NOFOLD(VPMOVUSQDZ128rrk)
NOFOLD(VPMOVUSQDZ256rrk)
NOFOLD(VPMOVUSQDZrrk)
NOFOLD(VPMOVUSQWZ128rrk)
NOFOLD(VPMOVUSQWZ256rrk)
NOFOLD(VPMOVUSQWZrrk)
NOFOLD(VPMOVUSWBZ128rrk)
NOFOLD(VPMOVUSWBZ256rrk)
NOFOLD(VPMOVUSWBZrrk)
NOFOLD(VPMOVWBZ128rrk)
NOFOLD(VPMOVWBZ256rrk)
NOFOLD(VCOMPRESSPDZ128rr)
NOFOLD(VCOMPRESSPDZ256rr)
NOFOLD(VCOMPRESSPDZrr)
NOFOLD(VCOMPRESSPSZ128rr)
NOFOLD(VCOMPRESSPSZ256rr)
NOFOLD(VCOMPRESSPSZrr)
NOFOLD(VPCOMPRESSBZ128rr)
NOFOLD(VPCOMPRESSBZ256rr)
NOFOLD(VPCOMPRESSBZrr)
NOFOLD(VPCOMPRESSDZ128rr)
NOFOLD(VPCOMPRESSDZ256rr)
NOFOLD(VPCOMPRESSDZrr)
NOFOLD(VPCOMPRESSQZ128rr)
NOFOLD(VPCOMPRESSQZ256rr)
NOFOLD(VPCOMPRESSQZrr)
NOFOLD(VPCOMPRESSWZ128rr)
NOFOLD(VPCOMPRESSWZ256rr)
NOFOLD(VPCOMPRESSWZrr)
NOFOLD(VBLENDMPDZ128rrkz)
NOFOLD(VBLENDMPDZ256rrkz)
NOFOLD(VBLENDMPDZrrkz)
NOFOLD(VBLENDMPSZ128rrkz)
NOFOLD(VBLENDMPSZ256rrkz)
NOFOLD(VBLENDMPSZrrkz)
NOFOLD(VPBLENDMBZ128rrkz)
NOFOLD(VPBLENDMBZ256rrkz)
NOFOLD(VPBLENDMBZrrkz)
NOFOLD(VPBLENDMDZ128rrkz)
NOFOLD(VPBLENDMDZ256rrkz)
NOFOLD(VPBLENDMDZrrkz)
NOFOLD(VPBLENDMQZ128rrkz)
NOFOLD(VPBLENDMQZ256rrkz)
NOFOLD(VPBLENDMQZrrkz)
NOFOLD(VPBLENDMWZ128rrkz)
NOFOLD(VPBLENDMWZ256rrkz)
NOFOLD(VPBLENDMWZrrkz)
// Exclude these two because they would conflict with {MMX_MOVD64from64rr,
// MMX_MOVQ64mr} in the unfolding table: each memory form may map back to only
// one register form.
NOFOLD(MMX_MOVQ64rr)
NOFOLD(MMX_MOVQ64rr_REV)
// INSERTPSrmi has no count_s while INSERTPSrri has count_s.
// count_s indicates which element of the dst vector is inserted.
// If count_s != 0, we cannot fold INSERTPSrri into INSERTPSrmi.
//
// The following folding can happen only when count_s == 0:
//   INSERTPSrri xmm1, xmm0, imm
//     =>
//   INSERTPSrmi xmm1, m32, imm
NOFOLD(VINSERTPSZrri)
// Memory faults are suppressed for CFCMOV with a memory operand.
NOFOLD(CFCMOV16rr_REV)
NOFOLD(CFCMOV32rr_REV)
NOFOLD(CFCMOV64rr_REV)
NOFOLD(CFCMOV16rr_ND)
NOFOLD(CFCMOV32rr_ND)
NOFOLD(CFCMOV64rr_ND)
#ifndef ENTRY
#define ENTRY(REG, MEM, FLAGS)
#endif
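//
// Each ENTRY(REG, MEM, FLAGS) below pairs a register-form opcode with its
// memory-form counterpart; FLAGS carries TB_* modifiers such as
// TB_FOLDED_LOAD / TB_FOLDED_STORE (the folded operand is a load / store) and
// TB_NO_FORWARD / TB_NO_REVERSE (the entry is not used for folding /
// unfolding, respectively). A minimal consumption sketch, again assuming the
// standard X-macro pattern (the element type and array name here are
// illustrative, not the exact definitions in X86InstrFoldTables.cpp):
//
//   struct ManualFoldEntry { unsigned RegOp, MemOp; uint16_t Flags; };
//   #define ENTRY(REG, MEM, FLAGS) {X86::REG, X86::MEM, FLAGS},
//   static const ManualFoldEntry ManualMapTable[] = {
//   #include "X86ManualFoldTables.def"
//   };
//   #undef ENTRY
//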
// The following entries are added manually because the encoding of the reg
// form does not match the encoding of the memory form.
ENTRY(ADD16ri_DB, ADD16mi, TB_NO_REVERSE)
ENTRY(ADD16rr_DB, ADD16mr, TB_NO_REVERSE)
ENTRY(ADD32ri_DB, ADD32mi, TB_NO_REVERSE)
ENTRY(ADD32rr_DB, ADD32mr, TB_NO_REVERSE)
ENTRY(ADD64ri32_DB, ADD64mi32, TB_NO_REVERSE)
ENTRY(ADD64rr_DB, ADD64mr, TB_NO_REVERSE)
ENTRY(ADD8ri_DB, ADD8mi, TB_NO_REVERSE)
ENTRY(ADD8rr_DB, ADD8mr, TB_NO_REVERSE)
ENTRY(ADD16rr_DB, ADD16rm, TB_NO_REVERSE)
ENTRY(ADD32rr_DB, ADD32rm, TB_NO_REVERSE)
ENTRY(ADD64rr_DB, ADD64rm, TB_NO_REVERSE)
ENTRY(ADD8rr_DB, ADD8rm, TB_NO_REVERSE)
ENTRY(MMX_MOVD64from64rr, MMX_MOVQ64mr, TB_FOLDED_STORE)
ENTRY(MMX_MOVD64grr, MMX_MOVD64mr, TB_FOLDED_STORE)
ENTRY(MOV64toSDrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVDI2SSrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVPQIto64rr, MOVPQI2QImr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVSDto64rr, MOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(MOVSS2DIrr, MOVSSmr, TB_FOLDED_STORE)
ENTRY(MOVLHPSrr, MOVHPSrm, TB_NO_REVERSE)
ENTRY(PUSH16r, PUSH16rmm, TB_FOLDED_LOAD)
ENTRY(PUSH32r, PUSH32rmm, TB_FOLDED_LOAD)
ENTRY(PUSH64r, PUSH64rmm, TB_FOLDED_LOAD)
ENTRY(TAILJMPr, TAILJMPm, TB_FOLDED_LOAD)
ENTRY(TAILJMPr64, TAILJMPm64, TB_FOLDED_LOAD)
ENTRY(TAILJMPr64_REX, TAILJMPm64_REX, TB_FOLDED_LOAD)
ENTRY(TCRETURNri, TCRETURNmi, TB_FOLDED_LOAD | TB_NO_FORWARD)
ENTRY(TCRETURNri64, TCRETURNmi64, TB_FOLDED_LOAD | TB_NO_FORWARD)
ENTRY(VMOVLHPSZrr, VMOVHPSZ128rm, TB_NO_REVERSE)
ENTRY(VMOVLHPSrr, VMOVHPSrm, TB_NO_REVERSE)
ENTRY(VMOV64toSDZrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOV64toSDrr, MOV64mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVDI2SSZrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVDI2SSrr, MOV32mr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVPQIto64Zrr, VMOVPQI2QIZmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVPQIto64rr, VMOVPQI2QImr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSDto64Zrr, VMOVSDZmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSDto64rr, VMOVSDmr, TB_FOLDED_STORE | TB_NO_REVERSE)
ENTRY(VMOVSS2DIZrr, VMOVSSZmr, TB_FOLDED_STORE)
ENTRY(VMOVSS2DIrr, VMOVSSmr, TB_FOLDED_STORE)
ENTRY(MMX_MOVD64to64rr, MMX_MOVQ64rm, 0)
ENTRY(MOV64toPQIrr, MOVQI2PQIrm, TB_NO_REVERSE)
ENTRY(MOV64toSDrr, MOVSDrm_alt, TB_NO_REVERSE)
ENTRY(MOVDI2SSrr, MOVSSrm_alt, 0)
ENTRY(VMOV64toPQIZrr, VMOVQI2PQIZrm, TB_NO_REVERSE)
ENTRY(VMOV64toPQIrr, VMOVQI2PQIrm, TB_NO_REVERSE)
ENTRY(VMOV64toSDZrr, VMOVSDZrm_alt, TB_NO_REVERSE)
ENTRY(VMOV64toSDrr, VMOVSDrm_alt, TB_NO_REVERSE)
ENTRY(VMOVDI2SSZrr, VMOVSSZrm_alt, 0)
ENTRY(VMOVDI2SSrr, VMOVSSrm_alt, 0)
ENTRY(MOVSDrr, MOVLPDrm, TB_NO_REVERSE)
ENTRY(VMOVSDZrr, VMOVLPDZ128rm, TB_NO_REVERSE)
ENTRY(VMOVSDrr, VMOVLPDrm, TB_NO_REVERSE)