1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004-2024 Free Software Foundation, Inc.
4 ;; This file is part of GCC.
6 ;; GCC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
8 ;; the Free Software Foundation; either version 3, or (at your option)
11 ;; GCC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
17 ;; along with GCC; see the file COPYING3. If not see
18 ;; <http://www.gnu.org/licenses/>.
20 ;; Return true if OP is either a i387 or SSE fp register.
;; Matches only a hard REG; ANY_FP_REGNO_P covers both x87 stack
;; registers and SSE registers.
21 (define_predicate "any_fp_register_operand"
22 (and (match_code "reg")
23 (match_test "ANY_FP_REGNO_P (REGNO (op))")))
25 ;; Return true if OP is an i387 fp register (%st0..%st7).
26 (define_predicate "fp_register_operand"
27 (and (match_code "reg")
28 (match_test "STACK_REGNO_P (REGNO (op))")))
30 ;; True if the operand is a GENERAL class register.
31 (define_predicate "general_reg_operand"
32 (and (match_code "reg")
33 (match_test "GENERAL_REGNO_P (REGNO (op))")))
35 ;; True if the operand is an INDEX class register.
;; i.e. a general register usable as an address index (excludes %esp).
36 (define_predicate "index_reg_operand"
37 (and (match_code "reg")
38 (match_test "INDEX_REGNO_P (REGNO (op))")))
40 ;; True if the operand is a nonimmediate operand with GENERAL class register.
;; A hard REG passes only if it is in the GENERAL class; any other rtx
;; (memory, pseudo via subreg, etc.) falls back to nonimmediate_operand.
41 (define_predicate "nonimmediate_gr_operand"
42 (if_then_else (match_code "reg")
43 (match_test "GENERAL_REGNO_P (REGNO (op))")
44 (match_operand 0 "nonimmediate_operand")))
46 ;; True if the operand is a general operand with GENERAL class register.
;; Same shape as nonimmediate_gr_operand but the fallback also accepts
;; immediates, via general_operand.
47 (define_predicate "general_gr_operand"
48 (if_then_else (match_code "reg")
49 (match_test "GENERAL_REGNO_P (REGNO (op))")
50 (match_operand 0 "general_operand")))
52 ;; True if the operand is an MMX register.
53 (define_predicate "mmx_reg_operand"
54 (and (match_code "reg")
55 (match_test "MMX_REGNO_P (REGNO (op))")))
57 ;; Match register operands, but include memory operands for
58 ;; !TARGET_MMX_WITH_SSE.
;; When MMX insns are emulated with SSE (TARGET_MMX_WITH_SSE), memory
;; operands are disallowed so the operand is forced into a register.
59 (define_predicate "register_mmxmem_operand"
60 (ior (match_operand 0 "register_operand")
61 (and (not (match_test "TARGET_MMX_WITH_SSE"))
62 (match_operand 0 "memory_operand"))))
64 ;; True if the operand is an SSE register.
65 (define_predicate "sse_reg_operand"
66 (and (match_code "reg")
67 (match_test "SSE_REGNO_P (REGNO (op))")))
69 ;; Return true if op is a QImode register.
;; Any hard register whose low byte is addressable (all GPRs in 64-bit
;; mode; only %[abcd]x in 32-bit mode).
70 (define_predicate "any_QIreg_operand"
71 (and (match_code "reg")
72 (match_test "ANY_QI_REGNO_P (REGNO (op))")))
74 ;; Return true if op is one of QImode registers: %[abcd][hl].
75 (define_predicate "QIreg_operand"
76 (and (match_code "reg")
77 (match_test "QI_REGNO_P (REGNO (op))")))
79 ;; Return true if op is a QImode register operand other than %[abcd][hl].
80 (define_predicate "ext_QIreg_operand"
81 (and (match_test "TARGET_64BIT")
83 (not (match_test "QI_REGNO_P (REGNO (op))"))))
85 ;; Return true if op is the AX register.
86 (define_predicate "ax_reg_operand"
87 (and (match_code "reg")
88 (match_test "REGNO (op) == AX_REG")))
90 ;; Return true if op is the flags register.
;; Special predicate: mode of the operand is not checked.
91 (define_special_predicate "flags_reg_operand"
92 (and (match_code "reg")
93 (match_test "REGNO (op) == FLAGS_REG")))
95 ;; True if the operand is a MASK register.
96 (define_predicate "mask_reg_operand"
97 (and (match_code "reg")
98 (match_test "MASK_REGNO_P (REGNO (op))")))
100 ;; Match a DI, SI or HImode register operand.
;; Special predicate: the mode is tested explicitly below rather than
;; against the context mode; DImode is accepted only on 64-bit targets.
101 (define_special_predicate "int248_register_operand"
102 (and (match_operand 0 "register_operand")
103 (ior (and (match_test "TARGET_64BIT")
104 (match_test "GET_MODE (op) == DImode"))
105 (match_test "GET_MODE (op) == SImode")
106 (match_test "GET_MODE (op) == HImode"))))
108 ;; Match a DI, SI, HI or QImode nonimmediate_operand.
;; Same mode handling as int248_register_operand, but also allows
;; memory and adds QImode.
109 (define_special_predicate "int_nonimmediate_operand"
110 (and (match_operand 0 "nonimmediate_operand")
111 (ior (and (match_test "TARGET_64BIT")
112 (match_test "GET_MODE (op) == DImode"))
113 (match_test "GET_MODE (op) == SImode")
114 (match_test "GET_MODE (op) == HImode")
115 (match_test "GET_MODE (op) == QImode"))))
117 ;; Match register operands, but include memory operands for TARGET_SSE_MATH.
118 (define_predicate "register_ssemem_operand"
120 (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
121 (match_operand 0 "nonimmediate_operand")
122 (match_operand 0 "register_operand")))
124 ;; Match nonimmediate operands, but exclude memory operands
125 ;; for TARGET_SSE_MATH if TARGET_MIX_SSE_I387 is not enabled.
126 (define_predicate "nonimm_ssenomem_operand"
128 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
129 (not (match_test "TARGET_MIX_SSE_I387")))
130 (match_operand 0 "register_operand")
131 (match_operand 0 "nonimmediate_operand")))
133 ;; The above predicate, suitable for x87 arithmetic operators.
134 (define_predicate "x87nonimm_ssenomem_operand"
136 (and (match_test "SSE_FLOAT_MODE_P (mode) && TARGET_SSE_MATH")
137 (not (match_test "TARGET_MIX_SSE_I387 && X87_ENABLE_ARITH (mode)")))
138 (match_operand 0 "register_operand")
139 (match_operand 0 "nonimmediate_operand")))
141 ;; Match register operands, include memory operand for TARGET_SSE4_1.
;; SSE4.1 insns (e.g. pextr*) can take a memory destination; without
;; SSE4.1 only a register is valid.
142 (define_predicate "register_sse4nonimm_operand"
143 (if_then_else (match_test "TARGET_SSE4_1")
144 (match_operand 0 "nonimmediate_operand")
145 (match_operand 0 "register_operand")))
147 ;; Return true if VALUE is a symbol reference.
148 (define_predicate "symbol_operand"
149 (match_code "symbol_ref"))
151 ;; Return true if VALUE is an ENDBR opcode in immediate field.
152 (define_predicate "ix86_endbr_immediate_operand"
153 (match_code "const_int")
155 if (flag_cf_protection & CF_BRANCH)
157 unsigned HOST_WIDE_INT imm = UINTVAL (op);
158 unsigned HOST_WIDE_INT val = TARGET_64BIT ? 0xfa1e0ff3 : 0xfb1e0ff3;
163 /* NB: Encoding is byte based. */
165 for (; imm >= val; imm >>= 8)
173 ;; Return true if VALUE can be stored in a sign extended immediate field.
174 (define_predicate "x86_64_immediate_operand"
175 (match_code "const_int,symbol_ref,label_ref,const")
177 if (ix86_endbr_immediate_operand (op, VOIDmode))
181 return immediate_operand (op, mode);
183 switch (GET_CODE (op))
187 HOST_WIDE_INT val = INTVAL (op);
188 return trunc_int_for_mode (val, SImode) == val;
191 /* TLS symbols are not constant. */
192 if (SYMBOL_REF_TLS_MODEL (op))
195 /* Load the external function address via the GOT slot. */
196 if (ix86_force_load_from_GOT_p (op))
199 /* For certain code models, the symbolic references are known to fit.
200 In the CM_SMALL_PIC model we know it fits if it is local to the shared
201 library. Don't count TLS SYMBOL_REFs here, since they should fit
202 only if inside of UNSPEC handled below. */
203 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
204 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
207 /* For certain code models, the code is near as well. */
208 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
209 || ix86_cmodel == CM_KERNEL);
212 /* We also may accept the offsetted memory references in certain
214 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
215 switch (XINT (XEXP (op, 0), 1))
217 case UNSPEC_GOTPCREL:
219 case UNSPEC_GOTNTPOFF:
226 if (GET_CODE (XEXP (op, 0)) == PLUS)
228 rtx op1 = XEXP (XEXP (op, 0), 0);
229 rtx op2 = XEXP (XEXP (op, 0), 1);
231 if (ix86_cmodel == CM_LARGE && GET_CODE (op1) != UNSPEC)
233 if (!CONST_INT_P (op2))
236 HOST_WIDE_INT offset = INTVAL (op2);
237 if (trunc_int_for_mode (offset, SImode) != offset)
240 switch (GET_CODE (op1))
243 /* TLS symbols are not constant. */
244 if (SYMBOL_REF_TLS_MODEL (op1))
247 /* Load the external function address via the GOT slot. */
248 if (ix86_force_load_from_GOT_p (op1))
251 /* For CM_SMALL assume that latest object is 16MB before
252 end of 31bits boundary. We may also accept pretty
253 large negative constants knowing that all objects are
254 in the positive half of address space. */
255 if ((ix86_cmodel == CM_SMALL
256 || (ix86_cmodel == CM_MEDIUM
257 && !SYMBOL_REF_FAR_ADDR_P (op1)))
258 && offset < 16*1024*1024)
260 /* For CM_KERNEL we know that all objects reside in the
261 negative half of the 32-bit address space. We may not
262 accept negative offsets, since they may be just off
263 and we may accept pretty large positive ones. */
264 if (ix86_cmodel == CM_KERNEL
270 /* These conditions are similar to SYMBOL_REF ones, just the
271 constraints for code models differ. */
272 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
273 && offset < 16*1024*1024)
275 if (ix86_cmodel == CM_KERNEL
281 switch (XINT (op1, 1))
302 ;; Return true if VALUE can be stored in the zero extended immediate field.
303 (define_predicate "x86_64_zext_immediate_operand"
304 (match_code "const_int,symbol_ref,label_ref,const")
306 if (ix86_endbr_immediate_operand (op, VOIDmode))
309 switch (GET_CODE (op))
312 return !(INTVAL (op) & ~HOST_WIDE_INT_C (0xffffffff));
315 /* TLS symbols are not constant. */
316 if (SYMBOL_REF_TLS_MODEL (op))
319 /* Load the external function address via the GOT slot. */
320 if (ix86_force_load_from_GOT_p (op))
323 /* For certain code models, the symbolic references are known to fit. */
324 return (ix86_cmodel == CM_SMALL
325 || (ix86_cmodel == CM_MEDIUM
326 && !SYMBOL_REF_FAR_ADDR_P (op)));
329 /* For certain code models, the code is near as well. */
330 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
333 /* We also may accept the offsetted memory references in certain
335 if (GET_CODE (XEXP (op, 0)) == PLUS)
337 rtx op1 = XEXP (XEXP (op, 0), 0);
338 rtx op2 = XEXP (XEXP (op, 0), 1);
340 if (ix86_cmodel == CM_LARGE)
342 if (!CONST_INT_P (op2))
345 HOST_WIDE_INT offset = INTVAL (op2);
346 if (trunc_int_for_mode (offset, SImode) != offset)
349 switch (GET_CODE (op1))
352 /* TLS symbols are not constant. */
353 if (SYMBOL_REF_TLS_MODEL (op1))
356 /* Load the external function address via the GOT slot. */
357 if (ix86_force_load_from_GOT_p (op1))
360 /* For small code model we may accept pretty large positive
361 offsets, since one bit is available for free. Negative
362 offsets are limited by the size of NULL pointer area
363 specified by the ABI. */
364 if ((ix86_cmodel == CM_SMALL
365 || (ix86_cmodel == CM_MEDIUM
366 && !SYMBOL_REF_FAR_ADDR_P (op1)))
367 && offset > -0x10000)
369 /* ??? For the kernel, we may accept adjustment of
370 -0x10000000, since we know that it will just convert
371 negative address space to positive, but perhaps this
372 is not worthwhile. */
376 /* These conditions are similar to SYMBOL_REF ones, just the
377 constraints for code models differ. */
378 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
379 && offset > -0x10000)
395 ;; Return true if VALUE is a constant integer whose low and high words satisfy
396 ;; x86_64_immediate_operand.
397 (define_predicate "x86_64_hilo_int_operand"
398 (match_code "const_int,const_wide_int")
400 switch (GET_CODE (op))
403 return x86_64_immediate_operand (op, mode);
406 gcc_assert (CONST_WIDE_INT_NUNITS (op) == 2);
407 return (x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op, 0)),
409 && x86_64_immediate_operand (GEN_INT (CONST_WIDE_INT_ELT (op,
418 ;; Return true if VALUE is a constant integer whose value is
419 ;; x86_64_immediate_operand value zero extended from word mode to mode.
420 (define_predicate "x86_64_dwzext_immediate_operand"
421 (match_code "const_int,const_wide_int")
423 if (ix86_endbr_immediate_operand (op, VOIDmode))
426 switch (GET_CODE (op))
430 return UINTVAL (op) <= HOST_WIDE_INT_UC (0xffffffff);
431 return UINTVAL (op) <= HOST_WIDE_INT_UC (0x7fffffff);
436 return (CONST_WIDE_INT_NUNITS (op) == 2
437 && CONST_WIDE_INT_ELT (op, 1) == 0
438 && (trunc_int_for_mode (CONST_WIDE_INT_ELT (op, 0), SImode)
439 == (HOST_WIDE_INT) CONST_WIDE_INT_ELT (op, 0)));
446 ;; Return true if size of VALUE can be stored in a sign
447 ;; extended immediate field.
;; True for any symbol on 32-bit targets; on 64-bit targets only for
;; code models where object sizes are known to fit in 32 bits.
448 (define_predicate "x86_64_immediate_size_operand"
449 (and (match_code "symbol_ref")
450 (ior (not (match_test "TARGET_64BIT"))
451 (match_test "ix86_cmodel == CM_SMALL")
452 (match_test "ix86_cmodel == CM_KERNEL"))))
454 ;; Return true if OP is general operand representable on x86_64.
;; On 64-bit targets immediates must pass x86_64_immediate_operand
;; (fit in a sign-extended 32-bit field); on 32-bit targets any
;; general_operand is fine.
455 (define_predicate "x86_64_general_operand"
456 (if_then_else (match_test "TARGET_64BIT")
457 (ior (match_operand 0 "nonimmediate_operand")
458 (match_operand 0 "x86_64_immediate_operand"))
459 (match_operand 0 "general_operand")))
461 ;; Return true if OP's both words are general operands representable
;; on x86_64.
463 (define_predicate "x86_64_hilo_general_operand"
464 (if_then_else (match_test "TARGET_64BIT")
465 (ior (match_operand 0 "nonimmediate_operand")
466 (match_operand 0 "x86_64_hilo_int_operand"))
467 (match_operand 0 "general_operand")))
469 ;; Return true if OP is non-VOIDmode general operand representable
470 ;; on x86_64. This predicate is used in sign-extending conversion
471 ;; operations that require non-VOIDmode immediate operands.
472 (define_predicate "x86_64_sext_operand"
473 (and (match_test "GET_MODE (op) != VOIDmode")
474 (match_operand 0 "x86_64_general_operand")))
476 ;; Return true if OP is non-VOIDmode general operand. This predicate
477 ;; is used in sign-extending conversion operations that require
478 ;; non-VOIDmode immediate operands.
;; 32-bit counterpart of x86_64_sext_operand.
479 (define_predicate "sext_operand"
480 (and (match_test "GET_MODE (op) != VOIDmode")
481 (match_operand 0 "general_operand")))
483 ;; Return true if OP is representable on x86_64 as zero-extended operand.
484 ;; This predicate is used in zero-extending conversion operations that
485 ;; require non-VOIDmode immediate operands.
486 (define_predicate "x86_64_zext_operand"
487 (if_then_else (match_test "TARGET_64BIT")
488 (ior (match_operand 0 "nonimmediate_operand")
489 (and (match_operand 0 "x86_64_zext_immediate_operand")
490 (match_test "GET_MODE (op) != VOIDmode")))
491 (match_operand 0 "nonimmediate_operand")))
493 ;; Return true if OP is general operand representable on x86_64
494 ;; as either sign extended or zero extended constant.
495 (define_predicate "x86_64_szext_general_operand"
496 (if_then_else (match_test "TARGET_64BIT")
497 (ior (match_operand 0 "nonimmediate_operand")
498 (match_operand 0 "x86_64_immediate_operand")
499 (match_operand 0 "x86_64_zext_immediate_operand"))
500 (match_operand 0 "general_operand")))
502 ;; Return true if OP is nonmemory operand representable on x86_64.
503 (define_predicate "x86_64_nonmemory_operand"
504 (if_then_else (match_test "TARGET_64BIT")
505 (ior (match_operand 0 "register_operand")
506 (match_operand 0 "x86_64_immediate_operand"))
507 (match_operand 0 "nonmemory_operand")))
509 ;; Return true if OP is nonmemory operand representable on x86_64
;; as either a sign extended or a zero extended constant.
510 (define_predicate "x86_64_szext_nonmemory_operand"
511 (if_then_else (match_test "TARGET_64BIT")
512 (ior (match_operand 0 "register_operand")
513 (match_operand 0 "x86_64_immediate_operand")
514 (match_operand 0 "x86_64_zext_immediate_operand"))
515 (match_operand 0 "nonmemory_operand")))
517 ;; Return true when operand is PIC expression that can be computed by lea
519 (define_predicate "pic_32bit_operand"
520 (match_code "const,symbol_ref,label_ref")
525 /* Rule out relocations that translate into 64bit constants. */
526 if (TARGET_64BIT && GET_CODE (op) == CONST)
529 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
531 if (GET_CODE (op) == UNSPEC
532 && (XINT (op, 1) == UNSPEC_GOTOFF
533 || XINT (op, 1) == UNSPEC_GOT))
537 return symbolic_operand (op, mode);
540 ;; Return true if OP is nonmemory operand acceptable by movabs patterns.
;; PIC expressions are rejected because movabs takes a plain 64-bit
;; absolute constant, not a relocatable expression.
541 (define_predicate "x86_64_movabs_operand"
542 (and (match_operand 0 "nonmemory_operand")
543 (not (match_operand 0 "pic_32bit_operand"))))
545 ;; Return true if OP is either a symbol reference or a sum of a symbol
546 ;; reference and a constant.
547 (define_predicate "symbolic_operand"
548 (match_code "symbol_ref,label_ref,const")
550 switch (GET_CODE (op))
558 if (GET_CODE (op) == SYMBOL_REF
559 || GET_CODE (op) == LABEL_REF
560 || (GET_CODE (op) == UNSPEC
561 && (XINT (op, 1) == UNSPEC_GOT
562 || XINT (op, 1) == UNSPEC_GOTOFF
563 || XINT (op, 1) == UNSPEC_PCREL
564 || XINT (op, 1) == UNSPEC_GOTPCREL)))
566 if (GET_CODE (op) != PLUS
567 || !CONST_INT_P (XEXP (op, 1)))
571 if (GET_CODE (op) == SYMBOL_REF
572 || GET_CODE (op) == LABEL_REF)
574 /* Only @GOTOFF gets offsets. */
575 if (GET_CODE (op) != UNSPEC
576 || XINT (op, 1) != UNSPEC_GOTOFF)
579 op = XVECEXP (op, 0, 0);
580 if (GET_CODE (op) == SYMBOL_REF
581 || GET_CODE (op) == LABEL_REF)
590 ;; Return true if OP is a symbolic operand that resolves locally.
591 (define_predicate "local_symbolic_operand"
592 (match_code "const,label_ref,symbol_ref")
594 if (GET_CODE (op) == CONST
595 && GET_CODE (XEXP (op, 0)) == PLUS
596 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
597 op = XEXP (XEXP (op, 0), 0);
599 if (GET_CODE (op) == LABEL_REF)
602 if (GET_CODE (op) != SYMBOL_REF)
605 if (SYMBOL_REF_TLS_MODEL (op))
608 /* Dll-imported symbols are always external. */
609 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
611 if (SYMBOL_REF_LOCAL_P (op))
614 /* There is, however, a not insubstantial body of code in the rest of
615 the compiler that assumes it can just stick the results of
616 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
617 /* ??? This is a hack. Should update the body of the compiler to
618 always create a DECL an invoke targetm.encode_section_info. */
619 if (strncmp (XSTR (op, 0), internal_label_prefix,
620 internal_label_prefix_len) == 0)
626 (define_predicate "local_func_symbolic_operand"
627 (match_operand 0 "local_symbolic_operand")
629 if (GET_CODE (op) == CONST
630 && GET_CODE (XEXP (op, 0)) == PLUS
631 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
632 op = XEXP (XEXP (op, 0), 0);
634 if (GET_CODE (op) == SYMBOL_REF
635 && !SYMBOL_REF_FUNCTION_P (op))
641 ;; Test for a legitimate @GOTOFF operand.
643 ;; VxWorks does not impose a fixed gap between segments; the run-time
644 ;; gap can be different from the object-file gap. We therefore can't
645 ;; use @GOTOFF unless we are absolutely sure that the symbol is in the
646 ;; same segment as the GOT. Unfortunately, the flexibility of linker
647 ;; scripts means that we can't be sure of that in general, so assume
648 ;; @GOTOFF is not valid on VxWorks, except with the large code model.
649 (define_predicate "gotoff_operand"
650 (and (ior (not (match_test "TARGET_VXWORKS_RTP"))
651 (match_test "ix86_cmodel == CM_LARGE")
652 (match_test "ix86_cmodel == CM_LARGE_PIC"))
653 (match_operand 0 "local_symbolic_operand")))
655 ;; Test for various thread-local symbols.
;; Special predicate: mode is irrelevant for TLS symbol classification.
656 (define_special_predicate "tls_symbolic_operand"
657 (and (match_code "symbol_ref")
658 (match_test "SYMBOL_REF_TLS_MODEL (op)")))
;; True only for the TLS module base symbol itself.
660 (define_special_predicate "tls_modbase_operand"
661 (and (match_code "symbol_ref")
662 (match_test "op == ix86_tls_module_base ()")))
;; Match an insn pattern that contains a TLS address computation.
664 (define_predicate "tls_address_pattern"
665 (and (match_code "set,parallel,unspec,unspec_volatile")
666 (match_test "ix86_tls_address_pattern_p (op)")))
668 ;; Test for a pc-relative call operand
669 (define_predicate "constant_call_address_operand"
670 (match_code "symbol_ref")
672 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC
673 || flag_force_indirect_call)
675 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
680 ;; True for any non-virtual and non-eliminable register. Used in places where
681 ;; instantiation of such a register may cause the pattern to not be recognized.
682 (define_predicate "register_no_elim_operand"
683 (match_operand 0 "register_operand")
686 op = SUBREG_REG (op);
688 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
689 because it is guaranteed to be reloaded into one. */
693 return !(op == arg_pointer_rtx
694 || op == frame_pointer_rtx
695 || VIRTUAL_REGISTER_P (op));
698 ;; Similarly, but include the stack pointer. This is used
699 ;; to prevent esp from being used as an index reg.
700 (define_predicate "register_no_SP_operand"
701 (match_operand 0 "register_operand")
704 op = SUBREG_REG (op);
706 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
707 because it is guaranteed to be reloaded into one. */
711 return !(op == arg_pointer_rtx
712 || op == frame_pointer_rtx
713 || op == stack_pointer_rtx
714 || VIRTUAL_REGISTER_P (op));
717 ;; P6 processors will jump to the address after the decrement when %esp
718 ;; is used as a call operand, so they will execute return address as a code.
719 ;; See Pentium Pro errata 70, Pentium 2 errata A33 and Pentium 3 errata E17.
;; Hence in 32-bit mode the stack pointer is excluded from call operands.
721 (define_predicate "call_register_operand"
722 (if_then_else (match_test "TARGET_64BIT")
723 (match_operand 0 "register_operand")
724 (match_operand 0 "register_no_SP_operand")))
726 ;; Return false if this is any eliminable register. Otherwise general_operand.
727 (define_predicate "general_no_elim_operand"
728 (if_then_else (match_code "reg,subreg")
729 (match_operand 0 "register_no_elim_operand")
730 (match_operand 0 "general_operand")))
732 ;; Return false if this is any eliminable register. Otherwise
733 ;; register_operand or a constant.
734 (define_predicate "nonmemory_no_elim_operand"
735 (ior (match_operand 0 "register_no_elim_operand")
736 (match_operand 0 "immediate_operand")))
738 ;; Test for a valid operand for indirect branch.
;; Memory operands are disallowed with -mindirect-branch-register (used
;; for retpoline-style mitigations) and on x32, where a 64-bit memory
;; slot could hold an address outside the 32-bit address space.
739 (define_predicate "indirect_branch_operand"
740 (ior (match_operand 0 "register_operand")
741 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
742 (not (match_test "TARGET_X32"))
743 (match_operand 0 "memory_operand"))))
745 ;; Return true if OP is a memory operands that can be used in sibcalls.
746 ;; Since sibcall never returns, we can only use call-clobbered register
747 ;; as GOT base. Allow GOT slot here only with pseudo register as GOT
748 ;; base. Properly handle sibcall over GOT slot with *sibcall_GOT_32
749 ;; and *sibcall_value_GOT_32 patterns.
750 (define_predicate "sibcall_memory_operand"
751 (match_operand 0 "memory_operand")
756 if (GET_CODE (op) == PLUS && REG_P (XEXP (op, 0)))
758 int regno = REGNO (XEXP (op, 0));
759 if (!HARD_REGISTER_NUM_P (regno) || call_used_or_fixed_reg_p (regno))
762 if (GOT32_symbol_operand (op, VOIDmode))
769 ;; Return true if OP is a GOT memory operand.
;; Matches (mem (const (unspec [...] UNSPEC_GOTPCREL))); the nested
;; match_code strings "0"/"00" index into the MEM's address expression.
770 (define_predicate "GOT_memory_operand"
771 (and (match_operand 0 "memory_operand")
772 (match_code "const" "0")
773 (match_code "unspec" "00")
774 (match_test "XINT (XEXP (XEXP (op, 0), 0), 1) == UNSPEC_GOTPCREL")))
776 ;; Test for a valid operand for a call instruction.
777 ;; Allow constant call address operands in Pmode only.
778 (define_special_predicate "call_insn_operand"
779 (ior (match_test "constant_call_address_operand
780 (op, mode == VOIDmode ? mode : Pmode)")
781 (match_operand 0 "call_register_operand")
782 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
783 (ior (and (not (match_test "TARGET_X32"))
784 (match_operand 0 "memory_operand"))
785 (and (match_test "TARGET_X32 && Pmode == DImode")
786 (match_operand 0 "GOT_memory_operand"))))))
788 ;; Similarly, but for tail calls, in which we cannot allow memory references.
;; Differs from call_insn_operand by excluding eliminable registers and
;; restricting memory operands to sibcall_memory_operand.
789 (define_special_predicate "sibcall_insn_operand"
790 (ior (match_test "constant_call_address_operand
791 (op, mode == VOIDmode ? mode : Pmode)")
792 (match_operand 0 "register_no_elim_operand")
793 (and (not (match_test "TARGET_INDIRECT_BRANCH_REGISTER"))
794 (ior (and (not (match_test "TARGET_X32"))
795 (match_operand 0 "sibcall_memory_operand"))
796 (and (match_test "TARGET_X32 && Pmode == DImode")
797 (match_operand 0 "GOT_memory_operand"))))))
799 ;; Return true if OP is a 32-bit GOT symbol operand.
;; Matches (const (unspec [...] UNSPEC_GOT)).
800 (define_predicate "GOT32_symbol_operand"
801 (and (match_code "const")
802 (match_code "unspec" "0")
803 (match_test "XINT (XEXP (op, 0), 1) == UNSPEC_GOT")))
805 ;; Match exactly zero.
806 (define_predicate "const0_operand"
807 (match_code "const_int,const_double,const_vector")
809 if (mode == VOIDmode)
810 mode = GET_MODE (op);
811 return op == CONST0_RTX (mode);
814 ;; Match one or a vector with all elements equal to one.
815 (define_predicate "const1_operand"
816 (match_code "const_int,const_double,const_vector")
818 if (mode == VOIDmode)
819 mode = GET_MODE (op);
820 return op == CONST1_RTX (mode);
;; Match exactly minus one.
824 (define_predicate "constm1_operand"
825 (and (match_code "const_int")
826 (match_test "op == constm1_rtx")))
;; Match exactly zero or exactly minus one.
829 (define_predicate "const0_or_m1_operand"
830 (ior (match_operand 0 "const0_operand")
831 (match_operand 0 "constm1_operand")))
833 ;; Match exactly eight.
834 (define_predicate "const8_operand"
835 (and (match_code "const_int")
836 (match_test "INTVAL (op) == 8")))
838 ;; Match exactly 128.
839 (define_predicate "const128_operand"
840 (and (match_code "const_int")
841 (match_test "INTVAL (op) == 128")))
843 ;; Match exactly 0x0FFFFFFFF in anddi as a zero-extension operation
;; (the constant is compared after sign-truncation to DImode).
844 (define_predicate "const_32bit_mask"
845 (and (match_code "const_int")
846 (match_test "trunc_int_for_mode (INTVAL (op), DImode)
847 == HOST_WIDE_INT_C (0xffffffff)")))
849 ;; Match 2, 4, or 8. Used for leal multiplicands.
850 (define_predicate "const248_operand"
851 (match_code "const_int")
853 HOST_WIDE_INT i = INTVAL (op);
854 return i == 2 || i == 4 || i == 8;
857 ;; Match 1, 2, or 3. Used for lea shift amounts.
858 (define_predicate "const123_operand"
859 (match_code "const_int")
861 HOST_WIDE_INT i = INTVAL (op);
862 return i == 1 || i == 2 || i == 3;
865 ;; Match 2, 3, 6, or 7
866 (define_predicate "const2367_operand"
867 (match_code "const_int")
869 HOST_WIDE_INT i = INTVAL (op);
870 return i == 2 || i == 3 || i == 6 || i == 7;
873 ;; Match 1, 2, 4, or 8
874 (define_predicate "const1248_operand"
875 (match_code "const_int")
877 HOST_WIDE_INT i = INTVAL (op);
878 return i == 1 || i == 2 || i == 4 || i == 8;
881 ;; Match 3, 5, or 9. Used for leal multiplicands.
882 (define_predicate "const359_operand"
883 (match_code "const_int")
885 HOST_WIDE_INT i = INTVAL (op);
886 return i == 3 || i == 5 || i == 9;
889 ;; Match 4 or 8 to 11. Used for embedded rounding.
890 (define_predicate "const_4_or_8_to_11_operand"
891 (match_code "const_int")
893 HOST_WIDE_INT i = INTVAL (op);
894 return i == 4 || (i >= 8 && i <= 11);
897 ;; Match 4 or 8. Used for SAE.
898 (define_predicate "const48_operand"
899 (match_code "const_int")
901 HOST_WIDE_INT i = INTVAL (op);
902 return i == 4 || i == 8;
;; Match 0 or 1.
906 (define_predicate "const_0_to_1_operand"
907 (and (match_code "const_int")
908 (ior (match_test "op == const0_rtx")
909 (match_test "op == const1_rtx"))))
;; Match 0 to 3.
912 (define_predicate "const_0_to_3_operand"
913 (and (match_code "const_int")
914 (match_test "IN_RANGE (INTVAL (op), 0, 3)")))
;; Match 0 to 4.
917 (define_predicate "const_0_to_4_operand"
918 (and (match_code "const_int")
919 (match_test "IN_RANGE (INTVAL (op), 0, 4)")))
;; Match 0 to 5.
922 (define_predicate "const_0_to_5_operand"
923 (and (match_code "const_int")
924 (match_test "IN_RANGE (INTVAL (op), 0, 5)")))
;; Match 0 to 7.
927 (define_predicate "const_0_to_7_operand"
928 (and (match_code "const_int")
929 (match_test "IN_RANGE (INTVAL (op), 0, 7)")))
;; Match 0 to 15.
932 (define_predicate "const_0_to_15_operand"
933 (and (match_code "const_int")
934 (match_test "IN_RANGE (INTVAL (op), 0, 15)")))
;; Match 0 to 31.
937 (define_predicate "const_0_to_31_operand"
938 (and (match_code "const_int")
939 (match_test "IN_RANGE (INTVAL (op), 0, 31)")))
;; Match 0 to 63.
942 (define_predicate "const_0_to_63_operand"
943 (and (match_code "const_int")
944 (match_test "IN_RANGE (INTVAL (op), 0, 63)")))
;; Match 0 to 127.
947 (define_predicate "const_0_to_127_operand"
948 (and (match_code "const_int")
949 (match_test "IN_RANGE (INTVAL (op), 0, 127)")))
;; Match 0 to 255.
952 (define_predicate "const_0_to_255_operand"
953 (and (match_code "const_int")
954 (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
956 ;; Match (0 to 255) * 8
957 (define_predicate "const_0_to_255_mul_8_operand"
958 (match_code "const_int")
960 unsigned HOST_WIDE_INT val = INTVAL (op);
961 return val <= 255*8 && val % 8 == 0;
964 ;; Match 1 to 255 except multiples of 8
965 (define_predicate "const_0_to_255_not_mul_8_operand"
966 (match_code "const_int")
968 unsigned HOST_WIDE_INT val = INTVAL (op);
969 return val <= 255 && val % 8 != 0;
972 ;; Return true if OP is CONST_INT >= 1 and <= 31 (a valid operand
973 ;; for shift & compare patterns, as shifting by 0 does not change flags).
974 (define_predicate "const_1_to_31_operand"
975 (and (match_code "const_int")
976 (match_test "IN_RANGE (INTVAL (op), 1, 31)")))
978 ;; Return true if OP is CONST_INT >= 1 and <= 63 (a valid operand
979 ;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
980 (define_predicate "const_1_to_63_operand"
981 (and (match_code "const_int")
982 (match_test "IN_RANGE (INTVAL (op), 1, 63)")))
;; Match 2 or 3.
985 (define_predicate "const_2_to_3_operand"
986 (and (match_code "const_int")
987 (match_test "IN_RANGE (INTVAL (op), 2, 3)")))
;; Match 4 or 5.
990 (define_predicate "const_4_to_5_operand"
991 (and (match_code "const_int")
992 (match_test "IN_RANGE (INTVAL (op), 4, 5)")))
;; Match 4 to 7.
995 (define_predicate "const_4_to_7_operand"
996 (and (match_code "const_int")
997 (match_test "IN_RANGE (INTVAL (op), 4, 7)")))
;; Match 6 or 7.
1000 (define_predicate "const_6_to_7_operand"
1001 (and (match_code "const_int")
1002 (match_test "IN_RANGE (INTVAL (op), 6, 7)")))
;; Match 8 or 9.
1005 (define_predicate "const_8_to_9_operand"
1006 (and (match_code "const_int")
1007 (match_test "IN_RANGE (INTVAL (op), 8, 9)")))
;; Match 8 to 11.
1010 (define_predicate "const_8_to_11_operand"
1011 (and (match_code "const_int")
1012 (match_test "IN_RANGE (INTVAL (op), 8, 11)")))
;; Match 8 to 15.
1015 (define_predicate "const_8_to_15_operand"
1016 (and (match_code "const_int")
1017 (match_test "IN_RANGE (INTVAL (op), 8, 15)")))
;; Match 10 or 11.
1020 (define_predicate "const_10_to_11_operand"
1021 (and (match_code "const_int")
1022 (match_test "IN_RANGE (INTVAL (op), 10, 11)")))
;; Match 12 or 13.
1025 (define_predicate "const_12_to_13_operand"
1026 (and (match_code "const_int")
1027 (match_test "IN_RANGE (INTVAL (op), 12, 13)")))
;; Match 12 to 15.
1030 (define_predicate "const_12_to_15_operand"
1031 (and (match_code "const_int")
1032 (match_test "IN_RANGE (INTVAL (op), 12, 15)")))
;; Match 14 or 15.
1035 (define_predicate "const_14_to_15_operand"
1036 (and (match_code "const_int")
1037 (match_test "IN_RANGE (INTVAL (op), 14, 15)")))
;; Match 16 to 19.
1040 (define_predicate "const_16_to_19_operand"
1041 (and (match_code "const_int")
1042 (match_test "IN_RANGE (INTVAL (op), 16, 19)")))
;; Match 16 to 31.
1045 (define_predicate "const_16_to_31_operand"
1046 (and (match_code "const_int")
1047 (match_test "IN_RANGE (INTVAL (op), 16, 31)")))
;; Match 20 to 23.
1050 (define_predicate "const_20_to_23_operand"
1051 (and (match_code "const_int")
1052 (match_test "IN_RANGE (INTVAL (op), 20, 23)")))
;; Match 24 to 27.
1055 (define_predicate "const_24_to_27_operand"
1056 (and (match_code "const_int")
1057 (match_test "IN_RANGE (INTVAL (op), 24, 27)")))
;; Match 28 to 31.
1060 (define_predicate "const_28_to_31_operand"
1061 (and (match_code "const_int")
1062 (match_test "IN_RANGE (INTVAL (op), 28, 31)")))
;; Match a valid cmpps/cmppd immediate: 0-7 for SSE predicates,
;; 0-31 when AVX extends the predicate encoding.
1064 (define_predicate "cmpps_imm_operand"
1065 (ior (match_operand 0 "const_0_to_7_operand")
1066 (and (match_test "TARGET_AVX")
1067 (match_operand 0 "const_0_to_31_operand"))))
1069 ;; True if this is a constant appropriate for an increment or decrement.
1070 (define_predicate "incdec_operand"
1071 (match_code "const_int")
1073 /* On Pentium4, the inc and dec operations causes extra dependency on flag
1074 registers, since carry flag is not set. */
1075 if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
1077 return op == const1_rtx || op == constm1_rtx;
1080 ;; True for registers, or const_int_operand, used to vec_setm expander.
;; A variable index requires SSE4.1; a constant index is always fine.
1081 (define_predicate "vec_setm_sse41_operand"
1082 (ior (and (match_operand 0 "register_operand")
1083 (match_test "TARGET_SSE4_1"))
1084 (match_code "const_int")))
;; As above, but a variable index requires AVX2.
1086 (define_predicate "vec_setm_avx2_operand"
1087 (ior (and (match_operand 0 "register_operand")
1088 (match_test "TARGET_AVX2"))
1089 (match_code "const_int")))
;; As above for MMX-in-SSE: a variable index needs both SSE4.1 and
;; MMX-with-SSE emulation.
1091 (define_predicate "vec_setm_mmx_operand"
1092 (ior (and (match_operand 0 "register_operand")
1093 (match_test "TARGET_SSE4_1")
1094 (match_test "TARGET_MMX_WITH_SSE"))
1095 (match_code "const_int")))
1097 ;; True for registers, or 1 or -1. Used to optimize double-word shifts.
1098 (define_predicate "reg_or_pm1_operand"
1099 (ior (match_operand 0 "register_operand")
1100 (and (match_code "const_int")
1101 (ior (match_test "op == const1_rtx")
1102 (match_test "op == constm1_rtx")))))
1104 ;; True for registers, or (not: registers). Used to optimize 3-operand
1105 ;; bitwise operation.
1106 (define_predicate "regmem_or_bitnot_regmem_operand"
1107 (ior (match_operand 0 "nonimmediate_operand")
1108 (and (match_code "not")
1109 (match_test "nonimmediate_operand (XEXP (op, 0), mode)"))))
1111 ;; True for expressions valid for 3-operand ternlog instructions.
1112 (define_predicate "ternlog_operand"
1113 (and (match_code "not,and,ior,xor")
1114 (match_test "ix86_ternlog_operand_p (op)")))
1116 ;; True if OP is acceptable as operand of DImode shift expander.
;; Without native 64-bit shifts (32-bit target), the operand must be
;; in a register so the double-word shift sequence can be emitted.
1117 (define_predicate "shiftdi_operand"
1118 (if_then_else (match_test "TARGET_64BIT")
1119 (match_operand 0 "nonimmediate_operand")
1120 (match_operand 0 "register_operand")))
;; Input operand for the DImode ashift expander; on 32-bit targets
;; additionally allows the 1/-1 constants handled specially there.
1122 (define_predicate "ashldi_input_operand"
1123 (if_then_else (match_test "TARGET_64BIT")
1124 (match_operand 0 "nonimmediate_operand")
1125 (match_operand 0 "reg_or_pm1_operand")))
1127 ;; Return true if OP is a vector load from the constant pool with just
1128 ;; the first element nonzero.
1129 (define_predicate "zero_extended_scalar_load_operand"
1133 op = avoid_constant_pool_reference (op);
1135 if (GET_CODE (op) != CONST_VECTOR)
1138 n_elts = CONST_VECTOR_NUNITS (op);
1140 for (n_elts--; n_elts > 0; n_elts--)
1142 rtx elt = CONST_VECTOR_ELT (op, n_elts);
1143 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
1149 /* Return true if operand is a float vector constant that is all ones. */
1150 (define_predicate "float_vector_all_ones_operand"
1151 (match_code "const_vector,mem")
1153 mode = GET_MODE (op);
1154 if (!FLOAT_MODE_P (mode)
1156 && (!SYMBOL_REF_P (XEXP (op, 0))
1157 || !CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))))
1162 op = get_pool_constant (XEXP (op, 0));
1163 if (GET_CODE (op) != CONST_VECTOR)
1166 if (GET_MODE (op) != mode
1167 && INTEGRAL_MODE_P (GET_MODE (op))
1168 && op == CONSTM1_RTX (GET_MODE (op)))
1172 rtx first = XVECEXP (op, 0, 0);
1173 for (int i = 1; i != GET_MODE_NUNITS (GET_MODE (op)); i++)
1175 rtx tmp = XVECEXP (op, 0, i);
1176 if (!rtx_equal_p (tmp, first))
1179 if (GET_MODE (first) == E_SFmode)
1182 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (first), l);
1183 return (l & 0xffffffff) == 0xffffffff;
1185 else if (GET_MODE (first) == E_DFmode)
1188 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (first), l);
1189 return ((l[0] & 0xffffffff) == 0xffffffff
1190 && (l[1] & 0xffffffff) == 0xffffffff);
1196 /* Return true if operand is an integral vector constant that is all ones. */
1197 (define_predicate "vector_all_ones_operand"
1198 (and (match_code "const_vector")
1199 (match_test "INTEGRAL_MODE_P (GET_MODE (op))")
1200 (match_test "op == CONSTM1_RTX (GET_MODE (op))")))
1202 /* Return true if operand is an all-ones vector constant (integer or
   float), or the scalar constant -1. */
1203 (define_predicate "int_float_vector_all_ones_operand"
1204 (ior (match_operand 0 "vector_all_ones_operand")
1205 (match_operand 0 "float_vector_all_ones_operand")
1206 (match_test "op == constm1_rtx")))
1208 /* Return true if operand is an 128/256bit all ones vector
1209 that zero-extends to 256/512bit. */
1210 (define_predicate "vector_all_ones_zero_extend_half_operand"
1211 (match_code "const_vector")
1213 mode = GET_MODE (op);
1214 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT
1215 || (GET_MODE_SIZE (mode) != 32
1216 && GET_MODE_SIZE (mode) != 64))
1219 int nelts = CONST_VECTOR_NUNITS (op);
1220 for (int i = 0; i != nelts; i++)
1222 rtx elt = CONST_VECTOR_ELT (op, i);
1224 && elt != CONSTM1_RTX (GET_MODE_INNER (mode)))
1227 && elt != CONST0_RTX (GET_MODE_INNER (mode)))
1233 /* Return true if operand is an 128bit all ones vector
1234 that zero extends to 512bit. */
1235 (define_predicate "vector_all_ones_zero_extend_quarter_operand"
1236 (match_code "const_vector")
1238 mode = GET_MODE (op);
1239 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT
1240 || GET_MODE_SIZE (mode) != 64)
1243 int nelts = CONST_VECTOR_NUNITS (op);
1244 for (int i = 0; i != nelts; i++)
1246 rtx elt = CONST_VECTOR_ELT (op, i);
1248 && elt != CONSTM1_RTX (GET_MODE_INNER (mode)))
1251 && elt != CONST0_RTX (GET_MODE_INNER (mode)))
1257 ; Return true when OP is an operand acceptable as a vector memory operand.
1258 ; Only AVX allows a misaligned memory operand.
1259 (define_predicate "vector_memory_operand"
1260 (and (match_operand 0 "memory_operand")
1261 (ior (match_test "TARGET_AVX")
1262 (match_test "MEM_ALIGN (op) >= GET_MODE_ALIGNMENT (mode)"))))
1264 ; Return true when OP is register_operand or vector_memory_operand.
1265 (define_predicate "vector_operand"
1266 (ior (match_operand 0 "register_operand")
1267 (match_operand 0 "vector_memory_operand")))
1269 ; Return true when OP is register_operand, vector_memory_operand or const_vector.
1271 (define_predicate "vector_or_const_vector_operand"
1272 (ior (match_operand 0 "register_operand")
1273 (match_operand 0 "vector_memory_operand")
1274 (match_code "const_vector")))
;; True for an AVX512 embedded-broadcast operand: a vec_duplicate of a
;; scalar memory operand whose inner mode is valid for broadcasting.
1276 (define_predicate "bcst_mem_operand"
1277 (and (match_code "vec_duplicate")
1278 (and (match_test "TARGET_AVX512F")
1279 (ior (match_test "TARGET_AVX512VL")
1280 (and (match_test "GET_MODE_SIZE (GET_MODE (op)) == 64")
1281 (match_test "TARGET_EVEX512"))))
1282 (match_test "VALID_BCST_MODE_P (GET_MODE_INNER (GET_MODE (op)))")
1283 (match_test "GET_MODE (XEXP (op, 0))
1284 == GET_MODE_INNER (GET_MODE (op))")
1285 (match_test "memory_operand (XEXP (op, 0), GET_MODE (XEXP (op, 0)))")))
1287 ; Return true when OP is bcst_mem_operand or vector_memory_operand.
1288 (define_predicate "bcst_vector_operand"
1289 (ior (match_operand 0 "vector_operand")
1290 (match_operand 0 "bcst_mem_operand")))
1292 ;; Return true when OP is either a nonimmediate operand, or any constant vector.
1294 (define_predicate "nonimmediate_or_const_vector_operand"
1295 (ior (match_operand 0 "nonimmediate_operand")
1296 (match_code "const_vector")))
;; True for a nonimmediate operand, or a constant broadcast (a duplicate
;; of the same scalar in every element).
1298 (define_predicate "nonimmediate_or_const_vec_dup_operand"
1299 (ior (match_operand 0 "nonimmediate_operand")
1300 (match_test "const_vec_duplicate_p (op)")))
1302 ;; Return true when OP is either a register operand, or any constant vector.
1304 (define_predicate "reg_or_const_vector_operand"
1305 (ior (match_operand 0 "register_operand")
1306 (match_code "const_vector")))
1308 ;; Return true when OP is CONST_VECTOR which can be converted to a
1309 ;; sign extended 32-bit integer.
1310 (define_predicate "x86_64_const_vector_operand"
1311 (match_code "const_vector")
1313 if (mode == VOIDmode)
1314 mode = GET_MODE (op);
1315 else if (GET_MODE (op) != mode)
1317 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1319 HOST_WIDE_INT val = ix86_convert_const_vector_to_integer (op, mode);
1320 return trunc_int_for_mode (val, SImode) == val;
;; True for a nonimmediate operand, or a CONST_VECTOR representable as a
;; sign-extended 32-bit immediate.
1323 (define_predicate "nonimmediate_or_x86_64_const_vector_operand"
1324 (ior (match_operand 0 "nonimmediate_operand")
1325 (match_operand 0 "x86_64_const_vector_operand")))
1327 ;; Return true when OP is nonimmediate or standard SSE constant.
1328 (define_predicate "nonimmediate_or_sse_const_operand"
1329 (ior (match_operand 0 "nonimmediate_operand")
1330 (match_test "standard_sse_constant_p (op, mode)")))
1332 ;; Return true if OP is a register or a zero.
1333 (define_predicate "reg_or_0_operand"
1334 (ior (match_operand 0 "register_operand")
1335 (match_operand 0 "const0_operand")))
1337 ; Return true when OP is a nonimmediate or zero.
1338 (define_predicate "nonimm_or_0_operand"
1339 (ior (match_operand 0 "nonimmediate_operand")
1340 (match_operand 0 "const0_operand")))
1342 ;; Return true for RTX codes that force SImode address.
1343 (define_predicate "SImode_address_operand"
1344 (match_code "subreg,zero_extend,and"))
1346 ;; Return true if op is a valid address for LEA, and does not contain
1347 ;; a segment override. Defined as a special predicate to allow
1348 ;; mode-less const_int operands pass to address_operand.
1349 (define_special_predicate "address_no_seg_operand"
1350 (match_test "address_operand (op, VOIDmode)")
1352 struct ix86_address parts;
1355 if (!CONST_INT_P (op)
1357 && GET_MODE (op) != mode)
1360 ok = ix86_decompose_address (op, &parts);
1362 return parts.seg == ADDR_SPACE_GENERIC;
1365 ;; Return true if op if a valid base register, displacement or
1366 ;; sum of base register and displacement for VSIB addressing.
1367 (define_predicate "vsib_address_operand"
1368 (match_test "address_operand (op, VOIDmode)")
1370 struct ix86_address parts;
1374 ok = ix86_decompose_address (op, &parts);
1376 if (parts.index || parts.seg != ADDR_SPACE_GENERIC)
1379 /* VSIB addressing doesn't support (%rip). */
1383 if (GET_CODE (disp) == CONST)
1385 disp = XEXP (disp, 0);
1386 if (GET_CODE (disp) == PLUS)
1387 disp = XEXP (disp, 0);
1388 if (GET_CODE (disp) == UNSPEC)
1389 switch (XINT (disp, 1))
1391 case UNSPEC_GOTPCREL:
1393 case UNSPEC_GOTNTPOFF:
1399 && (GET_CODE (disp) == SYMBOL_REF
1400 || GET_CODE (disp) == LABEL_REF))
1407 (define_predicate "vsib_mem_operator"
1410 ;; Return true if the rtx is known to be at least 32 bits aligned.
1411 (define_predicate "aligned_operand"
1412 (match_operand 0 "general_operand")
1414 struct ix86_address parts;
1417 /* Registers and immediate operands are always "aligned". */
1421 /* All patterns using aligned_operand on memory operands ends up
1422 in promoting memory operand to 64bit and thus causing memory mismatch. */
1423 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
1426 /* Don't even try to do any aligned optimizations with volatiles. */
1427 if (MEM_VOLATILE_P (op))
1430 if (MEM_ALIGN (op) >= 32)
1435 /* Pushes and pops are only valid on the stack pointer. */
1436 if (GET_CODE (op) == PRE_DEC
1437 || GET_CODE (op) == POST_INC)
1440 /* Decode the address. */
1441 ok = ix86_decompose_address (op, &parts);
1444 if (parts.base && SUBREG_P (parts.base))
1445 parts.base = SUBREG_REG (parts.base);
1446 if (parts.index && SUBREG_P (parts.index))
1447 parts.index = SUBREG_REG (parts.index);
1449 /* Look for some component that isn't known to be aligned. */
1452 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
1457 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
1462 if (!CONST_INT_P (parts.disp)
1463 || (INTVAL (parts.disp) & 3))
1467 /* Didn't find one -- this must be an aligned address. */
1471 ;; Return true if OP is memory operand with a displacement.
1472 (define_predicate "memory_displacement_operand"
1473 (match_operand 0 "memory_operand")
1475 struct ix86_address parts;
1478 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1480 return parts.disp != NULL_RTX;
1483 ;; Return true if OP is memory operand with a displacement only.
1484 (define_predicate "memory_displacement_only_operand"
1485 (match_operand 0 "memory_operand")
1487 struct ix86_address parts;
1493 ok = ix86_decompose_address (XEXP (op, 0), &parts);
1496 if (parts.base || parts.index)
1499 return parts.disp != NULL_RTX;
1502 ;; Return true if OP is memory operand that cannot be represented
1503 ;; by the modRM array.
1504 (define_predicate "long_memory_operand"
1505 (and (match_operand 0 "memory_operand")
1506 (match_test "memory_address_length (op, false)")))
1508 ;; Return true if OP is a comparison operator that can be issued by fcmov.
1509 (define_predicate "fcmov_comparison_operator"
1510 (match_operand 0 "comparison_operator")
1512 machine_mode inmode = GET_MODE (XEXP (op, 0));
1513 enum rtx_code code = GET_CODE (op);
1515 if (inmode == CCFPmode)
1516 code = ix86_fp_compare_code_to_integer (code);
1518 /* i387 supports just limited amount of conditional codes. */
1522 if (inmode == CCCmode || inmode == CCGZmode)
1526 if (inmode == CCmode || inmode == CCFPmode)
1529 case ORDERED: case UNORDERED:
1537 ;; Return true if OP is a comparison that can be used in the CMPSS/CMPPS insns.
1538 ;; The first set are supported directly; the second set can't be done with
1539 ;; full IEEE support, i.e. NaNs.
1541 (define_predicate "sse_comparison_operator"
1542 (ior (match_code "eq,ne,lt,le,unordered,unge,ungt,ordered")
1543 (and (match_test "TARGET_AVX")
1544 (match_code "ge,gt,uneq,unle,unlt,ltgt"))))
;; Equality plus signed ordering comparisons.
1546 (define_predicate "ix86_comparison_int_operator"
1547 (match_code "ne,eq,ge,gt,le,lt"))
;; Equality plus unsigned ordering comparisons.
1549 (define_predicate "ix86_comparison_uns_operator"
1550 (match_code "ne,eq,geu,gtu,leu,ltu"))
;; Equality comparisons only (NE/EQ).
1552 (define_predicate "bt_comparison_operator"
1553 (match_code "ne,eq"))
;; Unsigned greater-than / less-or-equal pair.
1555 (define_predicate "shr_comparison_operator"
1556 (match_code "gtu,leu"))
;; Unsigned greater-or-equal / less-than (carry-flag) pair.
1558 (define_predicate "add_comparison_operator"
1559 (match_code "geu,ltu"))
1561 ;; Return true if OP is a valid comparison operator in valid mode.
1562 (define_predicate "ix86_comparison_operator"
1563 (match_operand 0 "comparison_operator")
1565 machine_mode inmode = GET_MODE (XEXP (op, 0));
1566 enum rtx_code code = GET_CODE (op);
1568 if (inmode == CCFPmode)
1569 return ix86_trivial_fp_comparison_operator (op, mode);
1574 if (inmode == CCGZmode)
1578 if (inmode == CCmode || inmode == CCGCmode
1579 || inmode == CCGOCmode || inmode == CCNOmode || inmode == CCGZmode)
1583 if (inmode == CCCmode || inmode == CCGZmode)
1587 if (inmode == CCmode)
1590 case ORDERED: case UNORDERED:
1591 if (inmode == CCmode)
1595 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1603 ;; Return true if OP is a valid comparison operator
1604 ;; testing carry flag to be set.
1605 (define_predicate "ix86_carry_flag_operator"
1606 (match_code "ltu,unlt")
1608 machine_mode inmode = GET_MODE (XEXP (op, 0));
1609 enum rtx_code code = GET_CODE (op);
1611 if (inmode == CCFPmode)
1612 code = ix86_fp_compare_code_to_integer (code);
1613 else if (inmode != CCmode && inmode != CCCmode && inmode != CCGZmode)
1619 ;; Return true if OP is a valid comparison operator
1620 ;; testing carry flag to be unset.
1621 (define_predicate "ix86_carry_flag_unset_operator"
1622 (match_code "geu,ge")
1624 machine_mode inmode = GET_MODE (XEXP (op, 0));
1625 enum rtx_code code = GET_CODE (op);
1627 if (inmode == CCFPmode)
1628 code = ix86_fp_compare_code_to_integer (code);
1629 else if (inmode != CCmode && inmode != CCCmode && inmode != CCGZmode)
1635 ;; Return true if this comparison only requires testing one flag bit.
1636 ;; VCOMX/VUCOMX set ZF, SF, OF, differently from COMI/UCOMI.
1637 (define_predicate "ix86_trivial_fp_comparison_operator"
1638 (if_then_else (match_test "TARGET_AVX10_2_256")
1639 (match_code "gt,ge,unlt,unle,eq,uneq,ne,ltgt,ordered,unordered")
1640 (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered")))
;; As above, but always the pre-AVX10.2 code set (used for XFmode
;; compares, which never use VCOMX/VUCOMX).
1642 (define_predicate "ix86_trivial_fp_comparison_operator_xf"
1643 (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))
1645 ;; Return true if we know how to do this comparison. Others require
1646 ;; testing more than one flag bit, and we let the generic middle-end figure it out.
1648 (define_predicate "ix86_fp_comparison_operator"
1649 (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
1650 == IX86_FPCMP_ARITH")
1651 (match_operand 0 "comparison_operator")
1652 (match_operand 0 "ix86_trivial_fp_comparison_operator")))
;; As above, for XFmode compares.
1654 (define_predicate "ix86_fp_comparison_operator_xf"
1655 (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
1656 == IX86_FPCMP_ARITH")
1657 (match_operand 0 "comparison_operator")
1658 (match_operand 0 "ix86_trivial_fp_comparison_operator_xf")))
1660 ;; Return true if we can perform this comparison on TImode operands.
1661 (define_predicate "ix86_timode_comparison_operator"
1662 (if_then_else (match_test "TARGET_64BIT")
1663 (match_operand 0 "ordered_comparison_operator")
1664 (match_operand 0 "bt_comparison_operator")))
1666 ;; Return true if this is a valid second operand for a TImode comparison.
1667 (define_predicate "ix86_timode_comparison_operand"
1668 (if_then_else (match_test "TARGET_64BIT")
1669 (match_operand 0 "x86_64_general_operand")
1670 (match_operand 0 "nonimmediate_operand")))
1672 ;; Nearly general operand, but accept any const_double, since we wish
1673 ;; to be able to drop them into memory rather than have them get pulled into registers.
1675 (define_predicate "cmp_fp_expander_operand"
1676 (ior (match_code "const_double")
1677 (match_operand 0 "general_operand")))
1679 ;; Return true if this is a valid binary floating-point operation.
1680 (define_predicate "binary_fp_operator"
1681 (match_code "plus,minus,mult,div"))
1683 ;; Return true if this is a multiply operation.
1684 (define_predicate "mult_operator"
1685 (match_code "mult"))
1687 ;; Return true if this is a division operation.
1688 (define_predicate "div_operator"
1691 ;; Return true if this is an AND, IOR or XOR operation.
1692 (define_predicate "logic_operator"
1693 (match_code "and,ior,xor"))
1695 ;; Return true if this is a PLUS, MINUS, AND, IOR or XOR operation.
1696 (define_predicate "plusminuslogic_operator"
1697 (match_code "plus,minus,and,ior,xor"))
1699 ;; Return true for ARITHMETIC_P.
1700 (define_predicate "arith_or_logical_operator"
1701 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
1702 mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))
1704 ;; Return true for COMMUTATIVE_P.
1705 (define_predicate "commutative_operator"
1706 (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))
1708 ;; Return true if OP is a binary operator that can be promoted to wider mode.
1709 (define_predicate "promotable_binary_operator"
1710 (ior (match_code "plus,minus,and,ior,xor,ashift")
1711 (and (match_code "mult")
1712 (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
;; True for a COMPARE rtx.
1714 (define_predicate "compare_operator"
1715 (match_code "compare"))
;; True for the bit-field extraction codes.
1717 (define_predicate "extract_operator"
1718 (match_code "zero_extract,sign_extract"))
1720 ;; Return true if OP is a memory operand, aligned to
1721 ;; less than its natural alignment.
1722 (define_predicate "misaligned_operand"
1723 (and (match_code "mem")
1724 (match_test "MEM_ALIGN (op) < GET_MODE_BITSIZE (mode)")))
1726 ;; Return true if OP is a parallel for an mov{d,q,dqa,ps,pd} vec_select,
1727 ;; where one of the two operands of the vec_concat is const0_operand.
1728 (define_predicate "movq_parallel"
1729 (match_code "parallel")
1731 unsigned nelt = XVECLEN (op, 0);
1732 unsigned nelt2 = nelt >> 1;
1738 /* Validate that all of the elements are constants,
1739 lower halves of permute are lower halves of the first operand,
1740 upper halves of permute come from any of the second operand. */
1741 for (i = 0; i < nelt; ++i)
1743 rtx er = XVECEXP (op, 0, i);
1744 unsigned HOST_WIDE_INT ei;
1746 if (!CONST_INT_P (er))
1749 if (i < nelt2 && ei != i)
1751 if (i >= nelt2 && (ei < nelt || ei >= nelt << 1))
1758 ;; Return true if OP is a vzeroall operation, known to be a PARALLEL.
1759 (define_predicate "vzeroall_operation"
1760 (match_code "parallel")
1762 unsigned i, nregs = TARGET_64BIT ? 16 : 8;
1764 if ((unsigned) XVECLEN (op, 0) != 1 + nregs)
1767 for (i = 0; i < nregs; i++)
1769 rtx elt = XVECEXP (op, 0, i+1);
1771 if (GET_CODE (elt) != SET
1772 || GET_CODE (SET_DEST (elt)) != REG
1773 || GET_MODE (SET_DEST (elt)) != V8SImode
1774 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
1775 || SET_SRC (elt) != CONST0_RTX (V8SImode))
1781 ;; return true if OP is a vzeroall pattern.
1782 (define_predicate "vzeroall_pattern"
1783 (and (match_code "parallel")
1784 (match_code "unspec_volatile" "a")
1785 (match_test "XINT (XVECEXP (op, 0, 0), 1) == UNSPECV_VZEROALL")))
1787 ;; return true if OP is a vzeroupper pattern.
1788 (define_predicate "vzeroupper_pattern"
1789 (and (match_code "parallel")
1790 (match_code "unspec" "b")
1791 (match_test "XINT (XVECEXP (op, 0, 1), 1) == UNSPEC_CALLEE_ABI")
1792 (match_test "INTVAL (XVECEXP (XVECEXP (op, 0, 1), 0, 0)) == ABI_VZEROUPPER")))
1794 ;; Return true if OP is an addsub vec_merge operation
1795 (define_predicate "addsub_vm_operator"
1796 (match_code "vec_merge")
1807 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1809 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1814 mask = INTVAL (XEXP (op, 2));
1815 nunits = GET_MODE_NUNITS (mode);
1817 for (elt = 0; elt < nunits; elt++)
1819 /* bit clear: take from op0, set: take from op1 */
1820 int bit = !(mask & (HOST_WIDE_INT_1U << elt));
1822 if (bit != ((elt & 1) ^ swapped))
1829 ;; Return true if OP is an addsub vec_select/vec_concat operation
1830 (define_predicate "addsub_vs_operator"
1831 (and (match_code "vec_select")
1832 (match_code "vec_concat" "0"))
1838 op0 = XEXP (XEXP (op, 0), 0);
1839 op1 = XEXP (XEXP (op, 0), 1);
1842 if (GET_CODE (op0) == MINUS && GET_CODE (op1) == PLUS)
1844 else if (GET_CODE (op0) == PLUS && GET_CODE (op1) == MINUS)
1849 nunits = GET_MODE_NUNITS (mode);
1850 if (XVECLEN (XEXP (op, 1), 0) != nunits)
1853 /* We already checked that permutation is suitable for addsub,
1854 so only look at the first element of the parallel. */
1855 elt = INTVAL (XVECEXP (XEXP (op, 1), 0, 0));
1857 return elt == (swapped ? nunits : 0);
1860 ;; Return true if OP is a parallel for an addsub vec_select.
1861 (define_predicate "addsub_vs_parallel"
1862 (and (match_code "parallel")
1863 (match_code "const_int" "a"))
1865 int nelt = XVECLEN (op, 0);
1871 /* Check that the permutation is suitable for addsub.
1872 For example, { 0 9 2 11 4 13 6 15 } or { 8 1 10 3 12 5 14 7 }. */
1873 elt = INTVAL (XVECEXP (op, 0, 0));
1876 for (i = 1; i < nelt; ++i)
1877 if (INTVAL (XVECEXP (op, 0, i)) != (i + (i & 1) * nelt))
1880 else if (elt == nelt)
1882 for (i = 1; i < nelt; ++i)
1883 if (INTVAL (XVECEXP (op, 0, i)) != (elt + i - (i & 1) * nelt))
1892 ;; Return true if OP is a constant pool used in perm{w,d,b} which contains
1893 ;; indexes matching pmov{dw,wb,qd}.
1894 (define_predicate "permvar_truncate_operand"
1897 int nelt = GET_MODE_NUNITS (mode);
1901 if (!INTEGRAL_MODE_P (mode) || !VECTOR_MODE_P (mode))
1907 if (!ix86_extract_perm_from_pool_constant (&perm[0], op))
1910 id = exact_log2 (nelt);
1912 /* Check that the permutation is suitable for pmovz{bw,wd,dq}.
1913 For example V16HImode to V8HImode
1914 { 0 2 4 6 8 10 12 14 * * * * * * * * }. */
1915 for (int i = 0; i != nelt / 2; i++)
1916 if ((perm[i] & ((1 << id) - 1)) != i * 2)
1922 ;; Return true if OP is a constant pool used in pshufb which contains indexes
1924 (define_predicate "pshufb_truncv4siv4hi_operand"
1929 if (mode != E_V16QImode)
1932 if (!ix86_extract_perm_from_pool_constant (&perm[0], op))
1935 /* Check that the permutation is suitable for pmovdw.
1936 For example V4SImode to V4HImode
1937 { 0 1 4 5 8 9 12 13 * * * * * * * * }.
1938 index = i % 2 + (i / 2) * 4. */
1939 for (int i = 0; i != 8; i++)
1941 /* if (SRC2[(i * 8)+7] = 1) then DEST[(i*8)+7..(i*8)+0] := 0; */
1945 if ((perm[i] & 15) != ((i & 1) + (i & 0xFE) * 2))
1952 ;; Return true if OP is a constant pool used in pshufb which contains indexes
1954 (define_predicate "pshufb_truncv8hiv8qi_operand"
1959 if (mode != E_V16QImode)
1962 if (!ix86_extract_perm_from_pool_constant (&perm[0], op))
1965 /* Check that the permutation is suitable for pmovwb.
1966 For example V16QImode to V8QImode
1967 { 0 2 4 6 8 10 12 14 * * * * * * * * }.
1968 index = i % 2 + (i / 2) * 4. */
1969 for (int i = 0; i != 8; i++)
1971 /* if (SRC2[(i * 8)+7] = 1) then DEST[(i*8)+7..(i*8)+0] := 0; */
1975 if ((perm[i] & 15) != i * 2)
1982 ;; Return true if OP is a parallel for an pmovz{bw,wd,dq} vec_select,
1983 ;; where one of the two operands of the vec_concat is const0_operand.
1984 (define_predicate "pmovzx_parallel"
1985 (and (match_code "parallel")
1986 (match_code "const_int" "a"))
1988 int nelt = XVECLEN (op, 0);
1994 /* Check that the permutation is suitable for pmovz{bw,wd,dq}.
1995 For example { 0 16 1 17 2 18 3 19 4 20 5 21 6 22 7 23 }. */
1996 elt = INTVAL (XVECEXP (op, 0, 0));
1999 for (i = 1; i < nelt; ++i)
2002 if (INTVAL (XVECEXP (op, 0, i)) < nelt)
2005 else if (INTVAL (XVECEXP (op, 0, i)) != i / 2)
2014 ;; Return true if OP is a const vector with duplicate value.
2015 (define_predicate "const_vector_duplicate_operand"
2016 (match_code "const_vector")
2018 rtx elt = XVECEXP (op, 0, 0);
2019 int i, nelt = XVECLEN (op, 0);
2021 for (i = 1; i < nelt; ++i)
2022 if (!rtx_equal_p (elt, XVECEXP (op, 0, i)))
2027 ;; Return true if OP is a parallel for a vbroadcast permute.
2028 (define_predicate "avx_vbroadcast_operand"
2029 (and (match_code "parallel")
2030 (match_code "const_int" "a"))
2032 rtx elt = XVECEXP (op, 0, 0);
2033 int i, nelt = XVECLEN (op, 0);
2035 /* Don't bother checking there are the right number of operands,
2036 merely that they're all identical. */
2037 for (i = 1; i < nelt; ++i)
2038 if (XVECEXP (op, 0, i) != elt)
2043 ;; Return true if OP is a parallel for a palignr permute.
2044 (define_predicate "palignr_operand"
2045 (and (match_code "parallel")
2046 (match_code "const_int" "a"))
2048 int elt = INTVAL (XVECEXP (op, 0, 0));
2049 int i, nelt = XVECLEN (op, 0);
2051 /* Check that an order in the permutation is suitable for palignr.
2052 For example, {5 6 7 0 1 2 3 4} is "palignr 5, xmm, xmm". */
2053 for (i = 1; i < nelt; ++i)
2054 if (INTVAL (XVECEXP (op, 0, i)) != ((elt + i) % nelt))
2059 ;; Return true if OP is a proper third operand to vpblendw256.
2060 (define_predicate "avx2_pblendw_operand"
2061 (match_code "const_int")
2063 HOST_WIDE_INT val = INTVAL (op);
2064 HOST_WIDE_INT low = val & 0xff;
2065 return val == ((low << 8) | low);
2068 ;; Return true if OP is vector_operand or CONST_VECTOR.
2069 (define_predicate "general_vector_operand"
2070 (ior (match_operand 0 "vector_operand")
2071 (match_code "const_vector")))
2073 ;; Return true if OP is either the -1 constant or stored in a register.
2074 (define_predicate "register_or_constm1_operand"
2075 (ior (match_operand 0 "register_operand")
2076 (and (match_code "const_int")
2077 (match_test "op == constm1_rtx"))))
2079 ;; Return true if the vector ends with between 12 and 18 register saves using
2080 ;; RAX as the base address.
2081 (define_predicate "save_multiple"
2082 (match_code "parallel")
2084 const unsigned len = XVECLEN (op, 0);
2087 /* Starting from end of vector, count register saves. */
2088 for (i = 0; i < len; ++i)
2090 rtx src, dest, addr;
2091 rtx e = XVECEXP (op, 0, len - 1 - i);
2093 if (GET_CODE (e) != SET)
2097 dest = SET_DEST (e);
2099 if (!REG_P (src) || !MEM_P (dest))
2102 addr = XEXP (dest, 0);
2104 /* Good if dest address is in RAX. */
2105 if (REG_P (addr) && REGNO (addr) == AX_REG)
2108 /* Good if dest address is offset of RAX. */
2109 if (GET_CODE (addr) == PLUS
2110 && REG_P (XEXP (addr, 0))
2111 && REGNO (XEXP (addr, 0)) == AX_REG)
2116 return (i >= 12 && i <= 18);
2120 ;; Return true if the vector ends with between 12 and 18 register loads using
2121 ;; RSI as the base address.
2122 (define_predicate "restore_multiple"
2123 (match_code "parallel")
2125 const unsigned len = XVECLEN (op, 0);
2128 /* Starting from end of vector, count register restores. */
2129 for (i = 0; i < len; ++i)
2131 rtx src, dest, addr;
2132 rtx e = XVECEXP (op, 0, len - 1 - i);
2134 if (GET_CODE (e) != SET)
2138 dest = SET_DEST (e);
2140 if (!MEM_P (src) || !REG_P (dest))
2143 addr = XEXP (src, 0);
2145 /* Good if src address is in RSI. */
2146 if (REG_P (addr) && REGNO (addr) == SI_REG)
2149 /* Good if src address is offset of RSI. */
2150 if (GET_CODE (addr) == PLUS
2151 && REG_P (XEXP (addr, 0))
2152 && REGNO (XEXP (addr, 0)) == SI_REG)
2157 return (i >= 12 && i <= 18);
2160 ;; Keylocker specific predicates
2161 (define_predicate "encodekey128_operation"
2162 (match_code "parallel")
2167 if (XVECLEN (op, 0) != 8)
2170 for(i = 0; i < 3; i++)
2172 elt = XVECEXP (op, 0, i + 1);
2173 if (GET_CODE (elt) != SET
2174 || GET_CODE (SET_DEST (elt)) != REG
2175 || GET_MODE (SET_DEST (elt)) != V2DImode
2176 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
2177 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
2178 || GET_MODE (SET_SRC (elt)) != V2DImode
2179 || XVECLEN(SET_SRC (elt), 0) != 1
2180 || XVECEXP(SET_SRC (elt), 0, 0) != const0_rtx)
2184 for(i = 4; i < 7; i++)
2186 elt = XVECEXP (op, 0, i);
2187 if (GET_CODE (elt) != CLOBBER
2188 || GET_MODE (elt) != VOIDmode
2189 || GET_CODE (XEXP (elt, 0)) != REG
2190 || GET_MODE (XEXP (elt, 0)) != V2DImode
2191 || REGNO (XEXP (elt, 0)) != GET_SSE_REGNO (i))
2195 elt = XVECEXP (op, 0, 7);
2196 if (GET_CODE (elt) != CLOBBER
2197 || GET_MODE (elt) != VOIDmode
2198 || GET_CODE (XEXP (elt, 0)) != REG
2199 || GET_MODE (XEXP (elt, 0)) != CCmode
2200 || REGNO (XEXP (elt, 0)) != FLAGS_REG)
2205 (define_predicate "encodekey256_operation"
2206 (match_code "parallel")
2211 if (XVECLEN (op, 0) != 9)
2214 elt = SET_SRC (XVECEXP (op, 0, 0));
2215 elt = XVECEXP (elt, 0, 2);
2217 || REGNO(elt) != GET_SSE_REGNO (1))
2220 for(i = 0; i < 4; i++)
2222 elt = XVECEXP (op, 0, i + 1);
2223 if (GET_CODE (elt) != SET
2224 || GET_CODE (SET_DEST (elt)) != REG
2225 || GET_MODE (SET_DEST (elt)) != V2DImode
2226 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
2227 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
2228 || GET_MODE (SET_SRC (elt)) != V2DImode
2229 || XVECLEN(SET_SRC (elt), 0) != 1
2230 || XVECEXP(SET_SRC (elt), 0, 0) != const0_rtx)
2234 for(i = 4; i < 7; i++)
2236 elt = XVECEXP (op, 0, i + 1);
2237 if (GET_CODE (elt) != CLOBBER
2238 || GET_MODE (elt) != VOIDmode
2239 || GET_CODE (XEXP (elt, 0)) != REG
2240 || GET_MODE (XEXP (elt, 0)) != V2DImode
2241 || REGNO (XEXP (elt, 0)) != GET_SSE_REGNO (i))
2245 elt = XVECEXP (op, 0, 8);
2246 if (GET_CODE (elt) != CLOBBER
2247 || GET_MODE (elt) != VOIDmode
2248 || GET_CODE (XEXP (elt, 0)) != REG
2249 || GET_MODE (XEXP (elt, 0)) != CCmode
2250 || REGNO (XEXP (elt, 0)) != FLAGS_REG)
2256 (define_predicate "aeswidekl_operation"
2257 (match_code "parallel")
2262 for (i = 0; i < 8; i++)
2264 elt = XVECEXP (op, 0, i + 1);
2265 if (GET_CODE (elt) != SET
2266 || GET_CODE (SET_DEST (elt)) != REG
2267 || GET_MODE (SET_DEST (elt)) != V2DImode
2268 || REGNO (SET_DEST (elt)) != GET_SSE_REGNO (i)
2269 || GET_CODE (SET_SRC (elt)) != UNSPEC_VOLATILE
2270 || GET_MODE (SET_SRC (elt)) != V2DImode
2271 || XVECLEN (SET_SRC (elt), 0) != 1
2272 || !REG_P (XVECEXP (SET_SRC (elt), 0, 0))
2273 || REGNO (XVECEXP (SET_SRC (elt), 0, 0)) != GET_SSE_REGNO (i))
2279 ;; Return true if OP is a memory operand that can be also used in APX
2280 ;; EVEX-encoded patterns (i.e. APX NDD/NF) with immediate operand. With
2281 ;; non-default address space, segment register or address size prefix,
2282 ;; APX EVEX-encoded instruction length can exceed the 15 byte size limit.
2283 (define_predicate "apx_evex_memory_operand"
2284 (match_operand 0 "memory_operand")
2286 /* OK if immediate operand size < 4 bytes. */
2287 if (GET_MODE_SIZE (mode) < 4)
2290 bool default_addr = ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (op));
2291 bool address_size_prefix = TARGET_X32 && Pmode == SImode;
2293 struct ix86_address parts;
2297 ok = ix86_decompose_address (op, &parts);
2302 /* Default address space. */
2304 /* Not OK with address size prefix, index register and disp. */
2305 if (address_size_prefix
2308 && parts.disp != const0_rtx)
2313 /* Non-default address space. */
2315 /* Not OK without base register. */
2319 /* Not OK with disp and address size prefix. */
2320 if (address_size_prefix && parts.disp)
2327 ;; Return true if OP is a memory operand which can be used in APX EVEX-encoded
2328 ;; ADD patterns (i.e. APX NDD/NF) for with register source operand.
2329 ;; UNSPEC_GOTNTPOFF memory operand is allowed with APX EVEX-encoded ADD only if
2330 ;; R_X86_64_CODE_6_GOTTPOFF works.
2331 (define_predicate "apx_evex_add_memory_operand"
2332 (match_operand 0 "memory_operand")
2334 /* OK if "add %reg1, name@gottpoff(%rip), %reg2" or
2335 "{nf} add name@gottpoff(%rip), %reg1" are supported. */
2336 if (HAVE_AS_R_X86_64_CODE_6_GOTTPOFF)
2341 /* Disallow APX EVEX-encoded ADD with UNSPEC_GOTNTPOFF. */
2342 if (GET_CODE (op) == CONST
2343 && GET_CODE (XEXP (op, 0)) == UNSPEC
2344 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTNTPOFF)