1 ;; Predicate definitions for IA-32 and x86-64.
2 ;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 ;; Free Software Foundation, Inc.
5 ;; This file is part of GCC.
7 ;; GCC is free software; you can redistribute it and/or modify
8 ;; it under the terms of the GNU General Public License as published by
9 ;; the Free Software Foundation; either version 3, or (at your option)
12 ;; GCC is distributed in the hope that it will be useful,
13 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
14 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 ;; GNU General Public License for more details.
17 ;; You should have received a copy of the GNU General Public License
18 ;; along with GCC; see the file COPYING3. If not see
19 ;; <http://www.gnu.org/licenses/>.
;; Return nonzero if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return nonzero if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))
46 ;; True if the operand is a Q_REGS class register.
47 (define_predicate "q_regs_operand"
48 (match_operand 0 "register_operand")
50 if (GET_CODE (op) == SUBREG)
52 return ANY_QI_REG_P (op);
55 ;; Match an SI or HImode register for a zero_extract.
56 (define_special_predicate "ext_register_operand"
57 (match_operand 0 "register_operand")
59 if ((!TARGET_64BIT || GET_MODE (op) != DImode)
60 && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
62 if (GET_CODE (op) == SUBREG)
65 /* Be careful to accept only registers having upper parts. */
66 return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
;; Return true if op is the AX register.
;; Use the named AX_REG constant rather than the magic number 0,
;; matching the BX_REG usage in ext_QIreg_operand below.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == AX_REG")))
;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return true if op is a QImode register operand other than one of
;; the four low legacy registers (i.e. its REGNO is above BX_REG),
;; which is only possible in 64-bit mode.
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT
		    && GET_MODE (op) == QImode
		    && REGNO (op) > BX_REG")))

;; Return true if op is not xmm0 register.
(define_predicate "reg_not_xmm0_operand"
  (and (match_operand 0 "register_operand")
       (match_test "GET_CODE (op) != REG
		    || REGNO (op) != FIRST_SSE_REG")))

;; As above, but allow nonimmediate operands.
(define_predicate "nonimm_not_xmm0_operand"
  (and (match_operand 0 "nonimmediate_operand")
       (match_test "GET_CODE (op) != REG
		    || REGNO (op) != FIRST_SSE_REG")))
99 ;; Return 1 if VALUE can be stored in a sign extended immediate field.
100 (define_predicate "x86_64_immediate_operand"
101 (match_code "const_int,symbol_ref,label_ref,const")
104 return immediate_operand (op, mode);
106 switch (GET_CODE (op))
109 /* CONST_DOUBLES never match, since HOST_BITS_PER_WIDE_INT is known
110 to be at least 32 and this all acceptable constants are
111 represented as CONST_INT. */
112 if (HOST_BITS_PER_WIDE_INT == 32)
116 HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
117 return trunc_int_for_mode (val, SImode) == val;
122 /* For certain code models, the symbolic references are known to fit.
123 in CM_SMALL_PIC model we know it fits if it is local to the shared
124 library. Don't count TLS SYMBOL_REFs here, since they should fit
125 only if inside of UNSPEC handled below. */
126 /* TLS symbols are not constant. */
127 if (SYMBOL_REF_TLS_MODEL (op))
129 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
130 || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));
133 /* For certain code models, the code is near as well. */
134 return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
135 || ix86_cmodel == CM_KERNEL);
138 /* We also may accept the offsetted memory references in certain
140 if (GET_CODE (XEXP (op, 0)) == UNSPEC)
141 switch (XINT (XEXP (op, 0), 1))
143 case UNSPEC_GOTPCREL:
145 case UNSPEC_GOTNTPOFF:
152 if (GET_CODE (XEXP (op, 0)) == PLUS)
154 rtx op1 = XEXP (XEXP (op, 0), 0);
155 rtx op2 = XEXP (XEXP (op, 0), 1);
156 HOST_WIDE_INT offset;
158 if (ix86_cmodel == CM_LARGE)
160 if (!CONST_INT_P (op2))
162 offset = trunc_int_for_mode (INTVAL (op2), DImode);
163 switch (GET_CODE (op1))
166 /* TLS symbols are not constant. */
167 if (SYMBOL_REF_TLS_MODEL (op1))
169 /* For CM_SMALL assume that latest object is 16MB before
170 end of 31bits boundary. We may also accept pretty
171 large negative constants knowing that all objects are
172 in the positive half of address space. */
173 if ((ix86_cmodel == CM_SMALL
174 || (ix86_cmodel == CM_MEDIUM
175 && !SYMBOL_REF_FAR_ADDR_P (op1)))
176 && offset < 16*1024*1024
177 && trunc_int_for_mode (offset, SImode) == offset)
179 /* For CM_KERNEL we know that all object resist in the
180 negative half of 32bits address space. We may not
181 accept negative offsets, since they may be just off
182 and we may accept pretty large positive ones. */
183 if (ix86_cmodel == CM_KERNEL
185 && trunc_int_for_mode (offset, SImode) == offset)
190 /* These conditions are similar to SYMBOL_REF ones, just the
191 constraints for code models differ. */
192 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
193 && offset < 16*1024*1024
194 && trunc_int_for_mode (offset, SImode) == offset)
196 if (ix86_cmodel == CM_KERNEL
198 && trunc_int_for_mode (offset, SImode) == offset)
203 switch (XINT (op1, 1))
208 && trunc_int_for_mode (offset, SImode) == offset)
226 ;; Return 1 if VALUE can be stored in the zero extended immediate field.
227 (define_predicate "x86_64_zext_immediate_operand"
228 (match_code "const_double,const_int,symbol_ref,label_ref,const")
230 switch (GET_CODE (op))
233 if (HOST_BITS_PER_WIDE_INT == 32)
234 return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
239 if (HOST_BITS_PER_WIDE_INT == 32)
240 return INTVAL (op) >= 0;
242 return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);
245 /* For certain code models, the symbolic references are known to fit. */
246 /* TLS symbols are not constant. */
247 if (SYMBOL_REF_TLS_MODEL (op))
249 return (ix86_cmodel == CM_SMALL
250 || (ix86_cmodel == CM_MEDIUM
251 && !SYMBOL_REF_FAR_ADDR_P (op)));
254 /* For certain code models, the code is near as well. */
255 return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;
258 /* We also may accept the offsetted memory references in certain
260 if (GET_CODE (XEXP (op, 0)) == PLUS)
262 rtx op1 = XEXP (XEXP (op, 0), 0);
263 rtx op2 = XEXP (XEXP (op, 0), 1);
265 if (ix86_cmodel == CM_LARGE)
267 switch (GET_CODE (op1))
270 /* TLS symbols are not constant. */
271 if (SYMBOL_REF_TLS_MODEL (op1))
273 /* For small code model we may accept pretty large positive
274 offsets, since one bit is available for free. Negative
275 offsets are limited by the size of NULL pointer area
276 specified by the ABI. */
277 if ((ix86_cmodel == CM_SMALL
278 || (ix86_cmodel == CM_MEDIUM
279 && !SYMBOL_REF_FAR_ADDR_P (op1)))
281 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
282 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
284 /* ??? For the kernel, we may accept adjustment of
285 -0x10000000, since we know that it will just convert
286 negative address space to positive, but perhaps this
287 is not worthwhile. */
291 /* These conditions are similar to SYMBOL_REF ones, just the
292 constraints for code models differ. */
293 if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
295 && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
296 && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
;; Return nonzero if OP is general operand representable on x86_64.
;; In 64-bit mode immediates must pass x86_64_immediate_operand
;; (sign-extended 32-bit); in 32-bit mode any general operand is fine.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is general operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
	 (ior (match_operand 0 "x86_64_immediate_operand")
	      (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return nonzero if OP is nonmemory operand representable on x86_64
;; as either sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
	 (ior (match_operand 0 "x86_64_immediate_operand")
	      (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "nonmemory_operand")))
343 ;; Return true when operand is PIC expression that can be computed by lea
345 (define_predicate "pic_32bit_operand"
346 (match_code "const,symbol_ref,label_ref")
350 /* Rule out relocations that translate into 64bit constants. */
351 if (TARGET_64BIT && GET_CODE (op) == CONST)
354 if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
356 if (GET_CODE (op) == UNSPEC
357 && (XINT (op, 1) == UNSPEC_GOTOFF
358 || XINT (op, 1) == UNSPEC_GOT))
361 return symbolic_operand (op, mode);
;; Return nonzero if OP is nonmemory operand acceptable by movabs patterns.
;; Without 64-bit PIC any nonmemory operand is acceptable; with 64-bit PIC
;; only registers and CONST_DOUBLEs of at most 8 bytes are allowed.
(define_predicate "x86_64_movabs_operand"
  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
    (match_operand 0 "nonmemory_operand")
    (ior (match_operand 0 "register_operand")
	 (and (match_operand 0 "const_double_operand")
	      (match_test "GET_MODE_SIZE (mode) <= 8")))))
373 ;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
374 ;; reference and a constant.
375 (define_predicate "symbolic_operand"
376 (match_code "symbol_ref,label_ref,const")
378 switch (GET_CODE (op))
386 if (GET_CODE (op) == SYMBOL_REF
387 || GET_CODE (op) == LABEL_REF
388 || (GET_CODE (op) == UNSPEC
389 && (XINT (op, 1) == UNSPEC_GOT
390 || XINT (op, 1) == UNSPEC_GOTOFF
391 || XINT (op, 1) == UNSPEC_GOTPCREL)))
393 if (GET_CODE (op) != PLUS
394 || !CONST_INT_P (XEXP (op, 1)))
398 if (GET_CODE (op) == SYMBOL_REF
399 || GET_CODE (op) == LABEL_REF)
401 /* Only @GOTOFF gets offsets. */
402 if (GET_CODE (op) != UNSPEC
403 || XINT (op, 1) != UNSPEC_GOTOFF)
406 op = XVECEXP (op, 0, 0);
407 if (GET_CODE (op) == SYMBOL_REF
408 || GET_CODE (op) == LABEL_REF)
417 ;; Return true if the operand contains a @GOT or @GOTOFF reference.
418 (define_predicate "pic_symbolic_operand"
424 if (GET_CODE (op) == UNSPEC
425 && XINT (op, 1) == UNSPEC_GOTPCREL)
427 if (GET_CODE (op) == PLUS
428 && GET_CODE (XEXP (op, 0)) == UNSPEC
429 && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
434 if (GET_CODE (op) == UNSPEC)
436 if (GET_CODE (op) != PLUS
437 || !CONST_INT_P (XEXP (op, 1)))
440 if (GET_CODE (op) == UNSPEC
441 && XINT (op, 1) != UNSPEC_MACHOPIC_OFFSET)
447 ;; Return true if OP is a symbolic operand that resolves locally.
448 (define_predicate "local_symbolic_operand"
449 (match_code "const,label_ref,symbol_ref")
451 if (GET_CODE (op) == CONST
452 && GET_CODE (XEXP (op, 0)) == PLUS
453 && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
454 op = XEXP (XEXP (op, 0), 0);
456 if (GET_CODE (op) == LABEL_REF)
459 if (GET_CODE (op) != SYMBOL_REF)
462 if (SYMBOL_REF_TLS_MODEL (op) != 0)
465 if (SYMBOL_REF_LOCAL_P (op))
468 /* There is, however, a not insubstantial body of code in the rest of
469 the compiler that assumes it can just stick the results of
470 ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done. */
471 /* ??? This is a hack. Should update the body of the compiler to
472 always create a DECL an invoke targetm.encode_section_info. */
473 if (strncmp (XSTR (op, 0), internal_label_prefix,
474 internal_label_prefix_len) == 0)
;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (match_test "!TARGET_VXWORKS_RTP")
       (match_operand 0 "local_symbolic_operand")))

;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))

;; True for the SYMBOL_REF referring to the TLS module base.
(define_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

;; True for a register or an UNSPEC_TP (thread pointer) expression.
(define_predicate "tp_or_register_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "unspec")
	    (match_test "XINT (op, 1) == UNSPEC_TP"))))
506 ;; Test for a pc-relative call operand
507 (define_predicate "constant_call_address_operand"
508 (match_code "symbol_ref")
510 if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
512 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
517 ;; True for any non-virtual or eliminable register. Used in places where
518 ;; instantiation of such a register may cause the pattern to not be recognized.
519 (define_predicate "register_no_elim_operand"
520 (match_operand 0 "register_operand")
522 if (GET_CODE (op) == SUBREG)
523 op = SUBREG_REG (op);
524 return !(op == arg_pointer_rtx
525 || op == frame_pointer_rtx
526 || IN_RANGE (REGNO (op),
527 FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
530 ;; Similarly, but include the stack pointer. This is used to prevent esp
531 ;; from being used as an index reg.
532 (define_predicate "index_register_operand"
533 (match_operand 0 "register_operand")
535 if (GET_CODE (op) == SUBREG)
536 op = SUBREG_REG (op);
537 if (reload_in_progress || reload_completed)
538 return REG_OK_FOR_INDEX_STRICT_P (op);
540 return REG_OK_FOR_INDEX_NONSTRICT_P (op);
;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for a call instruction.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (ior (match_operand 0 "register_no_elim_operand")
	    (match_operand 0 "memory_operand"))))

;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))
566 ;; Match exactly zero.
567 (define_predicate "const0_operand"
568 (match_code "const_int,const_double,const_vector")
570 if (mode == VOIDmode)
571 mode = GET_MODE (op);
572 return op == CONST0_RTX (mode);
;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match exactly -128.
(define_predicate "constm128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == -128")))
595 ;; Match 2, 4, or 8. Used for leal multiplicands.
596 (define_predicate "const248_operand"
597 (match_code "const_int")
599 HOST_WIDE_INT i = INTVAL (op);
600 return i == 2 || i == 4 || i == 8;
;; Match 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (match_test "op == const0_rtx || op == const1_rtx")))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))
638 ;; Match (0 to 255) * 8
639 (define_predicate "const_0_to_255_mul_8_operand"
640 (match_code "const_int")
642 unsigned HOST_WIDE_INT val = INTVAL (op);
643 return val <= 255*8 && val % 8 == 0;
;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return nonzero if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match exactly one bit in 2-bit mask.
(define_predicate "const_pow2_1_to_2_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))
693 ;; Match exactly one bit in 4-bit mask.
694 (define_predicate "const_pow2_1_to_8_operand"
695 (match_code "const_int")
697 unsigned int log = exact_log2 (INTVAL (op));
701 ;; Match exactly one bit in 8-bit mask.
702 (define_predicate "const_pow2_1_to_128_operand"
703 (match_code "const_int")
705 unsigned int log = exact_log2 (INTVAL (op));
709 ;; Match exactly one bit in 16-bit mask.
710 (define_predicate "const_pow2_1_to_32768_operand"
711 (match_code "const_int")
713 unsigned int log = exact_log2 (INTVAL (op));
717 ;; True if this is a constant appropriate for an increment or decrement.
718 (define_predicate "incdec_operand"
719 (match_code "const_int")
721 /* On Pentium4, the inc and dec operations causes extra dependency on flag
722 registers, since carry flag is not set. */
723 if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
725 return op == const1_rtx || op == constm1_rtx;
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
	    (match_test "op == const1_rtx || op == constm1_rtx"))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

;; True if OP is acceptable as input of the DImode ashift expander:
;; any nonimmediate in 64-bit mode, otherwise a register or +-1.
(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))
745 ;; Return true if OP is a vector load from the constant pool with just
746 ;; the first element nonzero.
747 (define_predicate "zero_extended_scalar_load_operand"
751 op = maybe_get_pool_constant (op);
754 if (GET_CODE (op) != CONST_VECTOR)
757 (GET_MODE_SIZE (GET_MODE (op)) /
758 GET_MODE_SIZE (GET_MODE_INNER (GET_MODE (op))));
759 for (n_elts--; n_elts > 0; n_elts--)
761 rtx elt = CONST_VECTOR_ELT (op, n_elts);
762 if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
768 /* Return true if operand is a vector constant that is all ones. */
769 (define_predicate "vector_all_ones_operand"
770 (match_code "const_vector")
772 int nunits = GET_MODE_NUNITS (mode);
774 if (GET_CODE (op) == CONST_VECTOR
775 && CONST_VECTOR_NUNITS (op) == nunits)
778 for (i = 0; i < nunits; ++i)
780 rtx x = CONST_VECTOR_ELT (op, i);
781 if (x != constm1_rtx)
;; Return 1 when OP is operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))
795 ;; Return 1 when OP is nonimmediate or standard SSE constant.
796 (define_predicate "nonimmediate_or_sse_const_operand"
797 (match_operand 0 "general_operand")
799 if (nonimmediate_operand (op, mode))
801 if (standard_sse_constant_p (op) > 0)
;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))
811 ;; Return true if op if a valid address, and does not contain
812 ;; a segment override.
813 (define_special_predicate "no_seg_address_operand"
814 (match_operand 0 "address_operand")
816 struct ix86_address parts;
819 ok = ix86_decompose_address (op, &parts);
821 return parts.seg == SEG_DEFAULT;
824 ;; Return nonzero if the rtx is known to be at least 32 bits aligned.
825 (define_predicate "aligned_operand"
826 (match_operand 0 "general_operand")
828 struct ix86_address parts;
831 /* Registers and immediate operands are always "aligned". */
832 if (GET_CODE (op) != MEM)
835 /* All patterns using aligned_operand on memory operands ends up
836 in promoting memory operand to 64bit and thus causing memory mismatch. */
837 if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
840 /* Don't even try to do any aligned optimizations with volatiles. */
841 if (MEM_VOLATILE_P (op))
844 if (MEM_ALIGN (op) >= 32)
849 /* Pushes and pops are only valid on the stack pointer. */
850 if (GET_CODE (op) == PRE_DEC
851 || GET_CODE (op) == POST_INC)
854 /* Decode the address. */
855 ok = ix86_decompose_address (op, &parts);
858 /* Look for some component that isn't known to be aligned. */
861 if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
866 if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
871 if (!CONST_INT_P (parts.disp)
872 || (INTVAL (parts.disp) & 3) != 0)
876 /* Didn't find one -- this must be an aligned address. */
880 ;; Returns 1 if OP is memory operand with a displacement.
881 (define_predicate "memory_displacement_operand"
882 (match_operand 0 "memory_operand")
884 struct ix86_address parts;
887 ok = ix86_decompose_address (XEXP (op, 0), &parts);
889 return parts.disp != NULL_RTX;
892 ;; Returns 1 if OP is memory operand with a displacement only.
893 (define_predicate "memory_displacement_only_operand"
894 (match_operand 0 "memory_operand")
896 struct ix86_address parts;
902 ok = ix86_decompose_address (XEXP (op, 0), &parts);
905 if (parts.base || parts.index)
908 return parts.disp != NULL_RTX;
911 ;; Returns 1 if OP is memory operand which will need zero or
912 ;; one register at most, not counting stack pointer or frame pointer.
913 (define_predicate "cmpxchg8b_pic_memory_operand"
914 (match_operand 0 "memory_operand")
916 struct ix86_address parts;
919 ok = ix86_decompose_address (XEXP (op, 0), &parts);
921 if (parts.base == NULL_RTX
922 || parts.base == arg_pointer_rtx
923 || parts.base == frame_pointer_rtx
924 || parts.base == hard_frame_pointer_rtx
925 || parts.base == stack_pointer_rtx)
928 if (parts.index == NULL_RTX
929 || parts.index == arg_pointer_rtx
930 || parts.index == frame_pointer_rtx
931 || parts.index == hard_frame_pointer_rtx
932 || parts.index == stack_pointer_rtx)
;; Returns 1 if OP is memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op) != 0")))
945 ;; Return 1 if OP is a comparison operator that can be issued by fcmov.
946 (define_predicate "fcmov_comparison_operator"
947 (match_operand 0 "comparison_operator")
949 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
950 enum rtx_code code = GET_CODE (op);
952 if (inmode == CCFPmode || inmode == CCFPUmode)
954 enum rtx_code second_code, bypass_code;
955 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
956 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
958 code = ix86_fp_compare_code_to_integer (code);
960 /* i387 supports just limited amount of conditional codes. */
963 case LTU: case GTU: case LEU: case GEU:
964 if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
965 || inmode == CCCmode)
968 case ORDERED: case UNORDERED:
;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
;;
;; ??? It would seem that we have a lot of uses of this predicate that pass
;; it the wrong mode.  We got away with this because the old function didn't
;; check the mode at all.  Mirror that for now by calling this a special
;; predicate.
(define_special_predicate "sse_comparison_operator"
  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))

;; Return 1 if OP is a comparison operator that can be issued by
;; avx predicate generation instructions
(define_predicate "avx_comparison_float_operator"
  (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt"))

;; Return 1 if OP is a comparison operator that can be issued by sse predicate
;; generation instructions
(define_predicate "sse5_comparison_float_operator"
  (and (match_test "TARGET_SSE5")
       (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt")))

;; Signed integer comparison codes.
(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

;; Unsigned integer comparison codes.
(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

;; Comparison codes usable with the bit-test (BT) patterns.
(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))
1008 ;; Return 1 if OP is a valid comparison operator in valid mode.
1009 (define_predicate "ix86_comparison_operator"
1010 (match_operand 0 "comparison_operator")
1012 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1013 enum rtx_code code = GET_CODE (op);
1015 if (inmode == CCFPmode || inmode == CCFPUmode)
1017 enum rtx_code second_code, bypass_code;
1018 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
1019 return (bypass_code == UNKNOWN && second_code == UNKNOWN);
1026 if (inmode == CCmode || inmode == CCGCmode
1027 || inmode == CCGOCmode || inmode == CCNOmode)
1030 case LTU: case GTU: case LEU: case GEU:
1031 if (inmode == CCmode || inmode == CCCmode)
1034 case ORDERED: case UNORDERED:
1035 if (inmode == CCmode)
1039 if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
1047 ;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
1048 (define_predicate "ix86_carry_flag_operator"
1049 (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
1051 enum machine_mode inmode = GET_MODE (XEXP (op, 0));
1052 enum rtx_code code = GET_CODE (op);
1054 if (!REG_P (XEXP (op, 0))
1055 || REGNO (XEXP (op, 0)) != FLAGS_REG
1056 || XEXP (op, 1) != const0_rtx)
1059 if (inmode == CCFPmode || inmode == CCFPUmode)
1061 enum rtx_code second_code, bypass_code;
1062 ix86_fp_comparison_codes (code, &bypass_code, &code, &second_code);
1063 if (bypass_code != UNKNOWN || second_code != UNKNOWN)
1065 code = ix86_fp_compare_code_to_integer (code);
1067 else if (inmode == CCCmode)
1068 return code == LTU || code == GTU;
1069 else if (inmode != CCmode)
;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into registers.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))
;; Return true if this is a division operation.
;; NOTE(review): the match_code body was missing from this definition as
;; received; reconstructed by parallel with mult_operator above.
(define_predicate "div_operator"
  (match_code "div"))
;; Return true if this is a float extend operation (a FLOAT rtx,
;; i.e. integer-to-float conversion).
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
	       mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return 1 if OP is a binary operator that can be promoted to wider mode.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
	    (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))
1113 ;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
1114 ;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
1116 ;; ??? It seems likely that this will only work because cmpsi is an
1117 ;; expander, and no actual insns use this.
1119 (define_predicate "cmpsi_operand"
1120 (ior (match_operand 0 "nonimmediate_operand")
1121 (and (match_code "and")
1122 (match_code "zero_extract" "0")
1123 (match_code "const_int" "1")
1124 (match_code "const_int" "01")
1125 (match_code "const_int" "02")
1126 (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
1127 (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
;; True for a COMPARE rtx.
(define_predicate "compare_operator"
  (match_code "compare"))

;; True for ABS and NEG.
(define_predicate "absneg_operator"
  (match_code "abs,neg"))

;; Return 1 if OP is misaligned memory operand
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))
1141 ;; Return 1 if OP is a vzeroall operation, known to be a PARALLEL.
1142 (define_predicate "vzeroall_operation"
1143 (match_code "parallel")
1145 int nregs = TARGET_64BIT ? 16 : 8;
1147 if (XVECLEN (op, 0) != nregs + 1)