;; Predicate definitions for IA-32 and x86-64.
;; Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
;; Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; Return nonzero if OP is either an i387 or SSE fp register.
(define_predicate "any_fp_register_operand"
  (and (match_code "reg")
       (match_test "ANY_FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is an i387 fp register.
(define_predicate "fp_register_operand"
  (and (match_code "reg")
       (match_test "FP_REGNO_P (REGNO (op))")))

;; Return nonzero if OP is a non-fp register_operand.
(define_predicate "register_and_not_any_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "ANY_FP_REGNO_P (REGNO (op))"))))

;; Return nonzero if OP is a register operand other than an i387 fp register.
(define_predicate "register_and_not_fp_reg_operand"
  (and (match_code "reg")
       (not (match_test "FP_REGNO_P (REGNO (op))"))))

;; True if the operand is an MMX register.
(define_predicate "mmx_reg_operand"
  (and (match_code "reg")
       (match_test "MMX_REGNO_P (REGNO (op))")))

;; True if the operand is a Q_REGS class register.
(define_predicate "q_regs_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return ANY_QI_REG_P (op);
})

;; Match an SI or HImode register for a zero_extract.
(define_special_predicate "ext_register_operand"
  (match_operand 0 "register_operand")
{
  if ((!TARGET_64BIT || GET_MODE (op) != DImode)
      && GET_MODE (op) != SImode && GET_MODE (op) != HImode)
    return 0;
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);

  /* Be careful to accept only registers having upper parts.  */
  return REGNO (op) > LAST_VIRTUAL_REGISTER || REGNO (op) < 4;
})

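;; For illustration, the high-byte accesses this predicate guards typically
;; take a form such as
;;   (zero_extract:SI (match_operand 0 "ext_register_operand")
;;                    (const_int 8) (const_int 8))
;; i.e. bits 8..15 of the value, which only %eax/%ebx/%ecx/%edx (hard
;; registers 0..3) and pseudos can provide.
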
;; Return true if op is the AX register.
(define_predicate "ax_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == 0")))

;; Return true if op is the flags register.
(define_predicate "flags_reg_operand"
  (and (match_code "reg")
       (match_test "REGNO (op) == FLAGS_REG")))

;; Return true if op is a QImode register operand other than
;; %[abcd][hl].
(define_predicate "ext_QIreg_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT
                    && GET_MODE (op) == QImode
                    && REGNO (op) > BX_REG")))

;; Similarly, but don't check mode of the operand.
(define_predicate "ext_QIreg_nomode_operand"
  (and (match_code "reg")
       (match_test "TARGET_64BIT
                    && REGNO (op) > BX_REG")))

;; Return true if op is not the xmm0 register.
(define_predicate "reg_not_xmm0_operand"
  (and (match_operand 0 "register_operand")
       (match_test "!REG_P (op)
                    || REGNO (op) != FIRST_SSE_REG")))

;; As above, but allow nonimmediate operands.
(define_predicate "nonimm_not_xmm0_operand"
  (and (match_operand 0 "nonimmediate_operand")
       (match_test "!REG_P (op)
                    || REGNO (op) != FIRST_SSE_REG")))

;; Return 1 if VALUE can be stored in a sign extended immediate field.
(define_predicate "x86_64_immediate_operand"
  (match_code "const_int,symbol_ref,label_ref,const")
{
  if (!TARGET_64BIT)
    return immediate_operand (op, mode);

  switch (GET_CODE (op))
    {
    case CONST_INT:
      /* CONST_DOUBLEs never match, since HOST_BITS_PER_WIDE_INT is known
         to be at least 32 and thus all acceptable constants are
         represented as CONST_INT.  */
      if (HOST_BITS_PER_WIDE_INT == 32)
        return 1;
      else
        {
          HOST_WIDE_INT val = trunc_int_for_mode (INTVAL (op), DImode);
          return trunc_int_for_mode (val, SImode) == val;
        }
      break;

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.
         In CM_SMALL_PIC model we know it fits if it is local to the shared
         library.  Don't count TLS SYMBOL_REFs here, since they should fit
         only if inside of UNSPEC handled below.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_KERNEL
              || (ix86_cmodel == CM_MEDIUM && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return (ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM
              || ix86_cmodel == CM_KERNEL);

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
        switch (XINT (XEXP (op, 0), 1))
          {
          case UNSPEC_GOTPCREL:
          case UNSPEC_DTPOFF:
          case UNSPEC_GOTNTPOFF:
          case UNSPEC_NTPOFF:
            return 1;
          default:
            break;
          }

      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);
          HOST_WIDE_INT offset;

          if (ix86_cmodel == CM_LARGE)
            return 0;
          if (!CONST_INT_P (op2))
            return 0;
          offset = trunc_int_for_mode (INTVAL (op2), DImode);
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For CM_SMALL assume that the latest object is 16MB before
                 the end of the 31-bit boundary.  We may also accept pretty
                 large negative constants knowing that all objects are
                 in the positive half of the address space.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              /* For CM_KERNEL we know that all objects reside in the
                 negative half of the 32-bit address space.  We may not
                 accept negative offsets, since they may be just off
                 and we may accept pretty large positive ones.  */
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && offset < 16*1024*1024
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              if (ix86_cmodel == CM_KERNEL
                  && offset > 0
                  && trunc_int_for_mode (offset, SImode) == offset)
                return 1;
              break;

            case UNSPEC:
              switch (XINT (op1, 1))
                {
                case UNSPEC_DTPOFF:
                case UNSPEC_NTPOFF:
                  if (offset > 0
                      && trunc_int_for_mode (offset, SImode) == offset)
                    return 1;
                }
              break;

            default:
              break;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }

  return 0;
})

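;; For instance, in 64-bit mode (const_int 0x7fffffff) is accepted because
;; it equals its own 32-bit sign extension, while (const_int 0xffffffff) is
;; rejected: sign-extending its low 32 bits yields -1, a different value.
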
;; Return 1 if VALUE can be stored in the zero extended immediate field.
(define_predicate "x86_64_zext_immediate_operand"
  (match_code "const_double,const_int,symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case CONST_DOUBLE:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return (GET_MODE (op) == VOIDmode && !CONST_DOUBLE_HIGH (op));
      else
        return 0;

    case CONST_INT:
      if (HOST_BITS_PER_WIDE_INT == 32)
        return INTVAL (op) >= 0;
      else
        return !(INTVAL (op) & ~(HOST_WIDE_INT) 0xffffffff);

    case SYMBOL_REF:
      /* For certain code models, the symbolic references are known to fit.  */
      /* TLS symbols are not constant.  */
      if (SYMBOL_REF_TLS_MODEL (op))
        return false;
      return (ix86_cmodel == CM_SMALL
              || (ix86_cmodel == CM_MEDIUM
                  && !SYMBOL_REF_FAR_ADDR_P (op)));

    case LABEL_REF:
      /* For certain code models, the code is near as well.  */
      return ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM;

    case CONST:
      /* We also may accept the offsetted memory references in certain
         special cases.  */
      if (GET_CODE (XEXP (op, 0)) == PLUS)
        {
          rtx op1 = XEXP (XEXP (op, 0), 0);
          rtx op2 = XEXP (XEXP (op, 0), 1);

          if (ix86_cmodel == CM_LARGE)
            return 0;
          switch (GET_CODE (op1))
            {
            case SYMBOL_REF:
              /* TLS symbols are not constant.  */
              if (SYMBOL_REF_TLS_MODEL (op1))
                return 0;
              /* For small code model we may accept pretty large positive
                 offsets, since one bit is available for free.  Negative
                 offsets are limited by the size of NULL pointer area
                 specified by the ABI.  */
              if ((ix86_cmodel == CM_SMALL
                   || (ix86_cmodel == CM_MEDIUM
                       && !SYMBOL_REF_FAR_ADDR_P (op1)))
                  && CONST_INT_P (op2)
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              /* ??? For the kernel, we may accept adjustment of
                 -0x10000000, since we know that it will just convert
                 negative address space to positive, but perhaps this
                 is not worthwhile.  */
              break;

            case LABEL_REF:
              /* These conditions are similar to SYMBOL_REF ones, just the
                 constraints for code models differ.  */
              if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
                  && CONST_INT_P (op2)
                  && trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
                  && trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
                return 1;
              break;

            default:
              return 0;
            }
        }
      break;

    default:
      gcc_unreachable ();
    }
  return 0;
})

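;; Here the roles are reversed relative to x86_64_immediate_operand: for
;; instance, (const_int 0xffffffff) is accepted (all bits above bit 31 are
;; clear), while (const_int -1) is rejected because its zero extension is
;; 0xffffffff, not -1.
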
;; Return nonzero if OP is a general operand representable on x86_64.
(define_predicate "x86_64_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is a general operand representable on x86_64
;; as either a sign extended or zero extended constant.
(define_predicate "x86_64_szext_general_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "nonimmediate_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "general_operand")))

;; Return nonzero if OP is a nonmemory operand representable on x86_64.
(define_predicate "x86_64_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (match_operand 0 "x86_64_immediate_operand"))
    (match_operand 0 "nonmemory_operand")))

;; Return nonzero if OP is a nonmemory operand representable on x86_64
;; as either a sign extended or zero extended constant.
(define_predicate "x86_64_szext_nonmemory_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (ior (match_operand 0 "register_operand")
         (ior (match_operand 0 "x86_64_immediate_operand")
              (match_operand 0 "x86_64_zext_immediate_operand")))
    (match_operand 0 "nonmemory_operand")))

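;; For illustration, a 64-bit arithmetic pattern would typically take its
;; second input as something like
;;   (match_operand:DI 2 "x86_64_general_operand" "rme")
;; so that "addq $-1, %rax" (a sign-extendable immediate) is accepted, while
;; an immediate with no 32-bit encoding, e.g. 0x123456789, is first forced
;; into a register.
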
;; Return true when operand is PIC expression that can be computed by lea
;; operation.
(define_predicate "pic_32bit_operand"
  (match_code "const,symbol_ref,label_ref")
{
  if (!flag_pic)
    return 0;
  /* Rule out relocations that translate into 64bit constants.  */
  if (TARGET_64BIT && GET_CODE (op) == CONST)
    {
      op = XEXP (op, 0);
      if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
        op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
          && (XINT (op, 1) == UNSPEC_GOTOFF
              || XINT (op, 1) == UNSPEC_GOT))
        return 0;
    }
  return symbolic_operand (op, mode);
})

;; Return nonzero if OP is a nonmemory operand acceptable by movabs patterns.
(define_predicate "x86_64_movabs_operand"
  (if_then_else (match_test "!TARGET_64BIT || !flag_pic")
    (match_operand 0 "nonmemory_operand")
    (ior (match_operand 0 "register_operand")
         (and (match_operand 0 "const_double_operand")
              (match_test "GET_MODE_SIZE (mode) <= 8")))))

;; Returns nonzero if OP is either a symbol reference or a sum of a symbol
;; reference and a constant.
(define_predicate "symbolic_operand"
  (match_code "symbol_ref,label_ref,const")
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF
          || (GET_CODE (op) == UNSPEC
              && (XINT (op, 1) == UNSPEC_GOT
                  || XINT (op, 1) == UNSPEC_GOTOFF
                  || XINT (op, 1) == UNSPEC_GOTPCREL)))
        return 1;
      if (GET_CODE (op) != PLUS
          || !CONST_INT_P (XEXP (op, 1)))
        return 0;

      op = XEXP (op, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      /* Only @GOTOFF gets offsets.  */
      if (GET_CODE (op) != UNSPEC
          || XINT (op, 1) != UNSPEC_GOTOFF)
        return 0;

      op = XVECEXP (op, 0, 0);
      if (GET_CODE (op) == SYMBOL_REF
          || GET_CODE (op) == LABEL_REF)
        return 1;
      return 0;

    default:
      gcc_unreachable ();
    }
})

;; Return true if the operand contains a @GOT or @GOTOFF reference.
(define_predicate "pic_symbolic_operand"
  (match_code "const")
{
  op = XEXP (op, 0);
  if (TARGET_64BIT)
    {
      if (GET_CODE (op) == UNSPEC
          && XINT (op, 1) == UNSPEC_GOTPCREL)
        return 1;
      if (GET_CODE (op) == PLUS
          && GET_CODE (XEXP (op, 0)) == UNSPEC
          && XINT (XEXP (op, 0), 1) == UNSPEC_GOTPCREL)
        return 1;
    }
  else
    {
      if (GET_CODE (op) == UNSPEC)
        return 1;
      if (GET_CODE (op) != PLUS
          || !CONST_INT_P (XEXP (op, 1)))
        return 0;
      op = XEXP (op, 0);
      if (GET_CODE (op) == UNSPEC
          && XINT (op, 1) != UNSPEC_MACHOPIC_OFFSET)
        return 1;
    }
  return 0;
})

;; Return true if OP is a symbolic operand that resolves locally.
(define_predicate "local_symbolic_operand"
  (match_code "const,label_ref,symbol_ref")
{
  if (GET_CODE (op) == CONST
      && GET_CODE (XEXP (op, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
    op = XEXP (XEXP (op, 0), 0);

  if (GET_CODE (op) == LABEL_REF)
    return 1;

  if (GET_CODE (op) != SYMBOL_REF)
    return 0;

  if (SYMBOL_REF_TLS_MODEL (op) != 0)
    return 0;

  if (SYMBOL_REF_LOCAL_P (op))
    return 1;

  /* There is, however, a not insubstantial body of code in the rest of
     the compiler that assumes it can just stick the results of
     ASM_GENERATE_INTERNAL_LABEL in a symbol_ref and have done.  */
  /* ??? This is a hack.  Should update the body of the compiler to
     always create a DECL and invoke targetm.encode_section_info.  */
  if (strncmp (XSTR (op, 0), internal_label_prefix,
               internal_label_prefix_len) == 0)
    return 1;

  return 0;
})

;; Test for a legitimate @GOTOFF operand.
;;
;; VxWorks does not impose a fixed gap between segments; the run-time
;; gap can be different from the object-file gap.  We therefore can't
;; use @GOTOFF unless we are absolutely sure that the symbol is in the
;; same segment as the GOT.  Unfortunately, the flexibility of linker
;; scripts means that we can't be sure of that in general, so assume
;; that @GOTOFF is never valid on VxWorks.
(define_predicate "gotoff_operand"
  (and (match_test "!TARGET_VXWORKS_RTP")
       (match_operand 0 "local_symbolic_operand")))

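;; For illustration, an @GOTOFF operand is ultimately emitted as something
;; like "leal sym@GOTOFF(%ebx), %eax", which encodes the link-time distance
;; between sym and the GOT -- exactly the quantity that VxWorks RTP cannot
;; keep fixed at run time.
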
;; Test for various thread-local symbols.
(define_predicate "tls_symbolic_operand"
  (and (match_code "symbol_ref")
       (match_test "SYMBOL_REF_TLS_MODEL (op) != 0")))

(define_predicate "tls_modbase_operand"
  (and (match_code "symbol_ref")
       (match_test "op == ix86_tls_module_base ()")))

(define_predicate "tp_or_register_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "unspec")
            (match_test "XINT (op, 1) == UNSPEC_TP"))))

;; Test for a pc-relative call operand.
(define_predicate "constant_call_address_operand"
  (match_code "symbol_ref")
{
  if (ix86_cmodel == CM_LARGE || ix86_cmodel == CM_LARGE_PIC)
    return false;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES && SYMBOL_REF_DLLIMPORT_P (op))
    return false;
  return true;
})

;; True for any register that is neither virtual nor eliminable.  Used in
;; places where instantiation of such a register may cause the pattern to
;; not be recognized.
(define_predicate "register_no_elim_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  return !(op == arg_pointer_rtx
           || op == frame_pointer_rtx
           || IN_RANGE (REGNO (op),
                        FIRST_PSEUDO_REGISTER, LAST_VIRTUAL_REGISTER));
})

;; Similarly, but also exclude the stack pointer.  This is used to prevent
;; esp from being used as an index reg.
(define_predicate "index_register_operand"
  (match_operand 0 "register_operand")
{
  if (GET_CODE (op) == SUBREG)
    op = SUBREG_REG (op);
  if (reload_in_progress || reload_completed)
    return REG_OK_FOR_INDEX_STRICT_P (op);
  else
    return REG_OK_FOR_INDEX_NONSTRICT_P (op);
})

;; Return false if this is any eliminable register.  Otherwise general_operand.
(define_predicate "general_no_elim_operand"
  (if_then_else (match_code "reg,subreg")
    (match_operand 0 "register_no_elim_operand")
    (match_operand 0 "general_operand")))

;; Return false if this is any eliminable register.  Otherwise
;; register_operand or a constant.
(define_predicate "nonmemory_no_elim_operand"
  (ior (match_operand 0 "register_no_elim_operand")
       (match_operand 0 "immediate_operand")))

;; Test for a valid operand for a call instruction.
(define_predicate "call_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (ior (and (match_operand 0 "register_no_elim_operand")
                 (ior (match_test "TARGET_CALL_ESP")
                      (match_operand 0 "index_register_operand")))
            (match_operand 0 "memory_operand"))))

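;; For illustration, this accepts the target of "call foo" (a constant call
;; address), "call *%eax" (a non-virtual, non-eliminable register), and
;; "call *4(%esp)" (a memory operand); the TARGET_CALL_ESP /
;; index_register_operand test merely restricts which registers may hold
;; the target on tunings where calls through %esp are avoided.
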
;; Similarly, but for tail calls, in which we cannot allow memory references.
(define_predicate "sibcall_insn_operand"
  (ior (match_operand 0 "constant_call_address_operand")
       (match_operand 0 "register_no_elim_operand")))

;; Match exactly zero.
(define_predicate "const0_operand"
  (match_code "const_int,const_double,const_vector")
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);
  return op == CONST0_RTX (mode);
})

;; Match exactly one.
(define_predicate "const1_operand"
  (and (match_code "const_int")
       (match_test "op == const1_rtx")))

;; Match exactly eight.
(define_predicate "const8_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 8")))

;; Match exactly 128.
(define_predicate "const128_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 128")))

;; Match 2, 4, or 8.  Used for leal multiplicands.
(define_predicate "const248_operand"
  (match_code "const_int")
{
  HOST_WIDE_INT i = INTVAL (op);
  return i == 2 || i == 4 || i == 8;
})

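;; For illustration, this is the scale factor in an address such as
;;   (plus:SI (mult:SI (reg:SI index) (const_int 4)) (reg:SI base))
;; i.e. the 2/4/8 scaling of "leal (%ebx,%ecx,4), %eax"; a scale of 1 is
;; simply written without the mult.
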
;; Match exactly 0 or 1.
(define_predicate "const_0_to_1_operand"
  (and (match_code "const_int")
       (match_test "op == const0_rtx || op == const1_rtx")))

;; Match 0 to 3.
(define_predicate "const_0_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 3)")))

;; Match 0 to 7.
(define_predicate "const_0_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 7)")))

;; Match 0 to 15.
(define_predicate "const_0_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 15)")))

;; Match 0 to 31.
(define_predicate "const_0_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 31)")))

;; Match 0 to 63.
(define_predicate "const_0_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 63)")))

;; Match 0 to 255.
(define_predicate "const_0_to_255_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 0, 255)")))

;; Match (0 to 255) * 8.
(define_predicate "const_0_to_255_mul_8_operand"
  (match_code "const_int")
{
  unsigned HOST_WIDE_INT val = INTVAL (op);
  return val <= 255*8 && val % 8 == 0;
})

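;; In other words, the accepted values are 0, 8, 16, ..., 2040: bit
;; positions or bit counts that fall on a byte boundary.
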
;; Return nonzero if OP is CONST_INT >= 1 and <= 31 (a valid operand
;; for shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_31_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 31)")))

;; Return nonzero if OP is CONST_INT >= 1 and <= 63 (a valid operand
;; for 64bit shift & compare patterns, as shifting by 0 does not change flags).
(define_predicate "const_1_to_63_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 1, 63)")))

;; Match 2 or 3.
(define_predicate "const_2_to_3_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 2, 3)")))

;; Match 4 or 5.
(define_predicate "const_4_to_5_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 5)")))

;; Match 4 to 7.
(define_predicate "const_4_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 4, 7)")))

;; Match 6 or 7.
(define_predicate "const_6_to_7_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 6, 7)")))

;; Match 8 to 11.
(define_predicate "const_8_to_11_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 8, 11)")))

;; Match 12 to 15.
(define_predicate "const_12_to_15_operand"
  (and (match_code "const_int")
       (match_test "IN_RANGE (INTVAL (op), 12, 15)")))

;; Match exactly one bit in 2-bit mask.
(define_predicate "const_pow2_1_to_2_operand"
  (and (match_code "const_int")
       (match_test "INTVAL (op) == 1 || INTVAL (op) == 2")))

;; Match exactly one bit in 4-bit mask.
(define_predicate "const_pow2_1_to_8_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 3;
})

;; Match exactly one bit in 8-bit mask.
(define_predicate "const_pow2_1_to_128_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 7;
})

;; Match exactly one bit in 16-bit mask.
(define_predicate "const_pow2_1_to_32768_operand"
  (match_code "const_int")
{
  unsigned int log = exact_log2 (INTVAL (op));
  return log <= 15;
})

;; True if this is a constant appropriate for an increment or decrement.
(define_predicate "incdec_operand"
  (match_code "const_int")
{
  /* On Pentium4, the inc and dec operations cause an extra dependency on
     the flags register, since the carry flag is not set.  */
  if (!TARGET_USE_INCDEC && !optimize_insn_for_size_p ())
    return 0;
  return op == const1_rtx || op == constm1_rtx;
})

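;; For instance, on tunings where TARGET_USE_INCDEC is clear (such as
;; Pentium 4), "addl $1, %eax" is preferred over "incl %eax" unless
;; optimizing for size, because inc/dec leave CF unchanged and therefore
;; partially depend on the previous flags value.
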
;; True for registers, or 1 or -1.  Used to optimize double-word shifts.
(define_predicate "reg_or_pm1_operand"
  (ior (match_operand 0 "register_operand")
       (and (match_code "const_int")
            (match_test "op == const1_rtx || op == constm1_rtx"))))

;; True if OP is acceptable as operand of DImode shift expander.
(define_predicate "shiftdi_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "register_operand")))

(define_predicate "ashldi_input_operand"
  (if_then_else (match_test "TARGET_64BIT")
    (match_operand 0 "nonimmediate_operand")
    (match_operand 0 "reg_or_pm1_operand")))

;; Return true if OP is a vector load from the constant pool with just
;; the first element nonzero.
(define_predicate "zero_extended_scalar_load_operand"
  (match_code "mem")
{
  unsigned n_elts;
  op = maybe_get_pool_constant (op);

  if (!(op && GET_CODE (op) == CONST_VECTOR))
    return 0;

  n_elts = CONST_VECTOR_NUNITS (op);

  for (n_elts--; n_elts > 0; n_elts--)
    {
      rtx elt = CONST_VECTOR_ELT (op, n_elts);
      if (elt != CONST0_RTX (GET_MODE_INNER (GET_MODE (op))))
        return 0;
    }
  return 1;
})

;; Return true if operand is a vector constant that is all ones.
(define_predicate "vector_all_ones_operand"
  (match_code "const_vector")
{
  int nunits = GET_MODE_NUNITS (mode);

  if (GET_CODE (op) == CONST_VECTOR
      && CONST_VECTOR_NUNITS (op) == nunits)
    {
      int i;
      for (i = 0; i < nunits; ++i)
        {
          rtx x = CONST_VECTOR_ELT (op, i);
          if (x != constm1_rtx)
            return 0;
        }
      return 1;
    }

  return 0;
})

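;; For V4SImode, for example, this accepts
;;   (const_vector:V4SI [(const_int -1) (const_int -1)
;;                       (const_int -1) (const_int -1)])
;; the constant typically materialized with a pcmpeq-style "all ones" idiom.
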
;; Return 1 when OP is an operand acceptable for standard SSE move.
(define_predicate "vector_move_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (match_operand 0 "const0_operand")))

;; Return 1 when OP is nonimmediate or standard SSE constant.
(define_predicate "nonimmediate_or_sse_const_operand"
  (match_operand 0 "general_operand")
{
  if (nonimmediate_operand (op, mode))
    return 1;
  if (standard_sse_constant_p (op) > 0)
    return 1;
  return 0;
})

;; Return true if OP is a register or a zero.
(define_predicate "reg_or_0_operand"
  (ior (match_operand 0 "register_operand")
       (match_operand 0 "const0_operand")))

;; Return true if op is a valid address, and does not contain
;; a segment override.
(define_special_predicate "no_seg_address_operand"
  (match_operand 0 "address_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);
  return parts.seg == SEG_DEFAULT;
})

;; Return nonzero if the rtx is known to be at least 32 bits aligned.
(define_predicate "aligned_operand"
  (match_operand 0 "general_operand")
{
  struct ix86_address parts;
  int ok;

  /* Registers and immediate operands are always "aligned".  */
  if (!MEM_P (op))
    return 1;

  /* All patterns using aligned_operand on memory operands end up
     promoting the memory operand to 64bit and thus causing memory mismatch.  */
  if (TARGET_MEMORY_MISMATCH_STALL && !optimize_insn_for_size_p ())
    return 0;

  /* Don't even try to do any aligned optimizations with volatiles.  */
  if (MEM_VOLATILE_P (op))
    return 0;

  op = XEXP (op, 0);

  /* Pushes and pops are only valid on the stack pointer.  */
  if (GET_CODE (op) == PRE_DEC
      || GET_CODE (op) == POST_INC)
    return 1;

  /* Decode the address.  */
  ok = ix86_decompose_address (op, &parts);
  gcc_assert (ok);

  /* Look for some component that isn't known to be aligned.  */
  if (parts.index)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.index)) * parts.scale < 32)
        return 0;
    }
  if (parts.base)
    {
      if (REGNO_POINTER_ALIGN (REGNO (parts.base)) < 32)
        return 0;
    }
  if (parts.disp)
    {
      if (!CONST_INT_P (parts.disp)
          || (INTVAL (parts.disp) & 3) != 0)
        return 0;
    }

  /* Didn't find one -- this must be an aligned address.  */
  return 1;
})

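;; For instance, even with a stack pointer known to be 32-bit aligned, a
;; reference such as
;;   (mem:SI (plus:SI (reg:SI sp) (const_int 6)))
;; is rejected here because the displacement is not a multiple of 4.
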
;; Returns 1 if OP is a memory operand with a displacement.
(define_predicate "memory_displacement_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  return parts.disp != NULL_RTX;
})

;; Returns 1 if OP is a memory operand with a displacement only.
(define_predicate "memory_displacement_only_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  if (TARGET_64BIT)
    return 0;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);

  if (parts.base || parts.index)
    return 0;

  return parts.disp != NULL_RTX;
})

;; Returns 1 if OP is a memory operand which will need zero or
;; one register at most, not counting stack pointer or frame pointer.
(define_predicate "cmpxchg8b_pic_memory_operand"
  (match_operand 0 "memory_operand")
{
  struct ix86_address parts;
  int ok;

  ok = ix86_decompose_address (XEXP (op, 0), &parts);
  gcc_assert (ok);
  if (parts.base == NULL_RTX
      || parts.base == arg_pointer_rtx
      || parts.base == frame_pointer_rtx
      || parts.base == hard_frame_pointer_rtx
      || parts.base == stack_pointer_rtx)
    return 1;

  if (parts.index == NULL_RTX
      || parts.index == arg_pointer_rtx
      || parts.index == frame_pointer_rtx
      || parts.index == hard_frame_pointer_rtx
      || parts.index == stack_pointer_rtx)
    return 1;

  return 0;
})

;; Returns 1 if OP is a memory operand that cannot be represented
;; by the modRM array.
(define_predicate "long_memory_operand"
  (and (match_operand 0 "memory_operand")
       (match_test "memory_address_length (op) != 0")))

;; Return 1 if OP is a comparison operator that can be issued by fcmov.
(define_predicate "fcmov_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  /* i387 supports just a limited set of condition codes.  */
  switch (code)
    {
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCFPmode || inmode == CCFPUmode
          || inmode == CCCmode)
        return 1;
      return 0;
    case ORDERED: case UNORDERED:
    case EQ: case NE:
      return 1;
    default:
      return 0;
    }
})

;; Return 1 if OP is a comparison that can be used in the CMPSS/CMPPS insns.
;; The first set are supported directly; the second set can't be done with
;; full IEEE support, i.e. NaNs.
;;
;; ??? It would seem that we have a lot of uses of this predicate that pass
;; it the wrong mode.  We got away with this because the old function didn't
;; check the mode at all.  Mirror that for now by calling this a special
;; predicate.

(define_special_predicate "sse_comparison_operator"
  (match_code "eq,lt,le,unordered,ne,unge,ungt,ordered"))

;; Return 1 if OP is a comparison operator that can be issued by
;; avx predicate generation instructions.
(define_predicate "avx_comparison_float_operator"
  (match_code "ne,eq,ge,gt,le,lt,unordered,ordered,uneq,unge,ungt,unle,unlt,ltgt"))

(define_predicate "ix86_comparison_int_operator"
  (match_code "ne,eq,ge,gt,le,lt"))

(define_predicate "ix86_comparison_uns_operator"
  (match_code "ne,eq,geu,gtu,leu,ltu"))

(define_predicate "bt_comparison_operator"
  (match_code "ne,eq"))

;; Return 1 if OP is a valid comparison operator in valid mode.
(define_predicate "ix86_comparison_operator"
  (match_operand 0 "comparison_operator")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (inmode == CCFPmode || inmode == CCFPUmode)
    return ix86_trivial_fp_comparison_operator (op, mode);

  switch (code)
    {
    case EQ: case NE:
      return 1;
    case LT: case GE:
      if (inmode == CCmode || inmode == CCGCmode
          || inmode == CCGOCmode || inmode == CCNOmode)
        return 1;
      return 0;
    case LTU: case GTU: case LEU: case GEU:
      if (inmode == CCmode || inmode == CCCmode)
        return 1;
      return 0;
    case ORDERED: case UNORDERED:
      if (inmode == CCmode)
        return 1;
      return 0;
    case GT: case LE:
      if (inmode == CCmode || inmode == CCGCmode || inmode == CCNOmode)
        return 1;
      return 0;
    default:
      return 0;
    }
})

;; Return 1 if OP is a valid comparison operator testing carry flag to be set.
(define_predicate "ix86_carry_flag_operator"
  (match_code "ltu,lt,unlt,gtu,gt,ungt,le,unle,ge,unge,ltgt,uneq")
{
  enum machine_mode inmode = GET_MODE (XEXP (op, 0));
  enum rtx_code code = GET_CODE (op);

  if (!REG_P (XEXP (op, 0))
      || REGNO (XEXP (op, 0)) != FLAGS_REG
      || XEXP (op, 1) != const0_rtx)
    return 0;

  if (inmode == CCFPmode || inmode == CCFPUmode)
    {
      if (!ix86_trivial_fp_comparison_operator (op, mode))
        return 0;
      code = ix86_fp_compare_code_to_integer (code);
    }
  else if (inmode == CCCmode)
    return code == LTU || code == GTU;
  else if (inmode != CCmode)
    return 0;

  return code == LTU;
})

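;; The form this predicate is typically applied to is
;;   (ltu:SI (reg:CC FLAGS_REG) (const_int 0))
;; i.e. the carry-consuming comparison that appears in adc/sbb patterns.
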
;; Return 1 if this comparison only requires testing one flag bit.
(define_predicate "ix86_trivial_fp_comparison_operator"
  (match_code "gt,ge,unlt,unle,uneq,ltgt,ordered,unordered"))

;; Return 1 if we know how to do this comparison.  Others require
;; testing more than one flag bit, and we let the generic middle-end
;; code do that for us.
(define_predicate "ix86_fp_comparison_operator"
  (if_then_else (match_test "ix86_fp_comparison_strategy (GET_CODE (op))
                             == IX86_FPCMP_ARITH")
    (match_operand 0 "comparison_operator")
    (match_operand 0 "ix86_trivial_fp_comparison_operator")))

;; Nearly general operand, but accept any const_double, since we wish
;; to be able to drop them into memory rather than have them get pulled
;; into a register.
(define_predicate "cmp_fp_expander_operand"
  (ior (match_code "const_double")
       (match_operand 0 "general_operand")))

;; Return true if this is a valid binary floating-point operation.
(define_predicate "binary_fp_operator"
  (match_code "plus,minus,mult,div"))

;; Return true if this is a multiply operation.
(define_predicate "mult_operator"
  (match_code "mult"))

;; Return true if this is a division operation.
(define_predicate "div_operator"
  (match_code "div"))

;; Return true if this is a float extend operation.
(define_predicate "float_operator"
  (match_code "float"))

;; Return true for ARITHMETIC_P.
(define_predicate "arith_or_logical_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax,compare,minus,div,
               mod,udiv,umod,ashift,rotate,ashiftrt,lshiftrt,rotatert"))

;; Return true for COMMUTATIVE_P.
(define_predicate "commutative_operator"
  (match_code "plus,mult,and,ior,xor,smin,smax,umin,umax"))

;; Return 1 if OP is a binary operator that can be promoted to wider mode.
(define_predicate "promotable_binary_operator"
  (ior (match_code "plus,and,ior,xor,ashift")
       (and (match_code "mult")
            (match_test "TARGET_TUNE_PROMOTE_HIMODE_IMUL"))))

;; To avoid problems when jump re-emits comparisons like testqi_ext_ccno_0,
;; re-recognize the operand to avoid a copy_to_mode_reg that will fail.
;;
;; ??? It seems likely that this will only work because cmpsi is an
;; expander, and no actual insns use this.

(define_predicate "cmpsi_operand"
  (ior (match_operand 0 "nonimmediate_operand")
       (and (match_code "and")
            (match_code "zero_extract" "0")
            (match_code "const_int"    "1")
            (match_code "const_int"    "01")
            (match_code "const_int"    "02")
            (match_test "INTVAL (XEXP (XEXP (op, 0), 1)) == 8")
            (match_test "INTVAL (XEXP (XEXP (op, 0), 2)) == 8")
            (match_test "INTVAL (XEXP (op, 1)) == 0"))))

(define_predicate "compare_operator"
  (match_code "compare"))

(define_predicate "absneg_operator"
  (match_code "abs,neg"))

;; Return 1 if OP is a misaligned memory operand.
(define_predicate "misaligned_operand"
  (and (match_code "mem")
       (match_test "MEM_ALIGN (op) < GET_MODE_ALIGNMENT (mode)")))

;; Return 1 if OP is a vzeroall operation, known to be a PARALLEL.
(define_predicate "vzeroall_operation"
  (match_code "parallel")
{
  int nregs = TARGET_64BIT ? 16 : 8;

  if (XVECLEN (op, 0) != nregs + 1)