/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
/* Information about a subreg of a hard register.  */
struct subreg_info
{
  /* Offset of first hard register involved in the subreg.  */
  int offset;
  /* Number of hard registers involved in the subreg.  */
  int nregs;
  /* Whether this subreg can be represented as a hard reg with the new
     mode.  */
  bool representable_p;
};
55 static void set_of_1 (rtx, const_rtx, void *);
56 static bool covers_regno_p (const_rtx, unsigned int);
57 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
58 static int rtx_referenced_p_1 (rtx *, void *);
59 static int computed_jump_p_1 (const_rtx);
60 static void parms_set (rtx, const_rtx, void *);
61 static void subreg_get_info (unsigned int, enum machine_mode,
62 unsigned int, enum machine_mode,
63 struct subreg_info *);
65 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
66 const_rtx, enum machine_mode,
67 unsigned HOST_WIDE_INT);
68 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
69 const_rtx, enum machine_mode,
70 unsigned HOST_WIDE_INT);
71 static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
74 static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
75 enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
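
/* For example (an illustrative case, not an entry from the table
   itself): on a target where TARGET_MODE_REP_EXTENDED (SImode, DImode)
   == SIGN_EXTEND, and assuming a 64-bit DImode and a 32-bit SImode, an
   SImode value held in a DImode register must be kept sign-extended,
   so NUM_SIGN_BIT_COPIES_IN_REP[DImode][SImode] would be
   64 - 32 + 1 == 33: any DImode value whose top 33 bits are copies of
   the sign bit can be switched to SImode without a truncation
   instruction.  */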
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

rtx_unstable_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

  if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (const_rtx x, bool for_alias)

      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudo register allocation.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || GET_MODE_SIZE (mode) == 0)

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;

          return offset % GET_MODE_SIZE (mode) != 0;

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))

      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

  /* If it isn't one of the cases above, it can cause a trap.  */
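
/* Illustrative examples of the above: (plus (reg fp) (const_int 4)) is
   a stack reference and cannot trap; a SYMBOL_REF of a weak symbol can,
   since the symbol may resolve to address zero; and on a
   strict-alignment machine, with UNALIGNED_MEMS true, an SImode MEM at
   (plus (reg sp) (const_int 2)) is reported as trapping because the
   offset is not a multiple of GET_MODE_SIZE (SImode).  */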
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (const_rtx x)
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);

/* Return true if X is an address that is known to not be zero.  */

nonzero_address_p (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
      return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */

      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, might be zero.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (const_rtx x, bool for_alias)

    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

get_integer_term (const_rtx x)
  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
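
/* For example, applied to (const (plus (symbol_ref "x") (const_int 4)))
   this returns 4, and applied to
   (const (minus (symbol_ref "x") (const_int 4))) it returns -4; an rtx
   with no obvious trailing integer term yields 0.  */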
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (const_rtx x)
  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)

  if (GET_CODE (symbol) != SYMBOL_REF)

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

split_const (rtx x, rtx *base_out, rtx *offset_out)
  if (GET_CODE (x) == CONST)
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);

  *offset_out = const0_rtx;
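
/* For example, given (const (plus (symbol_ref "x") (const_int 8))),
   split_const stores (symbol_ref "x") in *BASE_OUT and (const_int 8)
   in *OFFSET_OUT; for any X with no such form, *BASE_OUT is X itself
   and *OFFSET_OUT is const0_rtx.  */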
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (const_rtx x, const_rtx find, int count_dest)
  const char *format_ptr;

      count = count_occurrences (XEXP (x, 0), find, count_dest);
      count += count_occurrences (XEXP (x, 1), find, count_dest);

      if (MEM_P (find) && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (const_rtx reg, const_rtx in)

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

    /* Compare registers by number.  */
      return REG_P (reg) && REGNO (in) == REGNO (reg);

    /* These codes have no constituent expressions
       and are unique.  */

    /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (const_rtx beg, const_rtx end)
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (const_rtx x, const_rtx body)

  switch (GET_CODE (body))
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))

/* Internals of reg_set_between_p.  */

reg_set_p (const_rtx reg, const_rtx insn)
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
      && (FIND_REG_INC_NOTE (insn, reg)
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                          GET_MODE (reg), REGNO (reg)))
          || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

modified_between_p (const_rtx x, const_rtx start, const_rtx end)
  const enum rtx_code code = GET_CODE (x);

      if (modified_between_p (XEXP (x, 0), start, end))
      if (MEM_READONLY_P (x))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

modified_in_p (const_rtx x, const_rtx insn)
  const enum rtx_code code = GET_CODE (x);

      if (modified_in_p (XEXP (x, 0), insn))
      if (MEM_READONLY_P (x))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
/* Helper function for set_of.  */

struct set_of_data
{
  const_rtx found;
  const_rtx pat;
};

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

set_of (const_rtx pat, const_rtx insn)
  struct set_of_data data;
  data.found = NULL_RTX;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

single_set_2 (const_rtx insn, const_rtx pat)
  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))

              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it to be
                 the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                  && !side_effects_p (set))

                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (const_rtx insn)

  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

set_noop_p (const_rtx set)
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
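
/* For example, (set (reg:SI 5) (reg:SI 5)) is a no-op, as is
   (set (subreg:QI (reg:SI 5) 0) (subreg:QI (reg:SI 5) 0)), while a
   MEM-to-MEM copy of the same location is a no-op only when the MEM
   has no side effects (e.g. is not volatile).  */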
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

noop_move_p (const_rtx insn)
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                    && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
  unsigned int x_regno;

  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return endregno > x_regno && regno < END_REGNO (x);

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))

      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case.)  */

reg_overlap_mentioned_p (const_rtx x, const_rtx in)
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))

  switch (GET_CODE (x))
    case STRICT_LOW_PART:
      /* Overly conservative.  */

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);

      endregno = END_REGNO (x);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

      fmt = GET_RTX_FORMAT (GET_CODE (in));
      for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (reg_overlap_mentioned_p (x, XEXP (in, i)))
          else if (fmt[i] == 'E')
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))

      return reg_mentioned_p (x, in);

      /* If any register in here refers to it we return true.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (x, 0, i), 0) != 0
            && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))

      gcc_assert (CONSTANT_P (x));
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);

        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
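
/* Example usage (an illustrative sketch only; count_one_store and
   count_hard_reg_stores are hypothetical helpers, not part of this
   file):

     static void
     count_one_store (rtx dest, const_rtx pat ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         (*(int *) data)++;
     }

     static int
     count_hard_reg_stores (rtx insn)
     {
       int n = 0;
       note_stores (PATTERN (insn), count_one_store, &n);
       return n;
     }

   This counts the hard registers stored into or clobbered by the
   pattern of INSN.  */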
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)

  switch (GET_CODE (body))
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For sets we replace everything in the source, plus registers in
           the memory expression of the store, and operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (const_rtx insn, const_rtx x)
  unsigned int regno, end_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  gcc_assert (REG_P (x));

  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

covers_regno_p (const_rtx dest, unsigned int test_regno)
  if (GET_CODE (dest) == PARALLEL)
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))

    return covers_regno_no_parallel_p (dest, test_regno);
/* Utility function for dead_or_set_p to check an individual register.  */

dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

find_reg_equal_equiv_note (const_rtx insn)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))

/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

find_constant_src (const_rtx insn)

  set = single_set (insn);
      x = avoid_constant_pool_reference (SET_SRC (set));

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
          unsigned int end_regno = END_HARD_REGNO (datum);

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && REGNO (reg) <= regno
          && END_HARD_REGNO (reg) > regno)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (rtx insn, const_rtx note)

  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
        XEXP (link, 1) = XEXP (note, 1);

  switch (REG_NOTE_KIND (note))
      df_notes_rescan (insn);

/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

remove_reg_equal_equiv_notes (rtx insn)

  loc = &REG_NOTES (insn);
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
        loc = &XEXP (*loc, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (const_rtx listp, const_rtx node)

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (const_rtx node, rtx *listp)
  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
          /* Splice the node out of the list.  */
            XEXP (prev, 1) = XEXP (temp, 1);
            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no instructions
   should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

volatile_insn_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_insn_p (XEXP (x, i)))

        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

volatile_refs_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_refs_p (XEXP (x, i)))

        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

side_effects_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (side_effects_p (XEXP (x, i)))

        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
enum may_trap_p_flags
{
  MTP_UNALIGNED_MEMS = 1,
  MTP_AFTER_MOVE = 2
};

/* Return nonzero if evaluating rtx X might cause a trap.
   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
   unaligned memory accesses on strict alignment machines.  If
   (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even in case the
   expression cannot trap at its current location, but it might become
   trapping if moved elsewhere.  */
may_trap_p_1 (const_rtx x, unsigned flags)
  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;

  code = GET_CODE (x);

    /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:
      return targetm.unspec_may_trap_p (x, flags);

      return MEM_VOLATILE_P (x);

    /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of condition might cause its address
             to become invalid.  */
          !(flags & MTP_AFTER_MOVE)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

    /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p_1 (XEXP (x, i), flags))

      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
/* Return nonzero if evaluating rtx X might cause a trap.  */

may_trap_p (const_rtx x)
  return may_trap_p_1 (x, 0);

/* Return nonzero if evaluating rtx X might cause a trap, when the expression
   is moved from its current location by some optimization.  */

may_trap_after_code_motion_p (const_rtx x)
  return may_trap_p_1 (x, MTP_AFTER_MOVE);
/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

   struct S { int d; union { char *cp; int *ip; }; };

   int foo(struct S *s)

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

   (set (reg:SI) (mem:SI (%fp - 7)))

   (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

may_trap_or_fault_p (const_rtx x)
  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))

      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (rtx x, rtx from, rtx to)

  /* The following prevents an infinite loop from occurring when we change
     a MEM inside a CONST_DOUBLE into the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));

        SUBREG_REG (x) = new;

  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
        XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

replace_label (rtx *x, void *data)
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

rtx_referenced_p_1 (rtx *body, void *x)

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to pool constant traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);

/* Return true if X is referenced in BODY.  */

rtx_referenced_p (rtx x, rtx body)
  return for_each_rtx (&body, rtx_referenced_p_1, x);
/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)

      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        && computed_jump_p_1 (XEXP (x, i)))

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (const_rtx insn)
      rtx pat = PATTERN (insn);

      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
      if (JUMP_LABEL (insn) != NULL)

      if (GET_CODE (pat) == PARALLEL)
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
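
/* For example, (set (pc) (reg:SI 3)) is a computed jump, and so is
   (set (pc) (mem:SI (reg:SI 3))) when the MEM is not a constant-pool
   reference; a tablejump PARALLEL containing (use (label_ref ...)) is
   not.  */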
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));

  for (; format[n] != '\0'; n++)
          result = (*f) (x, data);

          /* Do not traverse sub-expressions.  */
          else if (result != 0)
            /* Stop the traversal.  */

          /* There are no sub-expressions.  */

          i = non_rtx_starting_operands[GET_CODE (*x)];
              result = for_each_rtx_1 (*x, i, f, data);

          if (XVEC (exp, n) == 0)
          for (j = 0; j < XVECLEN (exp, n); ++j)
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);

              /* Do not traverse sub-expressions.  */
              else if (result != 0)
                /* Stop the traversal.  */

              /* There are no sub-expressions.  */

              i = non_rtx_starting_operands[GET_CODE (*x)];
                  result = for_each_rtx_1 (*x, i, f, data);

        /* Nothing to do.  */
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (rtx *x, rtx_function f, void *data)

  result = (*f) (x, data);

  /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */

  /* There are no sub-expressions.  */

  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);
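
/* Example usage (an illustrative sketch only; contains_mem_1 and
   pattern_contains_mem_p are hypothetical helpers, not part of this
   file):

     static int
     contains_mem_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
     {
       return MEM_P (*x) ? 1 : 0;
     }

     static bool
     pattern_contains_mem_p (rtx pat)
     {
       return for_each_rtx (&pat, contains_mem_1, NULL) != 0;
     }

   The callback returns 0 to continue, -1 to skip the current rtx's
   sub-expressions, or any other nonzero value to abort the whole
   traversal.  */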
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (unsigned int regno, rtx x)

  if (REG_P (x) && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
2854 /* Return a value indicating whether OP, an operand of a commutative
2855 operation, is preferred as the first or second operand. The higher
2856 the value, the stronger the preference for being the first operand.
2857 We use negative values to indicate a preference for the first operand
2858 and positive values for the second operand. */
2861 commutative_operand_precedence (rtx op)
2863 enum rtx_code code = GET_CODE (op);
2865 /* Constants always come the second operand. Prefer "nice" constants. */
2866 if (code == CONST_INT)
2868 if (code == CONST_DOUBLE)
2870 if (code == CONST_FIXED)
2872 op = avoid_constant_pool_reference (op);
2873 code = GET_CODE (op);
2875 switch (GET_RTX_CLASS (code))
2878 if (code == CONST_INT)
2880 if (code == CONST_DOUBLE)
2882 if (code == CONST_FIXED)
2887 /* SUBREGs of objects should come second. */
2888 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2893 /* Complex expressions should be the first, so decrease priority
2894 of objects. Prefer pointer objects over non pointer objects. */
2895 if ((REG_P (op) && REG_POINTER (op))
2896 || (MEM_P (op) && MEM_POINTER (op)))
2900 case RTX_COMM_ARITH:
2901 /* Prefer operands that are themselves commutative to be first.
2902 This helps to make things linear. In particular,
2903 (and (and (reg) (reg)) (not (reg))) is canonical. */
2907 /* If only one operand is a binary expression, it will be the first
2908 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2909 is canonical, although it will usually be further simplified. */
2913 /* Then prefer NEG and NOT. */
2914 if (code == NEG || code == NOT)
2922 /* Return 1 iff it is necessary to swap the operands of a commutative
2923 operation in order to canonicalize the expression. */
2926 swap_commutative_operands_p (rtx x, rtx y)
2928 return (commutative_operand_precedence (x)
2929 < commutative_operand_precedence (y));
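
/* An illustrative sketch (hypothetical helper, not part of GCC): put a
   commutative pair into canonical order using the precedence above.
   For example, with x = (const_int 4) and y = (reg), the constant has
   lower precedence, so the operands are swapped and the constant ends
   up second.  */
#if 0
static void
canonicalize_comm_pair (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}
#endif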
2932 /* Return 1 if X is an autoincrement side effect and the register is
2933 not the stack pointer. */
2935 auto_inc_p (const_rtx x)
2937 switch (GET_CODE (x))
2945 /* There are no REG_INC notes for SP. */
2946 if (XEXP (x, 0) != stack_pointer_rtx)
2954 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2956 loc_mentioned_in_p (rtx *loc, const_rtx in)
2965 code = GET_CODE (in);
2966 fmt = GET_RTX_FORMAT (code);
2967 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2971 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
2974 else if (fmt[i] == 'E')
2975 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2976 if (loc == &XVECEXP (in, i, j)
2977 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2983 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2984 and SUBREG_BYTE, return the bit offset where the subreg begins
2985 (counting from the least significant bit of the operand). */
2988 subreg_lsb_1 (enum machine_mode outer_mode,
2989 enum machine_mode inner_mode,
2990 unsigned int subreg_byte)
2992 unsigned int bitpos;
2996 /* A paradoxical subreg begins at bit position 0. */
2997 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3000 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3001 /* If the subreg crosses a word boundary, ensure that
3002 it also begins and ends on a word boundary. */
3003 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3004 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3005 && (subreg_byte % UNITS_PER_WORD
3006 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3008 if (WORDS_BIG_ENDIAN)
3009 word = (GET_MODE_SIZE (inner_mode)
3010 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3012 word = subreg_byte / UNITS_PER_WORD;
3013 bitpos = word * BITS_PER_WORD;
3015 if (BYTES_BIG_ENDIAN)
3016 byte = (GET_MODE_SIZE (inner_mode)
3017 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3019 byte = subreg_byte % UNITS_PER_WORD;
3020 bitpos += byte * BITS_PER_UNIT;
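
/* Worked example, assuming UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8:
   for (subreg:HI (reg:DI) 2) on a little-endian target, word == 2 / 4
   == 0 and byte == 2 % 4 == 2, so the subreg begins at bit 16.  On a
   big-endian target the same SUBREG_BYTE gives word == (8 - (2 + 2)) / 4
   == 1 and byte == (8 - 4) % 4 == 0, i.e. bit 32.  */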
3025 /* Given a subreg X, return the bit offset where the subreg begins
3026 (counting from the least significant bit of the reg). */
3029 subreg_lsb (const_rtx x)
3031 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3035 /* Fill in information about a subreg of a hard register.
3036 xregno - A regno of an inner hard subreg_reg (or what will become one).
3037 xmode - The mode of xregno.
3038 offset - The byte offset.
3039 ymode - The mode of a top level SUBREG (or what may become one).
3040 info - Pointer to structure to fill in. */
3042 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3043 unsigned int offset, enum machine_mode ymode,
3044 struct subreg_info *info)
3046 int nregs_xmode, nregs_ymode;
3047 int mode_multiple, nregs_multiple;
3048 int offset_adj, y_offset, y_offset_adj;
3049 int regsize_xmode, regsize_ymode;
3052 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3056 /* If there are holes in a non-scalar mode in registers, we expect
3057 that it is made up of its units concatenated together. */
3058 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3060 enum machine_mode xmode_unit;
3062 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3063 if (GET_MODE_INNER (xmode) == VOIDmode)
3066 xmode_unit = GET_MODE_INNER (xmode);
3067 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3068 gcc_assert (nregs_xmode
3069 == (GET_MODE_NUNITS (xmode)
3070 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3071 gcc_assert (hard_regno_nregs[xregno][xmode]
3072 == (hard_regno_nregs[xregno][xmode_unit]
3073 * GET_MODE_NUNITS (xmode)));
3075 /* You can only ask for a SUBREG of a value with holes in the middle
3076 if you don't cross the holes. (Such a SUBREG should be done by
3077 picking a different register class, or doing it in memory if
3078 necessary.) An example of a value with holes is XCmode on 32-bit
3079 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3080 3 for each part, but in memory it's two 128-bit parts.
3081 Padding is assumed to be at the end (not necessarily the 'high part')
3083 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3084 < GET_MODE_NUNITS (xmode))
3085 && (offset / GET_MODE_SIZE (xmode_unit)
3086 != ((offset + GET_MODE_SIZE (ymode) - 1)
3087 / GET_MODE_SIZE (xmode_unit))))
3089 info->representable_p = false;
3094 nregs_xmode = hard_regno_nregs[xregno][xmode];
3096 nregs_ymode = hard_regno_nregs[xregno][ymode];
3098 /* Paradoxical subregs are otherwise valid. */
3101 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3103 info->representable_p = true;
3104 /* If this is a big endian paradoxical subreg, which uses more
3105 actual hard registers than the original register, we must
3106 return a negative offset so that we find the proper highpart of the register. */
3108 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3109 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3110 info->offset = nregs_xmode - nregs_ymode;
3113 info->nregs = nregs_ymode;
3117 /* If registers store different numbers of bits in the different
3118 modes, we cannot generally form this subreg. */
3119 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3120 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3121 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3122 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3124 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3125 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3126 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3128 info->representable_p = false;
3130 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3131 info->offset = offset / regsize_xmode;
3134 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3136 info->representable_p = false;
3138 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3139 info->offset = offset / regsize_xmode;
3144 /* Lowpart subregs are otherwise valid. */
3145 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3147 info->representable_p = true;
3150 if (offset == 0 || nregs_xmode == nregs_ymode)
3153 info->nregs = nregs_ymode;
3158 /* This should always pass, otherwise we don't know how to verify
3159 the constraint. These conditions may be relaxed but
3160 subreg_regno_offset would need to be redesigned. */
3161 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3162 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3164 /* The XMODE value can be seen as a vector of NREGS_XMODE
3165 values. The subreg must represent a lowpart of a given field.
3166 Compute what field it is. */
3167 offset_adj = offset;
3168 offset_adj -= subreg_lowpart_offset (ymode,
3169 mode_for_size (GET_MODE_BITSIZE (xmode)
3173 /* Size of ymode must not be greater than the size of xmode. */
3174 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3175 gcc_assert (mode_multiple != 0);
3177 y_offset = offset / GET_MODE_SIZE (ymode);
3178 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3179 nregs_multiple = nregs_xmode / nregs_ymode;
3181 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3182 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3186 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3189 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3190 info->nregs = nregs_ymode;
3193 /* This function returns the regno offset of a subreg expression.
3194 xregno - A regno of an inner hard subreg_reg (or what will become one).
3195 xmode - The mode of xregno.
3196 offset - The byte offset.
3197 ymode - The mode of a top level SUBREG (or what may become one).
3198 RETURN - The regno offset which would be used. */
3200 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3201 unsigned int offset, enum machine_mode ymode)
3203 struct subreg_info info;
3204 subreg_get_info (xregno, xmode, offset, ymode, &info);
3208 /* This function returns true when the offset is representable via
3209 subreg_offset in the given regno.
3210 xregno - A regno of an inner hard subreg_reg (or what will become one).
3211 xmode - The mode of xregno.
3212 offset - The byte offset.
3213 ymode - The mode of a top level SUBREG (or what may become one).
3214 RETURN - Whether the offset is representable. */
3216 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3217 unsigned int offset, enum machine_mode ymode)
3219 struct subreg_info info;
3220 subreg_get_info (xregno, xmode, offset, ymode, &info);
3221 return info.representable_p;
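
/* For example, on a hypothetical 32-bit little-endian target where
   DImode occupies two hard registers and SImode one,
   subreg_regno_offset (regno, DImode, 4, SImode) is 1: byte offset 4
   selects the second of the two registers, and the offset is
   representable.  The same query with offset 0 yields 0.  */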
3224 /* Return the final regno that a subreg expression refers to. */
3226 subreg_regno (const_rtx x)
3229 rtx subreg = SUBREG_REG (x);
3230 int regno = REGNO (subreg);
3232 ret = regno + subreg_regno_offset (regno,
3240 /* Return the number of registers that a subreg expression refers
3243 subreg_nregs (const_rtx x)
3245 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3248 /* Return the number of registers that subreg expression X, whose
3249 inner register has number REGNO, refers to. This is a copy of
3250 subreg_nregs above, changed so that the regno can be passed in. */
3253 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3255 struct subreg_info info;
3256 rtx subreg = SUBREG_REG (x);
3258 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3264 struct parms_set_data
3270 /* Helper function for noticing stores to parameter registers. */
3272 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3274 struct parms_set_data *d = data;
3275 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3276 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3278 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3283 /* Look backward for the first parameter to be loaded.
3284 Note that loads of all parameters will not necessarily be
3285 found if CSE has eliminated some of them (e.g., an argument
3286 to the outer function is passed down as a parameter).
3287 Do not skip BOUNDARY. */
3289 find_first_parameter_load (rtx call_insn, rtx boundary)
3291 struct parms_set_data parm;
3292 rtx p, before, first_set;
3294 /* Since different machines initialize their parameter registers
3295 in different orders, assume nothing. Collect the set of all
3296 parameter registers. */
3297 CLEAR_HARD_REG_SET (parm.regs);
3299 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3300 if (GET_CODE (XEXP (p, 0)) == USE
3301 && REG_P (XEXP (XEXP (p, 0), 0)))
3303 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3305 /* We only care about registers which can hold function arguments. */
3307 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3310 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3314 first_set = call_insn;
3316 /* Search backward for the first set of a register in this set. */
3317 while (parm.nregs && before != boundary)
3319 before = PREV_INSN (before);
3321 /* It is possible that some loads got CSEed from one call to
3322 another. Stop in that case. */
3323 if (CALL_P (before))
3326 /* Our caller must either ensure that we will find all sets
3327 (in case the code has not been optimized yet), or take care
3328 of possible labels by setting BOUNDARY to the preceding CODE_LABEL. */
3330 if (LABEL_P (before))
3332 gcc_assert (before == boundary);
3336 if (INSN_P (before))
3338 int nregs_old = parm.nregs;
3339 note_stores (PATTERN (before), parms_set, &parm);
3340 /* If we found something that did not set a parameter reg,
3341 we're done. Do not keep going, as that might result
3342 in hoisting an insn before the setting of a pseudo
3343 that is used by the hoisted insn. */
3344 if (nregs_old != parm.nregs)
3353 /* Return true if we should avoid inserting code between INSN and the
3354 preceding call instruction. */
3357 keep_with_call_p (const_rtx insn)
3361 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3363 if (REG_P (SET_DEST (set))
3364 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3365 && fixed_regs[REGNO (SET_DEST (set))]
3366 && general_operand (SET_SRC (set), VOIDmode))
3368 if (REG_P (SET_SRC (set))
3369 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3370 && REG_P (SET_DEST (set))
3371 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3373 /* There may be a stack pop just after the call and before the store
3374 of the return register. Search for the actual store when deciding
3375 if we can break or not. */
3376 if (SET_DEST (set) == stack_pointer_rtx)
3378 /* This CONST_CAST is okay because next_nonnote_insn just
3379 returns its argument and we assign it to a const_rtx variable. */
3381 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3382 if (i2 && keep_with_call_p (i2))
3389 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3390 to non-complex jumps. That is, direct unconditional, conditional,
3391 and tablejumps, but not computed jumps or returns. It also does
3392 not apply to the fallthru case of a conditional jump. */
3395 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3397 rtx tmp = JUMP_LABEL (jump_insn);
3402 if (tablejump_p (jump_insn, NULL, &tmp))
3404 rtvec vec = XVEC (PATTERN (tmp),
3405 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3406 int i, veclen = GET_NUM_ELEM (vec);
3408 for (i = 0; i < veclen; ++i)
3409 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3413 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3420 /* Return an estimate of the cost of computing rtx X.
3421 One use is in cse, to decide which expression to keep in the hash table.
3422 Another is in rtl generation, to pick the cheapest way to multiply.
3423 Other uses like the latter are expected in the future. */
3426 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3436 /* Compute the default costs of certain things.
3437 Note that targetm.rtx_costs can override the defaults. */
3439 code = GET_CODE (x);
3443 total = COSTS_N_INSNS (5);
3449 total = COSTS_N_INSNS (7);
3452 /* Used in combine.c as a marker. */
3456 total = COSTS_N_INSNS (1);
3466 /* If we can't tie these modes, make this expensive. The larger
3467 the mode, the more expensive it is. */
3468 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3469 return COSTS_N_INSNS (2
3470 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3474 if (targetm.rtx_costs (x, code, outer_code, &total))
3479 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3480 which is already in total. */
3482 fmt = GET_RTX_FORMAT (code);
3483 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3485 total += rtx_cost (XEXP (x, i), code);
3486 else if (fmt[i] == 'E')
3487 for (j = 0; j < XVECLEN (x, i); j++)
3488 total += rtx_cost (XVECEXP (x, i, j), code);
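
/* For example, with the defaults above, and assuming the target's
   targetm.rtx_costs leaves these codes alone,
   rtx_cost ((mult:SI (reg:SI 1) (reg:SI 2)), SET) is COSTS_N_INSNS (5):
   the MULT contributes 5 insns and the REG operands add nothing.  */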
3493 /* Return cost of address expression X.
3494 Expect that X is a properly formed address reference. */
3497 address_cost (rtx x, enum machine_mode mode)
3499 /* We may be asked for the cost of various unusual addresses, such as the
3500 operands of a push instruction. It is not worthwhile to complicate
3501 the target hook with such cases. */
3503 if (!memory_address_p (mode, x))
3506 return targetm.address_cost (x);
3509 /* If the target doesn't override, compute the cost as with arithmetic. */
3512 default_address_cost (rtx x)
3514 return rtx_cost (x, MEM);
3518 unsigned HOST_WIDE_INT
3519 nonzero_bits (const_rtx x, enum machine_mode mode)
3521 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3525 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3527 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3530 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3531 It avoids exponential behavior in nonzero_bits1 when X has
3532 identical subexpressions on the first or the second level. */
3534 static unsigned HOST_WIDE_INT
3535 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3536 enum machine_mode known_mode,
3537 unsigned HOST_WIDE_INT known_ret)
3539 if (x == known_x && mode == known_mode)
3542 /* Try to find identical subexpressions. If found, call
3543 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3544 precomputed value for the subexpression as KNOWN_RET. */
3546 if (ARITHMETIC_P (x))
3548 rtx x0 = XEXP (x, 0);
3549 rtx x1 = XEXP (x, 1);
3551 /* Check the first level. */
3553 return nonzero_bits1 (x, mode, x0, mode,
3554 cached_nonzero_bits (x0, mode, known_x,
3555 known_mode, known_ret));
3557 /* Check the second level. */
3558 if (ARITHMETIC_P (x0)
3559 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3560 return nonzero_bits1 (x, mode, x1, mode,
3561 cached_nonzero_bits (x1, mode, known_x,
3562 known_mode, known_ret));
3564 if (ARITHMETIC_P (x1)
3565 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3566 return nonzero_bits1 (x, mode, x0, mode,
3567 cached_nonzero_bits (x0, mode, known_x,
3568 known_mode, known_ret));
3571 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
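
/* For example, in (plus:SI (reg:SI 1) (reg:SI 1)) the two operands are
   the same rtx, so the first-level check computes the nonzero bits of
   the shared operand once and hands the result to nonzero_bits1 as
   KNOWN_RET instead of analyzing that operand a second time.  */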
3574 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3575 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3576 is less useful. We can't allow both, because that results in exponential
3577 run time recursion. There is a nullstone testcase that triggered
3578 this. This macro avoids accidental uses of num_sign_bit_copies. */
3579 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3581 /* Given an expression, X, compute which bits in X can be nonzero.
3582 We don't care about bits outside of those defined in MODE.
3584 For most X this is simply GET_MODE_MASK (MODE), but if X is
3585 an arithmetic operation, we can do better. */
3587 static unsigned HOST_WIDE_INT
3588 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3589 enum machine_mode known_mode,
3590 unsigned HOST_WIDE_INT known_ret)
3592 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3593 unsigned HOST_WIDE_INT inner_nz;
3595 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3597 /* For floating-point values, assume all bits are needed. */
3598 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3601 /* If X is wider than MODE, use its mode instead. */
3602 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3604 mode = GET_MODE (x);
3605 nonzero = GET_MODE_MASK (mode);
3606 mode_width = GET_MODE_BITSIZE (mode);
3609 if (mode_width > HOST_BITS_PER_WIDE_INT)
3610 /* Our only callers in this case look for single bit values. So
3611 just return the mode mask. Those tests will then be false. */
3614 #ifndef WORD_REGISTER_OPERATIONS
3615 /* If MODE is wider than X, but both are a single word for both the host
3616 and target machines, we can compute this from which bits of the
3617 object might be nonzero in its own mode, taking into account the fact
3618 that on many CISC machines, accessing an object in a wider mode
3619 causes the high-order bits to become undefined. So they are
3620 not known to be zero. */
3622 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3623 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3624 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3625 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3627 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3628 known_x, known_mode, known_ret);
3629 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3634 code = GET_CODE (x);
3638 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3639 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3640 all the bits above ptr_mode are known to be zero. */
3641 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3643 nonzero &= GET_MODE_MASK (ptr_mode);
3646 /* Include declared information about alignment of pointers. */
3647 /* ??? We don't properly preserve REG_POINTER changes across
3648 pointer-to-integer casts, so we can't trust it except for
3649 things that we know must be pointers. See execute/960116-1.c. */
3650 if ((x == stack_pointer_rtx
3651 || x == frame_pointer_rtx
3652 || x == arg_pointer_rtx)
3653 && REGNO_POINTER_ALIGN (REGNO (x)))
3655 unsigned HOST_WIDE_INT alignment
3656 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3658 #ifdef PUSH_ROUNDING
3659 /* If PUSH_ROUNDING is defined, it is possible for the
3660 stack to be momentarily aligned only to that amount,
3661 so we pick the least alignment. */
3662 if (x == stack_pointer_rtx && PUSH_ARGS)
3663 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3667 nonzero &= ~(alignment - 1);
3671 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3672 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3673 known_mode, known_ret,
3677 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3678 known_mode, known_ret);
3680 return nonzero_for_hook;
3684 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3685 /* If X is negative in MODE, sign-extend the value. */
3686 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3687 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3688 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3694 #ifdef LOAD_EXTEND_OP
3695 /* In many, if not most, RISC machines, reading a byte from memory
3696 zeros the rest of the register. Noticing that fact saves a lot
3697 of extra zero-extends. */
3698 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3699 nonzero &= GET_MODE_MASK (GET_MODE (x));
3704 case UNEQ: case LTGT:
3705 case GT: case GTU: case UNGT:
3706 case LT: case LTU: case UNLT:
3707 case GE: case GEU: case UNGE:
3708 case LE: case LEU: case UNLE:
3709 case UNORDERED: case ORDERED:
3710 /* If this produces an integer result, we know which bits are set.
3711 Code here used to clear bits outside the mode of X, but that is wrong. */
3713 /* Mind that MODE is the mode the caller wants to look at this
3714 operation in, and not the actual operation mode. We can wind
3715 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3716 that describes the results of a vector compare. */
3717 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3718 && mode_width <= HOST_BITS_PER_WIDE_INT)
3719 nonzero = STORE_FLAG_VALUE;
3724 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3725 and num_sign_bit_copies. */
3726 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3727 == GET_MODE_BITSIZE (GET_MODE (x)))
3731 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3732 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3737 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3738 and num_sign_bit_copies. */
3739 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3740 == GET_MODE_BITSIZE (GET_MODE (x)))
3746 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3747 known_x, known_mode, known_ret)
3748 & GET_MODE_MASK (mode));
3752 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3753 known_x, known_mode, known_ret);
3754 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3755 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3759 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3760 Otherwise, show that all the bits in the outer mode but not in the inner mode may be nonzero. */
3762 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3763 known_x, known_mode, known_ret);
3764 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3766 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3768 & (((HOST_WIDE_INT) 1
3769 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3770 inner_nz |= (GET_MODE_MASK (mode)
3771 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3774 nonzero &= inner_nz;
3778 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3779 known_x, known_mode, known_ret)
3780 & cached_nonzero_bits (XEXP (x, 1), mode,
3781 known_x, known_mode, known_ret);
3785 case UMIN: case UMAX: case SMIN: case SMAX:
3787 unsigned HOST_WIDE_INT nonzero0 =
3788 cached_nonzero_bits (XEXP (x, 0), mode,
3789 known_x, known_mode, known_ret);
3791 /* Don't call nonzero_bits for the second time if it cannot change anything. */
3793 if ((nonzero & nonzero0) != nonzero)
3795 | cached_nonzero_bits (XEXP (x, 1), mode,
3796 known_x, known_mode, known_ret);
3800 case PLUS: case MINUS:
3802 case DIV: case UDIV:
3803 case MOD: case UMOD:
3804 /* We can apply the rules of arithmetic to compute the number of
3805 high- and low-order zero bits of these operations. We start by
3806 computing the width (position of the highest-order nonzero bit)
3807 and the number of low-order zero bits for each value. */
3809 unsigned HOST_WIDE_INT nz0 =
3810 cached_nonzero_bits (XEXP (x, 0), mode,
3811 known_x, known_mode, known_ret);
3812 unsigned HOST_WIDE_INT nz1 =
3813 cached_nonzero_bits (XEXP (x, 1), mode,
3814 known_x, known_mode, known_ret);
3815 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3816 int width0 = floor_log2 (nz0) + 1;
3817 int width1 = floor_log2 (nz1) + 1;
3818 int low0 = floor_log2 (nz0 & -nz0);
3819 int low1 = floor_log2 (nz1 & -nz1);
3820 HOST_WIDE_INT op0_maybe_minusp
3821 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3822 HOST_WIDE_INT op1_maybe_minusp
3823 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3824 unsigned int result_width = mode_width;
3830 result_width = MAX (width0, width1) + 1;
3831 result_low = MIN (low0, low1);
3834 result_low = MIN (low0, low1);
3837 result_width = width0 + width1;
3838 result_low = low0 + low1;
3843 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3844 result_width = width0;
3849 result_width = width0;
3854 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3855 result_width = MIN (width0, width1);
3856 result_low = MIN (low0, low1);
3861 result_width = MIN (width0, width1);
3862 result_low = MIN (low0, low1);
3868 if (result_width < mode_width)
3869 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3872 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
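
/* Worked example for PLUS: if nz0 == 0xff (width 8) and nz1 == 0x0f
   (width 4), with no known low-order zero bits, the sum fits in
   MAX (8, 4) + 1 == 9 bits, so nonzero is masked down to 0x1ff.  */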
3874 #ifdef POINTERS_EXTEND_UNSIGNED
3875 /* If pointers extend unsigned and this is an addition or subtraction
3876 to a pointer in Pmode, all the bits above ptr_mode are known to be zero. */
3878 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3879 && (code == PLUS || code == MINUS)
3880 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3881 nonzero &= GET_MODE_MASK (ptr_mode);
3887 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3888 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3889 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3893 /* If this is a SUBREG formed for a promoted variable that has
3894 been zero-extended, we know that at least the high-order bits
3895 are zero, though others might be too. */
3897 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3898 nonzero = GET_MODE_MASK (GET_MODE (x))
3899 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3900 known_x, known_mode, known_ret);
3902 /* If the inner mode is a single word for both the host and target
3903 machines, we can compute this from which bits of the inner
3904 object might be nonzero. */
3905 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3906 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3907 <= HOST_BITS_PER_WIDE_INT))
3909 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3910 known_x, known_mode, known_ret);
3912 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3913 /* If this is a typical RISC machine, we only have to worry
3914 about the way loads are extended. */
3915 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3917 & (((unsigned HOST_WIDE_INT) 1
3918 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3920 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3921 || !MEM_P (SUBREG_REG (x)))
3924 /* On many CISC machines, accessing an object in a wider mode
3925 causes the high-order bits to become undefined. So they are
3926 not known to be zero. */
3927 if (GET_MODE_SIZE (GET_MODE (x))
3928 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3929 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3930 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3939 /* The nonzero bits are in two classes: any bits within MODE
3940 that aren't in GET_MODE (x) are always significant. The rest of the
3941 nonzero bits are those that are significant in the operand of
3942 the shift when shifted the appropriate number of bits. This
3943 shows that high-order bits are cleared by the right shift and
3944 low-order bits by left shifts. */
3945 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3946 && INTVAL (XEXP (x, 1)) >= 0
3947 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3949 enum machine_mode inner_mode = GET_MODE (x);
3950 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3951 int count = INTVAL (XEXP (x, 1));
3952 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3953 unsigned HOST_WIDE_INT op_nonzero =
3954 cached_nonzero_bits (XEXP (x, 0), mode,
3955 known_x, known_mode, known_ret);
3956 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3957 unsigned HOST_WIDE_INT outer = 0;
3959 if (mode_width > width)
3960 outer = (op_nonzero & nonzero & ~mode_mask);
3962 if (code == LSHIFTRT)
3964 else if (code == ASHIFTRT)
3968 /* If the sign bit may have been nonzero before the shift, we
3969 need to mark all the places it could have been copied to
3970 by the shift as possibly nonzero. */
3971 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3972 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3974 else if (code == ASHIFT)
3977 inner = ((inner << (count % width)
3978 | (inner >> (width - (count % width)))) & mode_mask);
3980 nonzero &= (outer | inner);
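
/* For example, (lshiftrt:SI X (const_int 8)) shifts every possibly
   nonzero bit of X down by 8, so the top 8 bits of the result are
   known zero: nonzero can be at most 0x00ffffff.  An ASHIFT by 8
   instead clears the low 8 bits.  */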
3986 /* This is at most the number of bits in the mode. */
3987 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3991 /* If CLZ has a known value at zero, then the nonzero bits are
3992 that value, plus the number of bits in the mode minus one. */
3993 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3994 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4000 /* If CTZ has a known value at zero, then the nonzero bits are
4001 that value, plus the number of bits in the mode minus one. */
4002 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4003 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4014 unsigned HOST_WIDE_INT nonzero_true =
4015 cached_nonzero_bits (XEXP (x, 1), mode,
4016 known_x, known_mode, known_ret);
4018 /* Don't call nonzero_bits for the second time if it cannot change anything. */
4020 if ((nonzero & nonzero_true) != nonzero)
4021 nonzero &= nonzero_true
4022 | cached_nonzero_bits (XEXP (x, 2), mode,
4023 known_x, known_mode, known_ret);
4034 /* See the macro definition above. */
4035 #undef cached_num_sign_bit_copies
4038 /* The function cached_num_sign_bit_copies is a wrapper around
4039 num_sign_bit_copies1. It avoids exponential behavior in
4040 num_sign_bit_copies1 when X has identical subexpressions on the
4041 first or the second level. */
4044 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4045 enum machine_mode known_mode,
4046 unsigned int known_ret)
4048 if (x == known_x && mode == known_mode)
4051 /* Try to find identical subexpressions. If found, call
4052 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4053 the precomputed value for the subexpression as KNOWN_RET. */
4055 if (ARITHMETIC_P (x))
4057 rtx x0 = XEXP (x, 0);
4058 rtx x1 = XEXP (x, 1);
4060 /* Check the first level. */
4063 num_sign_bit_copies1 (x, mode, x0, mode,
4064 cached_num_sign_bit_copies (x0, mode, known_x,
4068 /* Check the second level. */
4069 if (ARITHMETIC_P (x0)
4070 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4072 num_sign_bit_copies1 (x, mode, x1, mode,
4073 cached_num_sign_bit_copies (x1, mode, known_x,
4077 if (ARITHMETIC_P (x1)
4078 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4080 num_sign_bit_copies1 (x, mode, x0, mode,
4081 cached_num_sign_bit_copies (x0, mode, known_x,
4086 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4089 /* Return the number of bits at the high-order end of X that are known to
4090 be equal to the sign bit. X will be used in mode MODE; if MODE is
4091 VOIDmode, X will be used in its own mode. The returned value will always
4092 be between 1 and the number of bits in MODE. */
4095 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4096 enum machine_mode known_mode,
4097 unsigned int known_ret)
4099 enum rtx_code code = GET_CODE (x);
4100 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4101 int num0, num1, result;
4102 unsigned HOST_WIDE_INT nonzero;
4104 /* If we weren't given a mode, use the mode of X. If the mode is still
4105 VOIDmode, we don't know anything. Likewise if one of the modes is floating-point. */
4108 if (mode == VOIDmode)
4109 mode = GET_MODE (x);
4111 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4114 /* For a smaller object, just ignore the high bits. */
4115 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4117 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4118 known_x, known_mode, known_ret);
4120 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4123 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4125 #ifndef WORD_REGISTER_OPERATIONS
4126 /* If this machine does not do all register operations on the entire
4127 register and MODE is wider than the mode of X, we can say nothing
4128 at all about the high-order bits. */
4131 /* Likewise on machines that do, if the mode of the object is smaller
4132 than a word and loads of that size don't sign extend, we can say
4133 nothing about the high order bits. */
4134 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4135 #ifdef LOAD_EXTEND_OP
4136 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4147 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4148 /* If pointers extend signed and this is a pointer in Pmode, say that
4149 all the bits above ptr_mode are known to be sign bit copies. */
4150 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4152 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4156 unsigned int copies_for_hook = 1, copies = 1;
4157 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4158 known_mode, known_ret,
4162 copies = cached_num_sign_bit_copies (new, mode, known_x,
4163 known_mode, known_ret);
4165 if (copies > 1 || copies_for_hook > 1)
4166 return MAX (copies, copies_for_hook);
4168 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4173 #ifdef LOAD_EXTEND_OP
4174 /* Some RISC machines sign-extend all loads smaller than a word. */
4175 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4176 return MAX (1, ((int) bitwidth
4177 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4182 /* If the constant is negative, take its 1's complement and remask.
4183 Then see how many zero bits we have. */
4184 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4185 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4186 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4187 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4189 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
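
/* Worked example: for (const_int -4) in SImode, the masked value is
   0xfffffffc, whose sign bit is set, so we complement it and get
   nonzero == 3.  floor_log2 (3) == 1, giving 32 - 1 - 1 == 30: bits
   31 down to 2 are indeed all copies of the sign bit.  */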
4192 /* If this is a SUBREG for a promoted object that is sign-extended
4193 and we are looking at it in a wider mode, we know that at least the
4194 high-order bits are sign bit copies. */
4196 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4198 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4199 known_x, known_mode, known_ret);
4200 return MAX ((int) bitwidth
4201 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4205 /* For a smaller object, just ignore the high bits. */
4206 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4208 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4209 known_x, known_mode, known_ret);
4210 return MAX (1, (num0
4211 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4215 #ifdef WORD_REGISTER_OPERATIONS
4216 #ifdef LOAD_EXTEND_OP
4217 /* For paradoxical SUBREGs on machines where all register operations
4218 affect the entire register, just look inside. Note that we are
4219 passing MODE to the recursive call, so the number of sign bit copies
4220 will remain relative to that mode, not the inner mode. */
4222 /* This works only if loads sign extend. Otherwise, if we get a
4223 reload for the inner part, it may be loaded from the stack, and
4224 then we lose all sign bit copies that existed before the store to the stack. */
4227 if ((GET_MODE_SIZE (GET_MODE (x))
4228 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4229 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4230 && MEM_P (SUBREG_REG (x)))
4231 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4232 known_x, known_mode, known_ret);
4238 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4239 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4243 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4244 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4245 known_x, known_mode, known_ret));
4248 /* For a smaller object, just ignore the high bits. */
4249 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4250 known_x, known_mode, known_ret);
4251 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4255 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4256 known_x, known_mode, known_ret);
4258 case ROTATE: case ROTATERT:
4259 /* If we are rotating left by a number of bits less than the number
4260 of sign bit copies, we can just subtract that amount from the number. */
4262 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4263 && INTVAL (XEXP (x, 1)) >= 0
4264 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4266 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4267 known_x, known_mode, known_ret);
4268 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4269 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4274 /* In general, this subtracts one sign bit copy. But if the value
4275 is known to be positive, the number of sign bit copies is the
4276 same as that of the input. Finally, if the input has just one bit
4277 that might be nonzero, all the bits are copies of the sign bit. */
4278 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4279 known_x, known_mode, known_ret);
4280 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4281 return num0 > 1 ? num0 - 1 : 1;
4283 nonzero = nonzero_bits (XEXP (x, 0), mode);
4288 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4293 case IOR: case AND: case XOR:
4294 case SMIN: case SMAX: case UMIN: case UMAX:
4295 /* Logical operations will preserve the number of sign-bit copies.
4296 MIN and MAX operations always return one of the operands. */
4297 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4298 known_x, known_mode, known_ret);
4299 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4300 known_x, known_mode, known_ret);
4302 /* If num1 is clearing some of the top bits then regardless of
4303 the other term, we are guaranteed to have at least that many
4304 high-order zero bits. */
4307 && bitwidth <= HOST_BITS_PER_WIDE_INT
4308 && GET_CODE (XEXP (x, 1)) == CONST_INT
4309 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4312 /* Similarly for IOR when setting high-order bits. */
4315 && bitwidth <= HOST_BITS_PER_WIDE_INT
4316 && GET_CODE (XEXP (x, 1)) == CONST_INT
4317 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4320 return MIN (num0, num1);
4322 case PLUS: case MINUS:
4323 /* For addition and subtraction, we can have a 1-bit carry. However,
4324 if we are subtracting 1 from a positive number, there will not
4325 be such a carry. Furthermore, if the positive number is known to
4326 be 0 or 1, we know the result is either -1 or 0. */
4328 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4329 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4331 nonzero = nonzero_bits (XEXP (x, 0), mode);
4332 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4333 return (nonzero == 1 || nonzero == 0 ? bitwidth
4334 : bitwidth - floor_log2 (nonzero) - 1);
4337 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4338 known_x, known_mode, known_ret);
4339 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4340 known_x, known_mode, known_ret);
4341 result = MAX (1, MIN (num0, num1) - 1);
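
/* For example, if each operand of a PLUS has at least 8 sign bit
   copies, the sum can lose at most one of them to a carry, so it is
   still guaranteed MIN (8, 8) - 1 == 7 copies.  */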
4343 #ifdef POINTERS_EXTEND_UNSIGNED
4344 /* If pointers extend signed and this is an addition or subtraction
4345 to a pointer in Pmode, all the bits above ptr_mode are known to be
4347 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4348 && (code == PLUS || code == MINUS)
4349 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4350 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4351 - GET_MODE_BITSIZE (ptr_mode) + 1),
4357 /* The number of bits of the product is the sum of the number of
4358 bits of both terms. However, unless one of the terms is known
4359 to be positive, we must allow for an additional bit since negating
4360 a negative number can remove one sign bit copy. */
4362 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4363 known_x, known_mode, known_ret);
4364 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4365 known_x, known_mode, known_ret);
4367 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4369 && (bitwidth > HOST_BITS_PER_WIDE_INT
4370 || (((nonzero_bits (XEXP (x, 0), mode)
4371 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4372 && ((nonzero_bits (XEXP (x, 1), mode)
4373 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4376 return MAX (1, result);
4379 /* The result must be <= the first operand. If the first operand
4380 has the high bit set, we know nothing about the number of sign bit copies. */
4382 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4384 else if ((nonzero_bits (XEXP (x, 0), mode)
4385 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4388 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4389 known_x, known_mode, known_ret);
4392 /* The result must be <= the second operand. */
4393 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4394 known_x, known_mode, known_ret);
4397 /* Similar to unsigned division, except that we have to worry about
4398 the case where the divisor is negative, in which case we have to add 1. */
4400 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4401 known_x, known_mode, known_ret);
4403 && (bitwidth > HOST_BITS_PER_WIDE_INT
4404 || (nonzero_bits (XEXP (x, 1), mode)
4405 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4411 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4412 known_x, known_mode, known_ret);
4414 && (bitwidth > HOST_BITS_PER_WIDE_INT
4415 || (nonzero_bits (XEXP (x, 1), mode)
4416 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4422 /* Shifts by a constant add to the number of bits equal to the
4424 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4425 known_x, known_mode, known_ret);
4426 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4427 && INTVAL (XEXP (x, 1)) > 0)
4428 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4433 /* Left shifts destroy copies. */
4434 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4435 || INTVAL (XEXP (x, 1)) < 0
4436 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4439 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4440 known_x, known_mode, known_ret);
4441 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4444 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4445 known_x, known_mode, known_ret);
4446 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4447 known_x, known_mode, known_ret);
4448 return MIN (num0, num1);
4450 case EQ: case NE: case GE: case GT: case LE: case LT:
4451 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4452 case GEU: case GTU: case LEU: case LTU:
4453 case UNORDERED: case ORDERED:
4454 /* If the constant is negative, take its 1's complement and remask.
4455 Then see how many zero bits we have. */
4456 nonzero = STORE_FLAG_VALUE;
4457 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4458 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4459 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4461 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4467 /* If we haven't been able to figure it out by one of the above rules,
4468 see if some of the high-order bits are known to be zero. If so,
4469 count those bits and return one less than that amount. If we can't
4470 safely compute the mask for this mode, always return BITWIDTH. */
4472 bitwidth = GET_MODE_BITSIZE (mode);
4473 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4476 nonzero = nonzero_bits (x, mode);
4477 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4478 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4481 /* Calculate the rtx_cost of a single instruction. A return value of
4482 zero indicates an instruction pattern without a known cost. */
4485 insn_rtx_cost (rtx pat)
4490 /* Extract the single set rtx from the instruction pattern.
4491 We can't use single_set since we only have the pattern. */
4492 if (GET_CODE (pat) == SET)
4494 else if (GET_CODE (pat) == PARALLEL)
4497 for (i = 0; i < XVECLEN (pat, 0); i++)
4499 rtx x = XVECEXP (pat, 0, i);
4500 if (GET_CODE (x) == SET)
4513 cost = rtx_cost (SET_SRC (set), SET);
4514 return cost > 0 ? cost : COSTS_N_INSNS (1);
4517 /* Given an insn INSN and condition COND, return the condition in a
4518 canonical form to simplify testing by callers. Specifically:
4520 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4521 (2) Both operands will be machine operands; (cc0) will have been replaced.
4522 (3) If an operand is a constant, it will be the second operand.
4523 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4524 for GE, GEU, and LEU.
4526 If the condition cannot be understood, or is an inequality floating-point
4527 comparison which needs to be reversed, 0 will be returned.
4529 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4531 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4532 insn used in locating the condition was found. If a replacement test
4533 of the condition is desired, it should be placed in front of that
4534 insn and we will be sure that the inputs are still valid.
4536 If WANT_REG is nonzero, we wish the condition to be relative to that
4537 register, if possible. Therefore, do not canonicalize the condition
4538 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4539 to be a compare to a CC mode register.
4541 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST and at INSN. */
4545 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4546 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4553 int reverse_code = 0;
4554 enum machine_mode mode;
4555 basic_block bb = BLOCK_FOR_INSN (insn);
4557 code = GET_CODE (cond);
4558 mode = GET_MODE (cond);
4559 op0 = XEXP (cond, 0);
4560 op1 = XEXP (cond, 1);
4563 code = reversed_comparison_code (cond, insn);
4564 if (code == UNKNOWN)
4570 /* If we are comparing a register with zero, see if the register is set
4571 in the previous insn to a COMPARE or a comparison operation. Perform
4572 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4575 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4576 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4577 && op1 == CONST0_RTX (GET_MODE (op0))
4580 /* Set nonzero when we find something of interest. */
4584 /* If this is a comparison with cc0, import the actual comparison from the compare insn. */
4588 if ((prev = prev_nonnote_insn (prev)) == 0
4589 || !NONJUMP_INSN_P (prev)
4590 || (set = single_set (prev)) == 0
4591 || SET_DEST (set) != cc0_rtx)
4594 op0 = SET_SRC (set);
4595 op1 = CONST0_RTX (GET_MODE (op0));
4601 /* If this is a COMPARE, pick up the two things being compared. */
4602 if (GET_CODE (op0) == COMPARE)
4604 op1 = XEXP (op0, 1);
4605 op0 = XEXP (op0, 0);
4608 else if (!REG_P (op0))
4611 /* Go back to the previous insn. Stop if it is not an INSN. We also
4612 stop if it isn't a single set or if it has a REG_INC note because
4613 we don't want to bother dealing with it. */
4615 if ((prev = prev_nonnote_insn (prev)) == 0
4616 || !NONJUMP_INSN_P (prev)
4617 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4618 /* In cfglayout mode, there do not have to be labels at the
4619 beginning of a block, or jumps at the end, so the previous
4620 conditions would not stop us when we reach the bb boundary. */
4621 || BLOCK_FOR_INSN (prev) != bb)
4624 set = set_of (op0, prev);
4627 && (GET_CODE (set) != SET
4628 || !rtx_equal_p (SET_DEST (set), op0)))
4631 /* If this is setting OP0, get what it sets it to if it looks
4635 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4636 #ifdef FLOAT_STORE_FLAG_VALUE
4637 REAL_VALUE_TYPE fsfv;
4640 /* ??? We may not combine comparisons done in a CCmode with
4641 comparisons not done in a CCmode. This is to aid targets
4642 like Alpha that have an IEEE compliant EQ instruction, and
4643 a non-IEEE compliant BEQ instruction. The use of CCmode is
4644 actually artificial, simply to prevent the combination, but
4645 should not affect other platforms.
4647 However, we must allow VOIDmode comparisons to match either
4648 CCmode or non-CCmode comparison, because some ports have
4649 modeless comparisons inside branch patterns.
4651 ??? This mode check should perhaps look more like the mode check
4652 in simplify_comparison in combine. */
4654 if ((GET_CODE (SET_SRC (set)) == COMPARE
4657 && GET_MODE_CLASS (inner_mode) == MODE_INT
4658 && (GET_MODE_BITSIZE (inner_mode)
4659 <= HOST_BITS_PER_WIDE_INT)
4660 && (STORE_FLAG_VALUE
4661 & ((HOST_WIDE_INT) 1
4662 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4663 #ifdef FLOAT_STORE_FLAG_VALUE
4665 && SCALAR_FLOAT_MODE_P (inner_mode)
4666 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4667 REAL_VALUE_NEGATIVE (fsfv)))
4670 && COMPARISON_P (SET_SRC (set))))
4671 && (((GET_MODE_CLASS (mode) == MODE_CC)
4672 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4673 || mode == VOIDmode || inner_mode == VOIDmode))
4675 else if (((code == EQ
4677 && (GET_MODE_BITSIZE (inner_mode)
4678 <= HOST_BITS_PER_WIDE_INT)
4679 && GET_MODE_CLASS (inner_mode) == MODE_INT
4680 && (STORE_FLAG_VALUE
4681 & ((HOST_WIDE_INT) 1
4682 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4683 #ifdef FLOAT_STORE_FLAG_VALUE
4685 && SCALAR_FLOAT_MODE_P (inner_mode)
4686 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4687 REAL_VALUE_NEGATIVE (fsfv)))
4690 && COMPARISON_P (SET_SRC (set))
4691 && (((GET_MODE_CLASS (mode) == MODE_CC)
4692 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4693 || mode == VOIDmode || inner_mode == VOIDmode))
4703 else if (reg_set_p (op0, prev))
4704 /* If this sets OP0, but not directly, we have to give up. */
4709 /* If the caller is expecting the condition to be valid at INSN,
4710 make sure X doesn't change before INSN. */
4711 if (valid_at_insn_p)
4712 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4714 if (COMPARISON_P (x))
4715 code = GET_CODE (x);
4718 code = reversed_comparison_code (x, prev);
4719 if (code == UNKNOWN)
4724 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4730 /* If constant is first, put it last. */
4731 if (CONSTANT_P (op0))
4732 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4734 /* If OP0 is the result of a comparison, we weren't able to find what
4735 was really being compared, so fail. */
4737 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4740 /* Canonicalize any ordered comparison with integers involving equality
4741 if we can do computations in the relevant mode and we do not overflow. */
4744 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4745 && GET_CODE (op1) == CONST_INT
4746 && GET_MODE (op0) != VOIDmode
4747 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4749 HOST_WIDE_INT const_val = INTVAL (op1);
4750 unsigned HOST_WIDE_INT uconst_val = const_val;
4751 unsigned HOST_WIDE_INT max_val
4752 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4757 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4758 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4761 /* When cross-compiling, const_val might be sign-extended from
4762 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4764 if ((HOST_WIDE_INT) (const_val & max_val)
4765 != (((HOST_WIDE_INT) 1
4766 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4767 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4771 if (uconst_val < max_val)
4772 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4776 if (uconst_val != 0)
4777 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
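
/* Worked examples of the rewrites above, in an integer mode:
   (le x (const_int 4)) becomes (lt x (const_int 5)) and
   (geu x (const_int 4)) becomes (gtu x (const_int 3)); the guards
   skip the extreme values for which adding or subtracting 1 would
   wrap around.  */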
4785 /* Never return CC0; return zero instead. */
4789 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4792 /* Given a jump insn JUMP, return the condition that will cause it to branch
4793 to its JUMP_LABEL. If the condition cannot be understood, or is an
4794 inequality floating-point comparison which needs to be reversed, 0 will be returned.
4797 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4798 insn used in locating the condition was found. If a replacement test
4799 of the condition is desired, it should be placed in front of that
4800 insn and we will be sure that the inputs are still valid. If EARLIEST
4801 is null, the returned condition will be valid at INSN.
4803 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4804 compare CC mode register.
4806 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4809 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4815 /* If this is not a standard conditional jump, we can't parse it. */
4817 || ! any_condjump_p (jump))
4819 set = pc_set (jump);
4821 cond = XEXP (SET_SRC (set), 0);
4823 /* If this branches to JUMP_LABEL when the condition is false, reverse the condition. */
4826 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4827 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4829 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4830 allow_cc_mode, valid_at_insn_p);
4833 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4834 TARGET_MODE_REP_EXTENDED.
4836 Note that we assume that the property of
4837 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4838 narrower than mode B. I.e., if A is a mode narrower than B then in
4839 order to be able to operate on it in mode B, mode A needs to
4840 satisfy the requirements set by the representation of mode B. */
4843 init_num_sign_bit_copies_in_rep (void)
4845 enum machine_mode mode, in_mode;
4847 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4848 in_mode = GET_MODE_WIDER_MODE (mode))
4849 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4850 mode = GET_MODE_WIDER_MODE (mode))
4852 enum machine_mode i;
4854 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4855 extends to the next widest mode. */
4856 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4857 || GET_MODE_WIDER_MODE (mode) == in_mode);
4859 /* We are in in_mode. Count how many bits outside of mode
4860 have to be copies of the sign-bit. */
4861 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4863 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4865 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4866 /* We can only check sign-bit copies starting from the
4867 top-bit. In order to be able to check the bits we
4868 have already seen we pretend that subsequent bits
4869 have to be sign-bit copies too. */
4870 || num_sign_bit_copies_in_rep [in_mode][mode])
4871 num_sign_bit_copies_in_rep [in_mode][mode]
4872 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
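
/* For example, assuming 8-bit QImode, 16-bit HImode and 32-bit SImode:
   if the target reports SIGN_EXTEND for QImode-in-HImode and for
   HImode-in-SImode, this accumulates (16 - 8) + (32 - 16) == 24 in
   num_sign_bit_copies_in_rep[SImode][QImode], i.e. the top 24 bits of
   the SImode representation must copy the QImode sign bit.  */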
4877 /* Suppose that truncation from the machine mode of X to MODE is not a
4878 no-op. See if there is anything special about X so that we can
4879 assume it already contains a truncated value of MODE. */
4882 truncated_to_mode (enum machine_mode mode, const_rtx x)
4884 /* This register has already been used in MODE without explicit truncation. */
4886 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4889 /* See if we already satisfy the requirements of MODE. If yes we
4890 can just switch to MODE. */
4891 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4892 && (num_sign_bit_copies (x, GET_MODE (x))
4893 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4899 /* Initialize non_rtx_starting_operands, which is used to speed up for_each_rtx. */
4905 for (i = 0; i < NUM_RTX_CODE; i++)
4907 const char *format = GET_RTX_FORMAT (i);
4908 const char *first = strpbrk (format, "eEV");
4909 non_rtx_starting_operands[i] = first ? first - format : -1;
4912 init_num_sign_bit_copies_in_rep ();
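
/* For example, PLUS has format "ee", so its entry is 0 and traversal
   starts at the first operand, while CONST_INT has format "w", gets
   -1, and has no sub-expressions for for_each_rtx to visit.  */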
4915 /* Check whether this is a constant pool constant. */
4917 constant_pool_constant_p (rtx x)
4919 x = avoid_constant_pool_reference (x);
4920 return GET_CODE (x) == CONST_DOUBLE;