/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"

/* Information about a subreg of a hard register.  */

/* Offset of first hard register involved in the subreg.  */

/* Number of hard registers involved in the subreg.  */

/* Whether this subreg can be represented as a hard reg with the new
   mode.  */

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);
static void subreg_get_info (unsigned int, enum machine_mode,
                             unsigned int, enum machine_mode,
                             struct subreg_info *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode, unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */
/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
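
/* For example, on a MIPS-like target where TARGET_MODE_REP_EXTENDED
   (SImode, DImode) is SIGN_EXTEND, a DImode value can be reinterpreted
   as SImode only when its 33 high-order bits are copies of the sign
   bit: the 32 bits that are dropped plus one more, so that the result
   is itself sign-extended in the DImode representation.  (Worked
   illustration under that assumption.)  */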
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */
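
/* For instance, (plus:SI (reg:SI 59) (const_int 4)) is unstable when
   reg 59 is an ordinary pseudo, whereas (plus:SI (reg:SI fp)
   (const_int 4)) and a load from a MEM_READONLY_P location are stable.
   (Illustrative RTL; register numbering is arbitrary.)  */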
rtx_unstable_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (const_rtx x, bool for_alias)
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */
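
/* For instance, (plus:P (reg:P sp) (const_int 12)) cannot trap, while an
   address held in an arbitrary pseudo such as (reg:P 59) can.  On a
   STRICT_ALIGNMENT target with UNALIGNED_MEMS set, (plus:P (reg:P sp)
   (const_int 2)) addressing an SImode MEM is also considered trapping,
   since the offset is not a multiple of the access size.  (Sketch of
   typical cases; the precise answer depends on the target macros.)  */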
rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || GET_MODE_SIZE (mode) == 0)

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;

          return offset % GET_MODE_SIZE (mode) != 0;

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))

      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

  /* If it isn't one of the cases above, it can cause a trap.  */
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}

/* Return true if X is an address that is known to not be zero.  */
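
/* E.g. (symbol_ref:P "x") is known nonzero unless the symbol is weak,
   and (plus:P (reg:P sp) (const_int 8)) is nonzero because the stack
   pointer is.  A plain pseudo carries no such guarantee.  */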
nonzero_address_p (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
      return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */

      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, it might be zero.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (const_rtx x, bool for_alias)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */
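
/* E.g. for (const (plus (symbol_ref "x") (const_int 4))) this returns 4,
   and for (const (minus (symbol_ref "x") (const_int 4))) it returns -4.  */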
get_integer_term (const_rtx x)
  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (const_rtx x)
  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
  if (GET_CODE (symbol) != SYMBOL_REF)

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */
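
/* E.g. (const (plus (symbol_ref "x") (const_int 8))) splits into base
   (symbol_ref "x") and offset (const_int 8); anything without an
   apparent offset splits into itself and (const_int 0).  */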
split_const (rtx x, rtx *base_out, rtx *offset_out)
  if (GET_CODE (x) == CONST)
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);

  *offset_out = const0_rtx;

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (const_rtx x, const_rtx find, int count_dest)
  const char *format_ptr;

      count = count_occurrences (XEXP (x, 0), find, count_dest);
      count += count_occurrences (XEXP (x, 1), find, count_dest);

      if (MEM_P (find) && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (const_rtx reg, const_rtx in)
  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

    /* Compare registers by number.  */
      return REG_P (reg) && REGNO (in) == REGNO (reg);

    /* These codes have no constituent expressions
       and are unique.  */

    /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (const_rtx beg, const_rtx end)
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (const_rtx x, const_rtx body)
  switch (GET_CODE (body))
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))

/* Internals of reg_set_between_p.  */
reg_set_p (const_rtx reg, const_rtx insn)
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
      && (FIND_REG_INC_NOTE (insn, reg)
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                          GET_MODE (reg), REGNO (reg)))
          || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

modified_between_p (const_rtx x, const_rtx start, const_rtx end)
  const enum rtx_code code = GET_CODE (x);

      if (modified_between_p (XEXP (x, 0), start, end))
      if (MEM_READONLY_P (x))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

modified_in_p (const_rtx x, const_rtx insn)
  const enum rtx_code code = GET_CODE (x);

      if (modified_in_p (XEXP (x, 0), insn))
      if (MEM_READONLY_P (x))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))

/* Helper function for set_of.  */
set_of_1 (rtx x, const_rtx pat, void *data1)
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
set_of (const_rtx pat, const_rtx insn)
  struct set_of_data data;
  data.found = NULL_RTX;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose outputs
   will not be used, which we ignore.  */
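
/* For instance, on many targets an addition clobbers the flags and is
   represented as

       (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
                  (clobber (reg:CC flags))])

   yet single_set still returns the inner SET, since the CLOBBER is
   ignored.  (Illustrative pattern; register numbering is arbitrary.)  */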
single_set_2 (const_rtx insn, const_rtx pat)
  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))

              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set for the first time, we just expect it
                 to be the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */

              if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                  && !side_effects_p (set))

                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (const_rtx insn)
  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */
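
/* E.g. (set (reg:SI 60) (reg:SI 60)) is a no-op, as is the SUBREG copy
   (set (subreg:HI (reg:SI 60) 0) (subreg:HI (reg:SI 60) 0)); a
   MEM-to-MEM copy is a no-op only if the two MEMs are rtx_equal_p and
   free of side effects.  */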
set_noop_p (const_rtx set)
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

noop_move_p (const_rtx insn)
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be used.  */

find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
  unsigned int x_regno;

  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return endregno > x_regno && regno < END_REGNO (x);

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))

      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */
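
/* For example, on a 32-bit target where a DImode value occupies two hard
   registers, (reg:DI 0) overlaps (reg:SI 1) even though the register
   numbers differ, because the DImode value covers hard regs 0 and 1.
   (Hypothetical register layout, for illustration.)  */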
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))

  switch (GET_CODE (x))
    case STRICT_LOW_PART:
      /* Overly conservative.  */

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);

      endregno = END_REGNO (x);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

      fmt = GET_RTX_FORMAT (GET_CODE (in));
      for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (reg_overlap_mentioned_p (x, XEXP (in, i)))
        else if (fmt[i] == 'E')
            for (j = XVECLEN (in, i) - 1; j >= 0; --j)
              if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))

      return reg_mentioned_p (x, in);

      /* If any register in here refers to it we return true.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (x, 0, i), 0) != 0
            && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))

      gcc_assert (CONSTANT_P (x));
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */
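
/* A minimal usage sketch (callback and variable names are hypothetical):
   collecting every hard register stored or clobbered by an insn.

       static void
       record_hard_reg_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
                               void *data)
       {
         HARD_REG_SET *stores = (HARD_REG_SET *) data;
         if (REG_P (x) && HARD_REGISTER_P (x))
           add_to_hard_reg_set (stores, GET_MODE (x), REGNO (x));
       }

       note_stores (PATTERN (insn), record_hard_reg_stores, &stores);
*/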
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);

/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
  switch (GET_CODE (body))
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For SETs we visit everything in the source, plus registers in any
           memory expression in the store, plus the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (const_rtx insn, const_rtx x)
  unsigned int regno, end_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  gcc_assert (REG_P (x));

  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

covers_regno_p (const_rtx dest, unsigned int test_regno)
  if (GET_CODE (dest) == PARALLEL)
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))

  return covers_regno_no_parallel_p (dest, test_regno);
/* Utility function for dead_or_set_p to check an individual register.  */

dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has a note of this kind.  */

find_reg_equal_equiv_note (const_rtx insn)
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))

/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

find_constant_src (const_rtx insn)
  set = single_set (insn);
      x = avoid_constant_pool_reference (SET_SRC (set));

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
          unsigned int end_regno = END_HARD_REGNO (datum);

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && REGNO (reg) <= regno
          && END_HARD_REGNO (reg) > regno)
/* Add register note with kind KIND and datum DATUM to INSN.  */
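
/* E.g. add_reg_note (insn, REG_EQUAL, GEN_INT (42)) records that the
   value set by INSN is known to equal 42.  */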
add_reg_note (rtx insn, enum reg_note kind, rtx datum)
    case REG_LABEL_TARGET:
    case REG_LABEL_OPERAND:
      /* These types of register notes use an INSN_LIST rather than an
         EXPR_LIST, so that copying is done right and dumps look
         better.  */
      note = alloc_INSN_LIST (datum, REG_NOTES (insn));
      PUT_REG_NOTE_KIND (note, kind);

      note = alloc_EXPR_LIST (kind, datum, REG_NOTES (insn));

  REG_NOTES (insn) = note;

/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (rtx insn, const_rtx note)
  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
        XEXP (link, 1) = XEXP (note, 1);

  switch (REG_NOTE_KIND (note))
      df_notes_rescan (insn);

/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

remove_reg_equal_equiv_notes (rtx insn)
  loc = &REG_NOTES (insn);
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
      else
        loc = &XEXP (*loc, 1);

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (const_rtx listp, const_rtx node)
  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (const_rtx node, rtx *listp)
  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no instructions
   should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

volatile_insn_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_insn_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (volatile_insn_p (XVECEXP (x, i, j)))

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

volatile_refs_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_refs_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (volatile_refs_p (XVECEXP (x, i, j)))

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

side_effects_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (side_effects_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (side_effects_p (XVECEXP (x, i, j)))
enum may_trap_p_flags
{
  MTP_UNALIGNED_MEMS = 1,
  MTP_AFTER_MOVE = 2
};

/* Return nonzero if evaluating rtx X might cause a trap.
   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
   unaligned memory accesses on strict alignment machines.  If
   (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even in case the
   expression cannot trap at its current location, but it might become
   trapping if moved elsewhere.  */
may_trap_p_1 (const_rtx x, unsigned flags)
  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;

  code = GET_CODE (x);

    /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:
      return targetm.unspec_may_trap_p (x, flags);

      return MEM_VOLATILE_P (x);

    /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of condition might cause its address
             to become invalid.  */
          !(flags & MTP_AFTER_MOVE)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

    /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))

      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)

      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))

      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))

      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p_1 (XEXP (x, i), flags))

      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))
/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (const_rtx x)
{
  return may_trap_p_1 (x, 0);
}

/* Return nonzero if evaluating rtx X might cause a trap, when the expression
   is moved from its current location by some optimization.  */

int
may_trap_after_code_motion_p (const_rtx x)
{
  return may_trap_p_1 (x, MTP_AFTER_MOVE);
}
/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      (set (reg:SI) (mem:SI (%fp - 7)))
      if (...)
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (const_rtx x)
{
  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
}
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))

      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))

/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (rtx x, rtx from, rtx to)
  /* The following prevents loops from occurring when we change a MEM inside
     a CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
      rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new_rtx) == CONST_INT)
          x = simplify_subreg (GET_MODE (x), new_rtx,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));

        SUBREG_REG (x) = new_rtx;

  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new_rtx) == CONST_INT)
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new_rtx, GET_MODE (XEXP (x, 0)));

        XEXP (x, 0) = new_rtx;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

replace_label (rtx *x, void *data)
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);

/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

rtx_referenced_p_1 (rtx *body, void *x)
  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to a pool constant, traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);

/* Return true if X is referenced in BODY.  */

bool
rtx_referenced_p (rtx x, rtx body)
{
  return for_each_rtx (&body, rtx_referenced_p_1, x);
}

/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */
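
/* The expected shape is (sketched; modes and most operands elided):

       (jump_insn ... (set (pc) ...))     ; JUMP_LABEL points to LABEL
     LABEL:
       (jump_insn (addr_vec [(label_ref L1) (label_ref L2) ...]))
*/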
tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))

/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          && computed_jump_p_1 (XEXP (x, i)))

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */
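
/* E.g. (set (pc) (reg:P 59)) and (set (pc) (mem:P (reg:P 59))) are
   computed jumps, while a jump through a constant-pool entry or a
   casesi pattern carrying (use (label_ref ...)) is not.  */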
computed_jump_p (const_rtx insn)
      rtx pat = PATTERN (insn);

      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
      if (JUMP_LABEL (insn) != NULL)

      if (GET_CODE (pat) == PARALLEL)
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))

/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));

  for (; format[n] != '\0'; n++)
          result = (*f) (x, data);
            /* Do not traverse sub-expressions.  */
          else if (result != 0)
            /* Stop the traversal.  */

            /* There are no sub-expressions.  */

          i = non_rtx_starting_operands[GET_CODE (*x)];
              result = for_each_rtx_1 (*x, i, f, data);

          if (XVEC (exp, n) == 0)
          for (j = 0; j < XVECLEN (exp, n); ++j)
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
                /* Do not traverse sub-expressions.  */
              else if (result != 0)
                /* Stop the traversal.  */

                /* There are no sub-expressions.  */

              i = non_rtx_starting_operands[GET_CODE (*x)];
                  result = for_each_rtx_1 (*x, i, f, data);

          /* Nothing to do.  */

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */
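
/* A minimal usage sketch (names are hypothetical): counting the REGs
   contained in an insn pattern.

       static int
       count_regs_1 (rtx *x, void *data)
       {
         if (REG_P (*x))
           ++*(int *) data;
         return 0;
       }

       int n_regs = 0;
       for_each_rtx (&PATTERN (insn), count_regs_1, &n_regs);
*/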
for_each_rtx (rtx *x, rtx_function f, void *data)
  result = (*f) (x, data);
    /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */

    /* There are no sub-expressions.  */

  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);

/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (unsigned int regno, rtx x)
  if (REG_P (x) && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))

/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the second operand
   and positive values for the first.  */
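
/* Hence in (plus (reg) (const_int 4)) the constant belongs second, and
   swap_commutative_operands_p returns nonzero for the reversed order
   (plus (const_int 4) (reg)).  */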
2884 commutative_operand_precedence (rtx op)
2886 enum rtx_code code = GET_CODE (op);
2888 /* Constants always come the second operand. Prefer "nice" constants. */
2889 if (code == CONST_INT)
2891 if (code == CONST_DOUBLE)
2893 if (code == CONST_FIXED)
2895 op = avoid_constant_pool_reference (op);
2896 code = GET_CODE (op);
2898 switch (GET_RTX_CLASS (code))
2901 if (code == CONST_INT)
2903 if (code == CONST_DOUBLE)
2905 if (code == CONST_FIXED)
2910 /* SUBREGs of objects should come second. */
2911 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2916 /* Complex expressions should come first, so decrease the priority
2917 of objects. Prefer pointer objects over non-pointer objects. */
2918 if ((REG_P (op) && REG_POINTER (op))
2919 || (MEM_P (op) && MEM_POINTER (op)))
2923 case RTX_COMM_ARITH:
2924 /* Prefer operands that are themselves commutative to be first.
2925 This helps to make things linear. In particular,
2926 (and (and (reg) (reg)) (not (reg))) is canonical. */
2930 /* If only one operand is a binary expression, it will be the first
2931 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2932 is canonical, although it will usually be further simplified. */
2936 /* Then prefer NEG and NOT. */
2937 if (code == NEG || code == NOT)
2945 /* Return 1 iff it is necessary to swap the operands of a commutative
2946 operation in order to canonicalize the expression. */
2949 swap_commutative_operands_p (rtx x, rtx y)
2951 return (commutative_operand_precedence (x)
2952 < commutative_operand_precedence (y));
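/* Illustrative example (not part of the original source): since
   constants get the lowest (most negative) precedence, a caller such
   as simplify_gen_binary turns (plus (const_int 4) (reg)) into the
   canonical (plus (reg) (const_int 4)) with the usual idiom:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }
*/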
2955 /* Return 1 if X is an autoincrement side effect and the register is
2956 not the stack pointer. */
2958 auto_inc_p (const_rtx x)
2960 switch (GET_CODE (x))
2968 /* There are no REG_INC notes for SP. */
2969 if (XEXP (x, 0) != stack_pointer_rtx)
2977 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2979 loc_mentioned_in_p (rtx *loc, const_rtx in)
2988 code = GET_CODE (in);
2989 fmt = GET_RTX_FORMAT (code);
2990 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2994 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
2997 else if (fmt[i] == 'E')
2998 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2999 if (loc == &XVECEXP (in, i, j)
3000 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3006 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3007 and SUBREG_BYTE, return the bit offset where the subreg begins
3008 (counting from the least significant bit of the operand). */
3011 subreg_lsb_1 (enum machine_mode outer_mode,
3012 enum machine_mode inner_mode,
3013 unsigned int subreg_byte)
3015 unsigned int bitpos;
3019 /* A paradoxical subreg begins at bit position 0. */
3020 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3023 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3024 /* If the subreg crosses a word boundary, ensure that
3025 it also begins and ends on a word boundary. */
3026 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3027 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3028 && (subreg_byte % UNITS_PER_WORD
3029 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3031 if (WORDS_BIG_ENDIAN)
3032 word = (GET_MODE_SIZE (inner_mode)
3033 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3035 word = subreg_byte / UNITS_PER_WORD;
3036 bitpos = word * BITS_PER_WORD;
3038 if (BYTES_BIG_ENDIAN)
3039 byte = (GET_MODE_SIZE (inner_mode)
3040 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3042 byte = subreg_byte % UNITS_PER_WORD;
3043 bitpos += byte * BITS_PER_UNIT;
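/* Worked example (not part of the original source), assuming 8-bit
   units and 4-byte words: for (subreg:QI (reg:SI) 3), a little-endian
   target gives word = 3 / 4 = 0 and byte = 3 % 4 = 3, so the subreg
   begins at bit 24; a fully big-endian target gives
   byte = (4 - (3 + 1)) % 4 = 0, so the same SUBREG_BYTE names the
   least significant byte at bit 0.  */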
3048 /* Given a subreg X, return the bit offset where the subreg begins
3049 (counting from the least significant bit of the reg). */
3052 subreg_lsb (const_rtx x)
3054 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3058 /* Fill in information about a subreg of a hard register.
3059 xregno - A regno of an inner hard subreg_reg (or what will become one).
3060 xmode - The mode of xregno.
3061 offset - The byte offset.
3062 ymode - The mode of a top level SUBREG (or what may become one).
3063 info - Pointer to structure to fill in. */
3065 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3066 unsigned int offset, enum machine_mode ymode,
3067 struct subreg_info *info)
3069 int nregs_xmode, nregs_ymode;
3070 int mode_multiple, nregs_multiple;
3071 int offset_adj, y_offset, y_offset_adj;
3072 int regsize_xmode, regsize_ymode;
3075 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3079 /* If there are holes in a non-scalar mode in registers, we expect
3080 that it is made up of its units concatenated together. */
3081 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3083 enum machine_mode xmode_unit;
3085 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3086 if (GET_MODE_INNER (xmode) == VOIDmode)
3089 xmode_unit = GET_MODE_INNER (xmode);
3090 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3091 gcc_assert (nregs_xmode
3092 == (GET_MODE_NUNITS (xmode)
3093 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3094 gcc_assert (hard_regno_nregs[xregno][xmode]
3095 == (hard_regno_nregs[xregno][xmode_unit]
3096 * GET_MODE_NUNITS (xmode)));
3098 /* You can only ask for a SUBREG of a value with holes in the middle
3099 if you don't cross the holes. (Such a SUBREG should be done by
3100 picking a different register class, or doing it in memory if
3101 necessary.) An example of a value with holes is XCmode on 32-bit
3102 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3103 3 for each part, but in memory it's two 128-bit parts.
3104 Padding is assumed to be at the end (not necessarily the 'high part')
3105 of XCmode.  */
3106 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3107 < GET_MODE_NUNITS (xmode))
3108 && (offset / GET_MODE_SIZE (xmode_unit)
3109 != ((offset + GET_MODE_SIZE (ymode) - 1)
3110 / GET_MODE_SIZE (xmode_unit))))
3112 info->representable_p = false;
3117 nregs_xmode = hard_regno_nregs[xregno][xmode];
3119 nregs_ymode = hard_regno_nregs[xregno][ymode];
3121 /* Paradoxical subregs are otherwise valid. */
3124 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3126 info->representable_p = true;
3127 /* If this is a big endian paradoxical subreg, which uses more
3128 actual hard registers than the original register, we must
3129 return a negative offset so that we find the proper highpart
3130 of the register.  */
3131 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3132 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3133 info->offset = nregs_xmode - nregs_ymode;
3136 info->nregs = nregs_ymode;
3140 /* If registers store different numbers of bits in the different
3141 modes, we cannot generally form this subreg. */
3142 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3143 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3144 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3145 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3147 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3148 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3149 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3151 info->representable_p = false;
3153 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3154 info->offset = offset / regsize_xmode;
3157 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3159 info->representable_p = false;
3161 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3162 info->offset = offset / regsize_xmode;
3167 /* Lowpart subregs are otherwise valid. */
3168 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3170 info->representable_p = true;
3173 if (offset == 0 || nregs_xmode == nregs_ymode)
3176 info->nregs = nregs_ymode;
3181 /* This should always pass; otherwise we don't know how to verify
3182 the constraint. These conditions may be relaxed, but
3183 subreg_regno_offset would need to be redesigned. */
3184 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3185 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3187 /* The XMODE value can be seen as a vector of NREGS_XMODE
3188 values. The subreg must represent a lowpart of a given field.
3189 Compute what field it is. */
3190 offset_adj = offset;
3191 offset_adj -= subreg_lowpart_offset (ymode,
3192 mode_for_size (GET_MODE_BITSIZE (xmode)
3196 /* The size of YMODE must not be greater than the size of XMODE. */
3197 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3198 gcc_assert (mode_multiple != 0);
3200 y_offset = offset / GET_MODE_SIZE (ymode);
3201 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3202 nregs_multiple = nregs_xmode / nregs_ymode;
3204 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3205 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3209 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3212 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3213 info->nregs = nregs_ymode;
3216 /* This function returns the regno offset of a subreg expression.
3217 xregno - A regno of an inner hard subreg_reg (or what will become one).
3218 xmode - The mode of xregno.
3219 offset - The byte offset.
3220 ymode - The mode of a top level SUBREG (or what may become one).
3221 RETURN - The regno offset which would be used. */
3223 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3224 unsigned int offset, enum machine_mode ymode)
3226 struct subreg_info info;
3227 subreg_get_info (xregno, xmode, offset, ymode, &info);
3231 /* This function returns true when the offset is representable as a
3232 subreg of the given regno.
3233 xregno - A regno of an inner hard subreg_reg (or what will become one).
3234 xmode - The mode of xregno.
3235 offset - The byte offset.
3236 ymode - The mode of a top level SUBREG (or what may become one).
3237 RETURN - Whether the offset is representable. */
3239 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3240 unsigned int offset, enum machine_mode ymode)
3242 struct subreg_info info;
3243 subreg_get_info (xregno, xmode, offset, ymode, &info);
3244 return info.representable_p;
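/* Worked example (not part of the original source): on a little-endian
   target where DImode occupies two SImode-sized hard registers,
   (subreg:SI (reg:DI) 4) gives nregs_xmode = 2, nregs_ymode = 1,
   mode_multiple = nregs_multiple = 2 and y_offset = 1, so the subreg
   is representable and subreg_regno_offset returns 1: the subreg
   names the second of the two hard registers.  */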
3247 /* Return the number of a YMODE register to which
3249 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3251 can be simplified. Return -1 if the subreg can't be simplified.
3253 XREGNO is a hard register number. */
3256 simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3257 unsigned int offset, enum machine_mode ymode)
3259 struct subreg_info info;
3260 unsigned int yregno;
3262 #ifdef CANNOT_CHANGE_MODE_CLASS
3263 /* Give the backend a chance to disallow the mode change. */
3264 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3265 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3266 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3270 /* We shouldn't simplify stack-related registers. */
3271 if ((!reload_completed || frame_pointer_needed)
3272 && (xregno == FRAME_POINTER_REGNUM
3273 || xregno == HARD_FRAME_POINTER_REGNUM))
3276 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3277 && xregno == ARG_POINTER_REGNUM)
3280 if (xregno == STACK_POINTER_REGNUM)
3283 /* Try to get the register offset. */
3284 subreg_get_info (xregno, xmode, offset, ymode, &info);
3285 if (!info.representable_p)
3288 /* Make sure that the offsetted register value is in range. */
3289 yregno = xregno + info.offset;
3290 if (!HARD_REGISTER_NUM_P (yregno))
3293 /* See whether (reg:YMODE YREGNO) is valid.
3295 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3296 This is a kludge to work around how float/complex arguments are passed
3297 on 32-bit SPARC and should be fixed. */
3298 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3299 && HARD_REGNO_MODE_OK (xregno, xmode))
3302 return (int) yregno;
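/* Illustrative sketch (not part of the original source): a caller
   that wants to fold a hard-register SUBREG into a plain REG.  The
   variable names new_regno and newx are hypothetical.

     int new_regno = simplify_subreg_regno (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x),
                                            GET_MODE (x));
     if (new_regno >= 0)
       newx = gen_rtx_REG (GET_MODE (x), new_regno);
*/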
3305 /* Return the final regno that a subreg expression refers to. */
3307 subreg_regno (const_rtx x)
3310 rtx subreg = SUBREG_REG (x);
3311 int regno = REGNO (subreg);
3313 ret = regno + subreg_regno_offset (regno,
3321 /* Return the number of registers that a subreg expression refers
3322 to.  */
3324 subreg_nregs (const_rtx x)
3326 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3329 /* Return the number of registers that a subreg REG with REGNO
3330 expression refers to. This is a copy of rtlanal.c:subreg_nregs,
3331 changed so that the regno can be passed in. */
3334 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3336 struct subreg_info info;
3337 rtx subreg = SUBREG_REG (x);
3339 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3345 struct parms_set_data
3351 /* Helper function for noticing stores to parameter registers. */
3353 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3355 struct parms_set_data *const d = (struct parms_set_data *) data;
3356 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3357 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3359 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3364 /* Look backward for the first parameter to be loaded.
3365 Note that loads of all parameters will not necessarily be
3366 found if CSE has eliminated some of them (e.g., an argument
3367 to the outer function is passed down as a parameter).
3368 Do not skip BOUNDARY. */
3370 find_first_parameter_load (rtx call_insn, rtx boundary)
3372 struct parms_set_data parm;
3373 rtx p, before, first_set;
3375 /* Since different machines initialize their parameter registers
3376 in different orders, assume nothing. Collect the set of all
3377 parameter registers. */
3378 CLEAR_HARD_REG_SET (parm.regs);
3380 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3381 if (GET_CODE (XEXP (p, 0)) == USE
3382 && REG_P (XEXP (XEXP (p, 0), 0)))
3384 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3386 /* We only care about registers which can hold function
3387 arguments.  */
3388 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3391 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3395 first_set = call_insn;
3397 /* Search backward for the first set of a register in this set. */
3398 while (parm.nregs && before != boundary)
3400 before = PREV_INSN (before);
3402 /* It is possible that some loads got CSEed from one call to
3403 another. Stop in that case. */
3404 if (CALL_P (before))
3407 /* Our caller must either ensure that we will find all sets
3408 (in case the code has not been optimized yet), or take care
3409 of possible labels by setting BOUNDARY to the preceding
3410 CODE_LABEL.  */
3411 if (LABEL_P (before))
3413 gcc_assert (before == boundary);
3417 if (INSN_P (before))
3419 int nregs_old = parm.nregs;
3420 note_stores (PATTERN (before), parms_set, &parm);
3421 /* If we found something that did not set a parameter reg,
3422 we're done. Do not keep going, as that might result
3423 in hoisting an insn before the setting of a pseudo
3424 that is used by the hoisted insn. */
3425 if (nregs_old != parm.nregs)
3434 /* Return true if we should avoid inserting code between INSN and the
3435 preceding call instruction. */
3438 keep_with_call_p (const_rtx insn)
3442 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3444 if (REG_P (SET_DEST (set))
3445 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3446 && fixed_regs[REGNO (SET_DEST (set))]
3447 && general_operand (SET_SRC (set), VOIDmode))
3449 if (REG_P (SET_SRC (set))
3450 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3451 && REG_P (SET_DEST (set))
3452 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3454 /* There may be a stack pop just after the call and before the store
3455 of the return register. Search for the actual store when deciding
3456 if we can break or not. */
3457 if (SET_DEST (set) == stack_pointer_rtx)
3459 /* This CONST_CAST is okay because next_nonnote_insn just
3460 returns its argument and we assign it to a const_rtx
3461 variable.  */
3462 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX (insn));
3463 if (i2 && keep_with_call_p (i2))
3470 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3471 to non-complex jumps. That is, direct unconditional, conditional,
3472 and tablejumps, but not computed jumps or returns. It also does
3473 not apply to the fallthru case of a conditional jump. */
3476 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3478 rtx tmp = JUMP_LABEL (jump_insn);
3483 if (tablejump_p (jump_insn, NULL, &tmp))
3485 rtvec vec = XVEC (PATTERN (tmp),
3486 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3487 int i, veclen = GET_NUM_ELEM (vec);
3489 for (i = 0; i < veclen; ++i)
3490 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3494 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3501 /* Return an estimate of the cost of computing rtx X.
3502 One use is in cse, to decide which expression to keep in the hash table.
3503 Another is in rtl generation, to pick the cheapest way to multiply.
3504 Other uses like the latter are expected in the future. */
3507 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3517 /* Compute the default costs of certain things.
3518 Note that targetm.rtx_costs can override the defaults. */
3520 code = GET_CODE (x);
3524 total = COSTS_N_INSNS (5);
3530 total = COSTS_N_INSNS (7);
3533 /* Used in combine.c as a marker. */
3537 total = COSTS_N_INSNS (1);
3547 /* If we can't tie these modes, make this expensive. The larger
3548 the mode, the more expensive it is. */
3549 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3550 return COSTS_N_INSNS (2
3551 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3555 if (targetm.rtx_costs (x, code, outer_code, &total))
3560 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3561 which is already in TOTAL. */
3563 fmt = GET_RTX_FORMAT (code);
3564 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3566 total += rtx_cost (XEXP (x, i), code);
3567 else if (fmt[i] == 'E')
3568 for (j = 0; j < XVECLEN (x, i); j++)
3569 total += rtx_cost (XVECEXP (x, i, j), code);
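/* Illustrative sketch (not part of the original source): passes such
   as cse and expmed compare the costs of equivalent expressions; the
   variable names here are hypothetical.

     if (rtx_cost (shift_form, SET) <= rtx_cost (mult_form, SET))
       ... prefer the shift form of the multiplication ...  */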
3574 /* Return the cost of address expression X.
3575 Expect that X is a properly formed address reference. */
3578 address_cost (rtx x, enum machine_mode mode)
3580 /* We may be asked for the cost of various unusual addresses, such as the
3581 operands of a push instruction. It is not worthwhile to complicate
3582 the target hook for such cases. */
3584 if (!memory_address_p (mode, x))
3587 return targetm.address_cost (x);
3590 /* If the target doesn't override, compute the cost as with arithmetic. */
3593 default_address_cost (rtx x)
3595 return rtx_cost (x, MEM);
3599 unsigned HOST_WIDE_INT
3600 nonzero_bits (const_rtx x, enum machine_mode mode)
3602 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3606 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3608 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
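/* Worked examples (not part of the original source), assuming 32-bit
   SImode: nonzero_bits on (and:SI x (const_int 255)) yields at most
   0xff, since the AND clears all higher bits; num_sign_bit_copies on
   (sign_extend:SI (reg:QI)) yields at least 25, since bits 7 through
   31 are all copies of the QImode sign bit.  */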
3611 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3612 It avoids exponential behavior in nonzero_bits1 when X has
3613 identical subexpressions on the first or the second level. */
3615 static unsigned HOST_WIDE_INT
3616 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3617 enum machine_mode known_mode,
3618 unsigned HOST_WIDE_INT known_ret)
3620 if (x == known_x && mode == known_mode)
3623 /* Try to find identical subexpressions. If found call
3624 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3625 precomputed value for the subexpression as KNOWN_RET. */
3627 if (ARITHMETIC_P (x))
3629 rtx x0 = XEXP (x, 0);
3630 rtx x1 = XEXP (x, 1);
3632 /* Check the first level. */
3634 return nonzero_bits1 (x, mode, x0, mode,
3635 cached_nonzero_bits (x0, mode, known_x,
3636 known_mode, known_ret));
3638 /* Check the second level. */
3639 if (ARITHMETIC_P (x0)
3640 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3641 return nonzero_bits1 (x, mode, x1, mode,
3642 cached_nonzero_bits (x1, mode, known_x,
3643 known_mode, known_ret));
3645 if (ARITHMETIC_P (x1)
3646 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3647 return nonzero_bits1 (x, mode, x0, mode,
3648 cached_nonzero_bits (x0, mode, known_x,
3649 known_mode, known_ret));
3652 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3655 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3656 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3657 is less useful. We can't allow both, because that results in exponential
3658 run time recursion. There is a nullstone testcase that triggered
3659 this. This macro avoids accidental uses of num_sign_bit_copies. */
3660 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3662 /* Given an expression, X, compute which bits in X can be nonzero.
3663 We don't care about bits outside of those defined in MODE.
3665 For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
3666 an arithmetic operation, we can do better. */
3668 static unsigned HOST_WIDE_INT
3669 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3670 enum machine_mode known_mode,
3671 unsigned HOST_WIDE_INT known_ret)
3673 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3674 unsigned HOST_WIDE_INT inner_nz;
3676 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3678 /* For floating-point values, assume all bits are needed. */
3679 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3682 /* If X is wider than MODE, use its mode instead. */
3683 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3685 mode = GET_MODE (x);
3686 nonzero = GET_MODE_MASK (mode);
3687 mode_width = GET_MODE_BITSIZE (mode);
3690 if (mode_width > HOST_BITS_PER_WIDE_INT)
3691 /* Our only callers in this case look for single bit values. So
3692 just return the mode mask. Those tests will then be false. */
3695 #ifndef WORD_REGISTER_OPERATIONS
3696 /* If MODE is wider than X, but both are a single word for both the host
3697 and target machines, we can compute this from which bits of the
3698 object might be nonzero in its own mode, taking into account the fact
3699 that on many CISC machines, accessing an object in a wider mode
3700 causes the high-order bits to become undefined. So they are
3701 not known to be zero. */
3703 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3704 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3705 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3706 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3708 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3709 known_x, known_mode, known_ret);
3710 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3715 code = GET_CODE (x);
3719 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3720 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3721 all the bits above ptr_mode are known to be zero. */
3722 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3724 nonzero &= GET_MODE_MASK (ptr_mode);
3727 /* Include declared information about alignment of pointers. */
3728 /* ??? We don't properly preserve REG_POINTER changes across
3729 pointer-to-integer casts, so we can't trust it except for
3730 things that we know must be pointers. See execute/960116-1.c. */
3731 if ((x == stack_pointer_rtx
3732 || x == frame_pointer_rtx
3733 || x == arg_pointer_rtx)
3734 && REGNO_POINTER_ALIGN (REGNO (x)))
3736 unsigned HOST_WIDE_INT alignment
3737 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3739 #ifdef PUSH_ROUNDING
3740 /* If PUSH_ROUNDING is defined, it is possible for the
3741 stack to be momentarily aligned only to that amount,
3742 so we pick the least alignment. */
3743 if (x == stack_pointer_rtx && PUSH_ARGS)
3744 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3748 nonzero &= ~(alignment - 1);
3752 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3753 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3754 known_mode, known_ret,
3758 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
3759 known_mode, known_ret);
3761 return nonzero_for_hook;
3765 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3766 /* If X is negative in MODE, sign-extend the value. */
3767 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3768 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3769 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3775 #ifdef LOAD_EXTEND_OP
3776 /* On many, if not most, RISC machines, reading a byte from memory
3777 zeros the rest of the register. Noticing that fact saves a lot
3778 of extra zero-extends. */
3779 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3780 nonzero &= GET_MODE_MASK (GET_MODE (x));
3785 case UNEQ: case LTGT:
3786 case GT: case GTU: case UNGT:
3787 case LT: case LTU: case UNLT:
3788 case GE: case GEU: case UNGE:
3789 case LE: case LEU: case UNLE:
3790 case UNORDERED: case ORDERED:
3791 /* If this produces an integer result, we know which bits are set.
3792 Code here used to clear bits outside the mode of X, but that is
3793 wrong.  */
3794 /* Mind that MODE is the mode the caller wants to look at this
3795 operation in, and not the actual operation mode. We can wind
3796 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3797 that describes the results of a vector compare. */
3798 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3799 && mode_width <= HOST_BITS_PER_WIDE_INT)
3800 nonzero = STORE_FLAG_VALUE;
3805 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3806 and num_sign_bit_copies. */
3807 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3808 == GET_MODE_BITSIZE (GET_MODE (x)))
3812 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3813 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3818 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3819 and num_sign_bit_copies. */
3820 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3821 == GET_MODE_BITSIZE (GET_MODE (x)))
3827 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3828 known_x, known_mode, known_ret)
3829 & GET_MODE_MASK (mode));
3833 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3834 known_x, known_mode, known_ret);
3835 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3836 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3840 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3841 Otherwise, show all the bits in the outer mode but not the inner
3842 may be nonzero.  */
3843 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3844 known_x, known_mode, known_ret);
3845 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3847 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3849 & (((HOST_WIDE_INT) 1
3850 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3851 inner_nz |= (GET_MODE_MASK (mode)
3852 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3855 nonzero &= inner_nz;
3859 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3860 known_x, known_mode, known_ret)
3861 & cached_nonzero_bits (XEXP (x, 1), mode,
3862 known_x, known_mode, known_ret);
3866 case UMIN: case UMAX: case SMIN: case SMAX:
3868 unsigned HOST_WIDE_INT nonzero0 =
3869 cached_nonzero_bits (XEXP (x, 0), mode,
3870 known_x, known_mode, known_ret);
3872 /* Don't call nonzero_bits for the second time if it cannot change
3873 anything.  */
3874 if ((nonzero & nonzero0) != nonzero)
3876 | cached_nonzero_bits (XEXP (x, 1), mode,
3877 known_x, known_mode, known_ret);
3881 case PLUS: case MINUS:
3883 case DIV: case UDIV:
3884 case MOD: case UMOD:
3885 /* We can apply the rules of arithmetic to compute the number of
3886 high- and low-order zero bits of these operations. We start by
3887 computing the width (position of the highest-order nonzero bit)
3888 and the number of low-order zero bits for each value. */
3890 unsigned HOST_WIDE_INT nz0 =
3891 cached_nonzero_bits (XEXP (x, 0), mode,
3892 known_x, known_mode, known_ret);
3893 unsigned HOST_WIDE_INT nz1 =
3894 cached_nonzero_bits (XEXP (x, 1), mode,
3895 known_x, known_mode, known_ret);
3896 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3897 int width0 = floor_log2 (nz0) + 1;
3898 int width1 = floor_log2 (nz1) + 1;
3899 int low0 = floor_log2 (nz0 & -nz0);
3900 int low1 = floor_log2 (nz1 & -nz1);
3901 HOST_WIDE_INT op0_maybe_minusp
3902 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3903 HOST_WIDE_INT op1_maybe_minusp
3904 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3905 unsigned int result_width = mode_width;
3911 result_width = MAX (width0, width1) + 1;
3912 result_low = MIN (low0, low1);
3915 result_low = MIN (low0, low1);
3918 result_width = width0 + width1;
3919 result_low = low0 + low1;
3924 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3925 result_width = width0;
3930 result_width = width0;
3935 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3936 result_width = MIN (width0, width1);
3937 result_low = MIN (low0, low1);
3942 result_width = MIN (width0, width1);
3943 result_low = MIN (low0, low1);
3949 if (result_width < mode_width)
3950 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3953 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
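/* Worked example (not part of the original source): for a MULT whose
   operands have nonzero bits 0x6 and 0xc, width0 = 3, width1 = 4,
   low0 = 1 and low1 = 2, so result_width = 7 and result_low = 3; the
   mask computed above is then 0x78, and an actual product such as
   6 * 12 = 72 (0x48) indeed fits within it.  */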
3955 #ifdef POINTERS_EXTEND_UNSIGNED
3956 /* If pointers extend unsigned and this is an addition or subtraction
3957 to a pointer in Pmode, all the bits above ptr_mode are known to be
3958 zero.  */
3959 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3960 && (code == PLUS || code == MINUS)
3961 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3962 nonzero &= GET_MODE_MASK (ptr_mode);
3968 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3969 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3970 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3974 /* If this is a SUBREG formed for a promoted variable that has
3975 been zero-extended, we know that at least the high-order bits
3976 are zero, though others might be too. */
3978 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3979 nonzero = GET_MODE_MASK (GET_MODE (x))
3980 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3981 known_x, known_mode, known_ret);
3983 /* If the inner mode is a single word for both the host and target
3984 machines, we can compute this from which bits of the inner
3985 object might be nonzero. */
3986 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3987 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3988 <= HOST_BITS_PER_WIDE_INT))
3990 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3991 known_x, known_mode, known_ret);
3993 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3994 /* If this is a typical RISC machine, we only have to worry
3995 about the way loads are extended. */
3996 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3998 & (((unsigned HOST_WIDE_INT) 1
3999 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4001 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4002 || !MEM_P (SUBREG_REG (x)))
4005 /* On many CISC machines, accessing an object in a wider mode
4006 causes the high-order bits to become undefined. So they are
4007 not known to be zero. */
4008 if (GET_MODE_SIZE (GET_MODE (x))
4009 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4010 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4011 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4020 /* The nonzero bits are in two classes: any bits within MODE
4021 that aren't in GET_MODE (x) are always significant. The rest of the
4022 nonzero bits are those that are significant in the operand of
4023 the shift when shifted the appropriate number of bits. This
4024 shows that high-order bits are cleared by the right shift and
4025 low-order bits by left shifts. */
4026 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4027 && INTVAL (XEXP (x, 1)) >= 0
4028 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4030 enum machine_mode inner_mode = GET_MODE (x);
4031 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4032 int count = INTVAL (XEXP (x, 1));
4033 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4034 unsigned HOST_WIDE_INT op_nonzero =
4035 cached_nonzero_bits (XEXP (x, 0), mode,
4036 known_x, known_mode, known_ret);
4037 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4038 unsigned HOST_WIDE_INT outer = 0;
4040 if (mode_width > width)
4041 outer = (op_nonzero & nonzero & ~mode_mask);
4043 if (code == LSHIFTRT)
4045 else if (code == ASHIFTRT)
4049 /* If the sign bit may have been nonzero before the shift, we
4050 need to mark all the places it could have been copied to
4051 by the shift as possibly nonzero. */
4052 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4053 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4055 else if (code == ASHIFT)
4058 inner = ((inner << (count % width)
4059 | (inner >> (width - (count % width)))) & mode_mask);
4061 nonzero &= (outer | inner);
4067 /* This is at most the number of bits in the mode. */
4068 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4072 /* If CLZ has a known value at zero, then the nonzero bits are
4073 that value, plus the number of bits in the mode minus one. */
4074 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4075 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4081 /* If CTZ has a known value at zero, then the nonzero bits are
4082 that value, plus the number of bits in the mode minus one. */
4083 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4084 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4095 unsigned HOST_WIDE_INT nonzero_true =
4096 cached_nonzero_bits (XEXP (x, 1), mode,
4097 known_x, known_mode, known_ret);
4099 /* Don't call nonzero_bits for the second time if it cannot change
4100 anything.  */
4101 if ((nonzero & nonzero_true) != nonzero)
4102 nonzero &= nonzero_true
4103 | cached_nonzero_bits (XEXP (x, 2), mode,
4104 known_x, known_mode, known_ret);
4115 /* See the macro definition above. */
4116 #undef cached_num_sign_bit_copies
4119 /* The function cached_num_sign_bit_copies is a wrapper around
4120 num_sign_bit_copies1. It avoids exponential behavior in
4121 num_sign_bit_copies1 when X has identical subexpressions on the
4122 first or the second level. */
4125 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4126 enum machine_mode known_mode,
4127 unsigned int known_ret)
4129 if (x == known_x && mode == known_mode)
4132 /* Try to find identical subexpressions. If found call
4133 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4134 the precomputed value for the subexpression as KNOWN_RET. */
4136 if (ARITHMETIC_P (x))
4138 rtx x0 = XEXP (x, 0);
4139 rtx x1 = XEXP (x, 1);
4141 /* Check the first level. */
4144 num_sign_bit_copies1 (x, mode, x0, mode,
4145 cached_num_sign_bit_copies (x0, mode, known_x,
4149 /* Check the second level. */
4150 if (ARITHMETIC_P (x0)
4151 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4153 num_sign_bit_copies1 (x, mode, x1, mode,
4154 cached_num_sign_bit_copies (x1, mode, known_x,
4158 if (ARITHMETIC_P (x1)
4159 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4161 num_sign_bit_copies1 (x, mode, x0, mode,
4162 cached_num_sign_bit_copies (x0, mode, known_x,
4167 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4170 /* Return the number of bits at the high-order end of X that are known to
4171 be equal to the sign bit. X will be used in mode MODE; if MODE is
4172 VOIDmode, X will be used in its own mode. The returned value will always
4173 be between 1 and the number of bits in MODE. */
4176 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4177 enum machine_mode known_mode,
4178 unsigned int known_ret)
4180 enum rtx_code code = GET_CODE (x);
4181 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4182 int num0, num1, result;
4183 unsigned HOST_WIDE_INT nonzero;
4185 /* If we weren't given a mode, use the mode of X. If the mode is still
4186 VOIDmode, we don't know anything. Likewise if one of the modes is
4187 floating-point.  */
4189 if (mode == VOIDmode)
4190 mode = GET_MODE (x);
4192 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4195 /* For a smaller object, just ignore the high bits. */
4196 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4198 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4199 known_x, known_mode, known_ret);
4201 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4204 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4206 #ifndef WORD_REGISTER_OPERATIONS
4207 /* If this machine does not do all register operations on the entire
4208 register and MODE is wider than the mode of X, we can say nothing
4209 at all about the high-order bits. */
4212 /* Likewise on machines that do, if the mode of the object is smaller
4213 than a word and loads of that size don't sign extend, we can say
4214 nothing about the high order bits. */
4215 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4216 #ifdef LOAD_EXTEND_OP
4217 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4228 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4229 /* If pointers extend signed and this is a pointer in Pmode, say that
4230 all the bits above ptr_mode are known to be sign bit copies. */
4231 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4233 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4237 unsigned int copies_for_hook = 1, copies = 1;
4238 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4239 known_mode, known_ret,
4243 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4244 known_mode, known_ret);
4246 if (copies > 1 || copies_for_hook > 1)
4247 return MAX (copies, copies_for_hook);
4249 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4254 #ifdef LOAD_EXTEND_OP
4255 /* Some RISC machines sign-extend all loads smaller than a word. */
4256 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4257 return MAX (1, ((int) bitwidth
4258 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4263 /* If the constant is negative, take its 1's complement and remask.
4264 Then see how many zero bits we have. */
4265 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4266 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4267 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4268 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4270 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
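/* Worked example (not part of the original source), assuming 32-bit
   SImode: for (const_int -4) (0xfffffffc), the complement is 3,
   floor_log2 (3) = 1, and the result is 32 - 1 - 1 = 30; the
   high-order 30 bits of -4 are indeed all copies of the sign bit.  */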
4273 /* If this is a SUBREG for a promoted object that is sign-extended
4274 and we are looking at it in a wider mode, we know that at least the
4275 high-order bits are sign bit copies. */
4277 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4279 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4280 known_x, known_mode, known_ret);
4281 return MAX ((int) bitwidth
4282 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4286 /* For a smaller object, just ignore the high bits. */
4287 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4289 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4290 known_x, known_mode, known_ret);
4291 return MAX (1, (num0
4292 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4296 #ifdef WORD_REGISTER_OPERATIONS
4297 #ifdef LOAD_EXTEND_OP
4298 /* For paradoxical SUBREGs on machines where all register operations
4299 affect the entire register, just look inside. Note that we are
4300 passing MODE to the recursive call, so the number of sign bit copies
4301 will remain relative to that mode, not the inner mode. */
4303 /* This works only if loads sign extend. Otherwise, if we get a
4304 reload for the inner part, it may be loaded from the stack, and
4305 then we lose all sign bit copies that existed before the store
4306 to the stack.  */
4308 if ((GET_MODE_SIZE (GET_MODE (x))
4309 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4310 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4311 && MEM_P (SUBREG_REG (x)))
4312 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4313 known_x, known_mode, known_ret);
4319 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4320 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4324 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4325 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4326 known_x, known_mode, known_ret));
4329 /* For a smaller object, just ignore the high bits. */
4330 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4331 known_x, known_mode, known_ret);
4332 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4336 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4337 known_x, known_mode, known_ret);
4339 case ROTATE: case ROTATERT:
4340 /* If we are rotating left by a number of bits less than the number
4341 of sign bit copies, we can just subtract that amount from the
4342 number.  */
4343 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4344 && INTVAL (XEXP (x, 1)) >= 0
4345 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4347 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4348 known_x, known_mode, known_ret);
4349 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4350 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4355 /* In general, this subtracts one sign bit copy. But if the value
4356 is known to be positive, the number of sign bit copies is the
4357 same as that of the input. Finally, if the input has just one bit
4358 that might be nonzero, all the bits are copies of the sign bit. */
4359 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4360 known_x, known_mode, known_ret);
4361 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4362 return num0 > 1 ? num0 - 1 : 1;
4364 nonzero = nonzero_bits (XEXP (x, 0), mode);
4369 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4374 case IOR: case AND: case XOR:
4375 case SMIN: case SMAX: case UMIN: case UMAX:
4376 /* Logical operations will preserve the number of sign-bit copies.
4377 MIN and MAX operations always return one of the operands. */
4378 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4379 known_x, known_mode, known_ret);
4380 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4381 known_x, known_mode, known_ret);
4383 /* If num1 is clearing some of the top bits then regardless of
4384 the other term, we are guaranteed to have at least that many
4385 high-order zero bits. */
4388 && bitwidth <= HOST_BITS_PER_WIDE_INT
4389 && GET_CODE (XEXP (x, 1)) == CONST_INT
4390 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4393 /* Similarly for IOR when setting high-order bits. */
4396 && bitwidth <= HOST_BITS_PER_WIDE_INT
4397 && GET_CODE (XEXP (x, 1)) == CONST_INT
4398 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4401 return MIN (num0, num1);
4403 case PLUS: case MINUS:
4404 /* For addition and subtraction, we can have a 1-bit carry. However,
4405 if we are subtracting 1 from a positive number, there will not
4406 be such a carry. Furthermore, if the positive number is known to
4407 be 0 or 1, we know the result is either -1 or 0. */
4409 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4410 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4412 nonzero = nonzero_bits (XEXP (x, 0), mode);
4413 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4414 return (nonzero == 1 || nonzero == 0 ? bitwidth
4415 : bitwidth - floor_log2 (nonzero) - 1);
4418 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4419 known_x, known_mode, known_ret);
4420 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4421 known_x, known_mode, known_ret);
4422 result = MAX (1, MIN (num0, num1) - 1);
4424 #ifdef POINTERS_EXTEND_UNSIGNED
4425 /* If pointers extend signed and this is an addition or subtraction
4426 to a pointer in Pmode, all the bits above ptr_mode are known to be
4427 sign bit copies.  */
4428 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4429 && (code == PLUS || code == MINUS)
4430 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4431 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4432 - GET_MODE_BITSIZE (ptr_mode) + 1),
4438 /* The number of bits of the product is the sum of the number of
4439 bits of both terms. However, unless one of the terms is known
4440 to be positive, we must allow for an additional bit since negating
4441 a negative number can remove one sign bit copy. */
4443 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4444 known_x, known_mode, known_ret);
4445 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4446 known_x, known_mode, known_ret);
4448 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4450 && (bitwidth > HOST_BITS_PER_WIDE_INT
4451 || (((nonzero_bits (XEXP (x, 0), mode)
4452 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4453 && ((nonzero_bits (XEXP (x, 1), mode)
4454 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4457 return MAX (1, result);
4460 /* The result must be <= the first operand. If the first operand
4461 has the high bit set, we know nothing about the number of sign
4462 bit copies.  */
4463 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4465 else if ((nonzero_bits (XEXP (x, 0), mode)
4466 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4469 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4470 known_x, known_mode, known_ret);
4473 /* The result must be <= the second operand. */
4474 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4475 known_x, known_mode, known_ret);
4478 /* Similar to unsigned division, except that we have to worry about
4479 the case where the divisor is negative, in which case we have
4480 to add 1.  */
4481 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4482 known_x, known_mode, known_ret);
4484 && (bitwidth > HOST_BITS_PER_WIDE_INT
4485 || (nonzero_bits (XEXP (x, 1), mode)
4486 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4492 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4493 known_x, known_mode, known_ret);
4495 && (bitwidth > HOST_BITS_PER_WIDE_INT
4496 || (nonzero_bits (XEXP (x, 1), mode)
4497 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4503 /* Shifts by a constant add to the number of bits equal to the
4504 sign bit.  */
4505 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4506 known_x, known_mode, known_ret);
4507 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4508 && INTVAL (XEXP (x, 1)) > 0)
4509 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4514 /* Left shifts destroy copies. */
4515 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4516 || INTVAL (XEXP (x, 1)) < 0
4517 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4520 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4521 known_x, known_mode, known_ret);
4522 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4525 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4526 known_x, known_mode, known_ret);
4527 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4528 known_x, known_mode, known_ret);
4529 return MIN (num0, num1);
4531 case EQ: case NE: case GE: case GT: case LE: case LT:
4532 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4533 case GEU: case GTU: case LEU: case LTU:
4534 case UNORDERED: case ORDERED:
4535 /* If the constant is negative, take its 1's complement and remask.
4536 Then see how many zero bits we have. */
4537 nonzero = STORE_FLAG_VALUE;
4538 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4539 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4540 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4542 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4548 /* If we haven't been able to figure it out by one of the above rules,
4549 see if some of the high-order bits are known to be zero. If so,
4550 count those bits and return one less than that amount. If we can't
4551 safely compute the mask for this mode, always return BITWIDTH. */
4553 bitwidth = GET_MODE_BITSIZE (mode);
4554 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4557 nonzero = nonzero_bits (x, mode);
4558 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4559 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4562 /* Calculate the rtx_cost of a single instruction. A return value of
4563 zero indicates an instruction pattern without a known cost. */
4566 insn_rtx_cost (rtx pat)
4571 /* Extract the single set rtx from the instruction pattern.
4572 We can't use single_set since we only have the pattern. */
4573 if (GET_CODE (pat) == SET)
4575 else if (GET_CODE (pat) == PARALLEL)
4578 for (i = 0; i < XVECLEN (pat, 0); i++)
4580 rtx x = XVECEXP (pat, 0, i);
4581 if (GET_CODE (x) == SET)
4594 cost = rtx_cost (SET_SRC (set), SET);
4595 return cost > 0 ? cost : COSTS_N_INSNS (1);
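/* Illustrative sketch (not part of the original source): if-conversion
   style use, rejecting an insn whose cost is unknown or exceeds a
   budget.  The variable max_cost is hypothetical.

     int cost = insn_rtx_cost (PATTERN (insn));
     if (cost == 0 || cost > max_cost)
       ... too expensive or unknown; do not speculate this insn ...  */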
4598 /* Given an insn INSN and condition COND, return the condition in a
4599 canonical form to simplify testing by callers. Specifically:
4601 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4602 (2) Both operands will be machine operands; (cc0) will have been replaced.
4603 (3) If an operand is a constant, it will be the second operand.
4604 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4605 for GE, GEU, and LEU.
4607 If the condition cannot be understood, or is an inequality floating-point
4608 comparison which needs to be reversed, 0 will be returned.
4610 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4612 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4613 insn used in locating the condition was found. If a replacement test
4614 of the condition is desired, it should be placed in front of that
4615 insn and we will be sure that the inputs are still valid.
4617 If WANT_REG is nonzero, we wish the condition to be relative to that
4618 register, if possible. Therefore, do not canonicalize the condition
4619 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4620 to be a compare to a CC mode register.
4622 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4623 and at INSN.  */
4626 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4627 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4634 int reverse_code = 0;
4635 enum machine_mode mode;
4636 basic_block bb = BLOCK_FOR_INSN (insn);
4638 code = GET_CODE (cond);
4639 mode = GET_MODE (cond);
4640 op0 = XEXP (cond, 0);
4641 op1 = XEXP (cond, 1);
4644 code = reversed_comparison_code (cond, insn);
4645 if (code == UNKNOWN)
4651 /* If we are comparing a register with zero, see if the register is set
4652 in the previous insn to a COMPARE or a comparison operation. Perform
4653 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4654 in cse.c.  */
4656 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4657 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4658 && op1 == CONST0_RTX (GET_MODE (op0))
4661 /* Set nonzero when we find something of interest. */
4665 /* If comparison with cc0, import actual comparison from compare
4666 insn.  */
4669 if ((prev = prev_nonnote_insn (prev)) == 0
4670 || !NONJUMP_INSN_P (prev)
4671 || (set = single_set (prev)) == 0
4672 || SET_DEST (set) != cc0_rtx)
4675 op0 = SET_SRC (set);
4676 op1 = CONST0_RTX (GET_MODE (op0));
4682 /* If this is a COMPARE, pick up the two things being compared. */
4683 if (GET_CODE (op0) == COMPARE)
4685 op1 = XEXP (op0, 1);
4686 op0 = XEXP (op0, 0);
4689 else if (!REG_P (op0))
4692 /* Go back to the previous insn. Stop if it is not an INSN. We also
4693 stop if it isn't a single set or if it has a REG_INC note because
4694 we don't want to bother dealing with it. */
4696 if ((prev = prev_nonnote_insn (prev)) == 0
4697 || !NONJUMP_INSN_P (prev)
4698 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4699 /* In cfglayout mode, there do not have to be labels at the
4700 beginning of a block, or jumps at the end, so the previous
4701 conditions would not stop us when we reach bb boundary. */
4702 || BLOCK_FOR_INSN (prev) != bb)
4705 set = set_of (op0, prev);
4708 && (GET_CODE (set) != SET
4709 || !rtx_equal_p (SET_DEST (set), op0)))
4712 /* If this is setting OP0, get what it sets it to if it looks
4713 like a comparison.  */
4716 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4717 #ifdef FLOAT_STORE_FLAG_VALUE
4718 REAL_VALUE_TYPE fsfv;
4721 /* ??? We may not combine comparisons done in a CCmode with
4722 comparisons not done in a CCmode. This is to aid targets
4723 like Alpha that have an IEEE compliant EQ instruction, and
4724 a non-IEEE compliant BEQ instruction. The use of CCmode is
4725 actually artificial, simply to prevent the combination, but
4726 should not affect other platforms.
4728 However, we must allow VOIDmode comparisons to match either
4729 CCmode or non-CCmode comparison, because some ports have
4730 modeless comparisons inside branch patterns.
4732 ??? This mode check should perhaps look more like the mode check
4733 in simplify_comparison in combine. */
4735 if ((GET_CODE (SET_SRC (set)) == COMPARE
4738 && GET_MODE_CLASS (inner_mode) == MODE_INT
4739 && (GET_MODE_BITSIZE (inner_mode)
4740 <= HOST_BITS_PER_WIDE_INT)
4741 && (STORE_FLAG_VALUE
4742 & ((HOST_WIDE_INT) 1
4743 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4744 #ifdef FLOAT_STORE_FLAG_VALUE
4746 && SCALAR_FLOAT_MODE_P (inner_mode)
4747 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4748 REAL_VALUE_NEGATIVE (fsfv)))
4751 && COMPARISON_P (SET_SRC (set))))
4752 && (((GET_MODE_CLASS (mode) == MODE_CC)
4753 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4754 || mode == VOIDmode || inner_mode == VOIDmode))
4756 else if (((code == EQ
4758 && (GET_MODE_BITSIZE (inner_mode)
4759 <= HOST_BITS_PER_WIDE_INT)
4760 && GET_MODE_CLASS (inner_mode) == MODE_INT
4761 && (STORE_FLAG_VALUE
4762 & ((HOST_WIDE_INT) 1
4763 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4764 #ifdef FLOAT_STORE_FLAG_VALUE
4766 && SCALAR_FLOAT_MODE_P (inner_mode)
4767 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4768 REAL_VALUE_NEGATIVE (fsfv)))
4771 && COMPARISON_P (SET_SRC (set))
4772 && (((GET_MODE_CLASS (mode) == MODE_CC)
4773 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4774 || mode == VOIDmode || inner_mode == VOIDmode))
4784 else if (reg_set_p (op0, prev))
4785 /* If this sets OP0, but not directly, we have to give up. */
4790 /* If the caller is expecting the condition to be valid at INSN,
4791 make sure X doesn't change before INSN. */
4792 if (valid_at_insn_p)
4793 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4795 if (COMPARISON_P (x))
4796 code = GET_CODE (x);
4799 code = reversed_comparison_code (x, prev);
4800 if (code == UNKNOWN)
4805 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4811 /* If the constant is first, put it last. */
4812 if (CONSTANT_P (op0))
4813 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4815 /* If OP0 is the result of a comparison, we weren't able to find what
4816 was really being compared, so fail. */
4818 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4821 /* Canonicalize any ordered comparison with integers involving equality
4822 if we can do computations in the relevant mode and we do not
4823 overflow.  */
4825 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4826 && GET_CODE (op1) == CONST_INT
4827 && GET_MODE (op0) != VOIDmode
4828 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4830 HOST_WIDE_INT const_val = INTVAL (op1);
4831 unsigned HOST_WIDE_INT uconst_val = const_val;
4832 unsigned HOST_WIDE_INT max_val
4833 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4838 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4839 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4842 /* When cross-compiling, const_val might be sign-extended from
4843 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4845 if ((HOST_WIDE_INT) (const_val & max_val)
4846 != (((HOST_WIDE_INT) 1
4847 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4848 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4852 if (uconst_val < max_val)
4853 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4857 if (uconst_val != 0)
4858 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4866 /* Never return CC0; return zero instead. */
4870 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
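/* Illustrative example (not part of the original source) of rule (4)
   above: (le (reg:SI r) (const_int 4)) is canonicalized to
   (lt (reg:SI r) (const_int 5)), and (geu x (const_int 1)) becomes
   (gtu x (const_int 0)), provided the adjusted constant does not wrap
   around in the operand's mode.  */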
4873 /* Given a jump insn JUMP, return the condition that will cause it to branch
4874 to its JUMP_LABEL. If the condition cannot be understood, or is an
4875 inequality floating-point comparison which needs to be reversed, 0 will
4876 be returned.
4878 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4879 insn used in locating the condition was found. If a replacement test
4880 of the condition is desired, it should be placed in front of that
4881 insn and we will be sure that the inputs are still valid. If EARLIEST
4882 is null, the returned condition will be valid at INSN.
4884 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4885 compare CC mode register.
4887 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4890 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4896 /* If this is not a standard conditional jump, we can't parse it. */
4898 || ! any_condjump_p (jump))
4900 set = pc_set (jump);
4902 cond = XEXP (SET_SRC (set), 0);
4904 /* If this branches to JUMP_LABEL when the condition is false, reverse
4905 the condition.  */
4906 reverse
4907 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4908 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4910 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4911 allow_cc_mode, valid_at_insn_p);
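/* Illustrative sketch (not part of the original source): typical use
   when analyzing a conditional branch.  The variable names are
   hypothetical.

     rtx earliest;
     rtx cond = get_condition (jump, &earliest, 0, 1);
     if (cond != NULL_RTX)
       ... GET_CODE (cond) is the comparison code, XEXP (cond, 0) and
           XEXP (cond, 1) are its operands, valid at JUMP ...  */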
4914 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4915 TARGET_MODE_REP_EXTENDED.
4917 Note that we assume that the property of
4918 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4919 narrower than mode B. I.e., if A is a mode narrower than B then in
4920 order to be able to operate on it in mode B, mode A needs to
4921 satisfy the requirements set by the representation of mode B. */
4924 init_num_sign_bit_copies_in_rep (void)
4926 enum machine_mode mode, in_mode;
4928 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4929 in_mode = GET_MODE_WIDER_MODE (mode))
4930 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4931 mode = GET_MODE_WIDER_MODE (mode))
4933 enum machine_mode i;
4935 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4936 extends to the next widest mode. */
4937 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4938 || GET_MODE_WIDER_MODE (mode) == in_mode);
4940 /* We are in in_mode. Count how many bits outside of mode
4941 have to be copies of the sign-bit. */
4942 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4944 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4946 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4947 /* We can only check sign-bit copies starting from the
4948 top-bit. In order to be able to check the bits we
4949 have already seen we pretend that subsequent bits
4950 have to be sign-bit copies too. */
4951 || num_sign_bit_copies_in_rep [in_mode][mode])
4952 num_sign_bit_copies_in_rep [in_mode][mode]
4953 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4958 /* Suppose that truncation from the machine mode of X to MODE is not a
4959 no-op. See if there is anything special about X so that we can
4960 assume it already contains a truncated value of MODE. */
4963 truncated_to_mode (enum machine_mode mode, const_rtx x)
4965 /* This register has already been used in MODE without explicit
4966 truncation.  */
4967 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4970 /* See if we already satisfy the requirements of MODE. If yes, we
4971 can just switch to MODE. */
4972 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4973 && (num_sign_bit_copies (x, GET_MODE (x))
4974 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4980 /* Initialize non_rtx_starting_operands, which is used to speed up
4981 for_each_rtx.  */
4986 for (i = 0; i < NUM_RTX_CODE; i++)
4988 const char *format = GET_RTX_FORMAT (i);
4989 const char *first = strpbrk (format, "eEV");
4990 non_rtx_starting_operands[i] = first ? first - format : -1;
4993 init_num_sign_bit_copies_in_rep ();
4996 /* Check whether this is a constant pool constant. */
4998 constant_pool_constant_p (rtx x)
5000 x = avoid_constant_pool_reference (x);
5001 return GET_CODE (x) == CONST_DOUBLE;