/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"

/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode, unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

rtx_unstable_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (rtx_unstable_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_unstable_p (XVECEXP (x, i, j)))

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (const_rtx x, bool for_alias)

      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (rtx_varies_p (XEXP (x, i), for_alias))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
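
/* Usage sketch (illustrative only, not part of the original file):
   frame-pointer-relative addresses pass both predicates, since the
   frame pointer is stable within a function; the context is
   hypothetical.  */
#if 0
rtx addr = plus_constant (frame_pointer_rtx, 8);
gcc_assert (!rtx_unstable_p (addr));
gcc_assert (!rtx_varies_p (addr, /*for_alias=*/false));
#endif
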
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       enum machine_mode mode, bool unaligned_mems)
  enum rtx_code code = GET_CODE (x);

      && GET_MODE_SIZE (mode) != 0)
      HOST_WIDE_INT actual_offset = offset;
#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;

      if (actual_offset % GET_MODE_SIZE (mode) != 0)

      if (SYMBOL_REF_WEAK (x))
      if (!CONSTANT_POOL_ADDRESS_P (x))
          HOST_WIDE_INT decl_size;

            size = GET_MODE_SIZE (mode);

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/lhd_expr_size/int_expr_size and just use the latter.  */
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
                         ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))

      /* - or it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))

      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

  /* If it isn't one of the cases above, it can cause a trap.  */

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (const_rtx x)
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
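
/* Worked example (illustrative only, not part of the original file):
   on a STRICT_ALIGNMENT target, an SImode access at sp+2 fails the
   "actual_offset % GET_MODE_SIZE (mode)" test above (2 % 4 != 0) when
   UNALIGNED_MEMS is true, while a QImode access at the same address
   falls through to the stack-pointer case and is considered safe.  */
#if 0
gcc_assert (rtx_addr_can_trap_p_1 (stack_pointer_rtx, 2, 0, SImode, true));
gcc_assert (!rtx_addr_can_trap_p_1 (stack_pointer_rtx, 2, 0, QImode, true));
#endif
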
/* Return true if X is an address that is known to not be zero.  */

nonzero_address_p (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */
      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, it might be zero.  */

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (const_rtx x, bool for_alias)

    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

get_integer_term (const_rtx x)
  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
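
/* Example (illustrative only, not part of the original file): for the
   hypothetical constant (const (plus (symbol_ref "x") (const_int 12))),
   get_integer_term returns 12 and get_related_value returns the
   SYMBOL_REF.  */
#if 0
rtx sym = gen_rtx_SYMBOL_REF (Pmode, "x");
rtx c = gen_rtx_CONST (Pmode, gen_rtx_PLUS (Pmode, sym, GEN_INT (12)));
gcc_assert (get_integer_term (c) == 12);
gcc_assert (get_related_value (c) == sym);
#endif
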
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (const_rtx x)
  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)

/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)

  if (GET_CODE (symbol) != SYMBOL_REF)

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))

  decl = SYMBOL_REF_DECL (symbol);
  if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))

/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

split_const (rtx x, rtx *base_out, rtx *offset_out)
  if (GET_CODE (x) == CONST)
      if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);

  *offset_out = const0_rtx;
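
/* Usage sketch (illustrative only): split the constant from the sketch
   above into base and offset; for an expression with no integer term,
   *OFFSET_OUT would be const0_rtx.  */
#if 0
rtx base, offset;
split_const (c, &base, &offset);   /* c as in the sketch above */
gcc_assert (base == sym && INTVAL (offset) == 12);
#endif
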
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (const_rtx x, const_rtx find, int count_dest)
  const char *format_ptr;

      count = count_occurrences (XEXP (x, 0), find, count_dest);
        count += count_occurrences (XEXP (x, 1), find, count_dest);

      if (MEM_P (find) && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
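
/* Usage sketch (illustrative only): REGs are matched by pointer
   identity here (only MEMs go through rtx_equal_p), so pass the shared
   rtx for the register, e.g. from regno_reg_rtx; pseudo 100 is
   hypothetical.  */
#if 0
int n = count_occurrences (PATTERN (insn), regno_reg_rtx[100],
                           /*count_dest=*/0);
#endif
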
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (const_rtx reg, const_rtx in)

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

    /* Compare registers by number.  */
      return REG_P (reg) && REGNO (in) == REGNO (reg);

    /* These codes have no constituent expressions
       and are unique.  */

    /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (const_rtx beg, const_rtx end)
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (const_rtx x, const_rtx body)

  switch (GET_CODE (body))
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))

/* Internals of reg_set_between_p.  */

reg_set_p (const_rtx reg, const_rtx insn)
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
      && (FIND_REG_INC_NOTE (insn, reg)
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                          GET_MODE (reg), REGNO (reg)))
          || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;
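
/* Usage sketch (illustrative only): a scheduler-style legality test;
   the insns and the move_it helper are hypothetical.  */
#if 0
if (!reg_set_between_p (reg, insn_a, insn_b)
    && !reg_used_between_p (reg, insn_a, insn_b))
  /* REG is neither written nor read strictly between the two insns,
     so an insn mentioning only REG can be moved across the region.  */
  move_it ();
#endif
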
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

modified_between_p (const_rtx x, const_rtx start, const_rtx end)
  const enum rtx_code code = GET_CODE (x);

      if (modified_between_p (XEXP (x, 0), start, end))
      if (MEM_READONLY_P (x))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

modified_in_p (const_rtx x, const_rtx insn)
  const enum rtx_code code = GET_CODE (x);

      if (modified_in_p (XEXP (x, 0), insn))
      if (MEM_READONLY_P (x))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))

/* Helper function for set_of.  */

set_of_1 (rtx x, const_rtx pat, void *data1)
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

set_of (const_rtx pat, const_rtx insn)
  struct set_of_data data;
  data.found = NULL_RTX;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose outputs
   will not be used, which we ignore.  */

single_set_2 (const_rtx insn, const_rtx pat)
  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))

              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set the first time, we just expect it is
                 the single set we are looking for; only when more sets
                 are found in the insn do we check them.  */

              if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                  && !side_effects_p (set))

                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (const_rtx insn)

  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
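
/* Usage sketch (illustrative only): passes normally reach this code
   through the single_set wrapper from rtl.h rather than calling
   single_set_2 directly; record_value is a hypothetical consumer.  */
#if 0
rtx set = single_set (insn);
if (set != NULL_RTX && REG_P (SET_DEST (set)))
  /* INSN is (or acts as) a single assignment to a register.  */
  record_value (SET_DEST (set), SET_SRC (set));
#endif
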
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

set_noop_p (const_rtx set)
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
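
/* Example (illustrative only): a register copied to itself is a
   no-op; pseudo 60 is hypothetical.  */
#if 0
rtx r = regno_reg_rtx[60];
gcc_assert (set_noop_p (gen_rtx_SET (VOIDmode, r, r)));
#endif
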
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

noop_move_p (const_rtx insn)
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
      rtx set = single_set (p);
      rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

      if (set && rtx_equal_p (x, SET_DEST (set)))
          rtx src = SET_SRC (set);

          if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
            src = XEXP (note, 0);

          if ((valid_to == NULL_RTX
               || ! modified_between_p (src, PREV_INSN (p), valid_to))
              /* Reject hard registers because we don't usually want
                 to use them; we'd rather use a pseudo.  */
                     && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

      /* If set in non-simple way, we don't have a value.  */
      if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
  unsigned int x_regno;

  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return endregno > x_regno && regno < END_REGNO (x);

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

reg_overlap_mentioned_p (const_rtx x, const_rtx in)
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X cannot
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))

  switch (GET_CODE (x))
    case STRICT_LOW_PART:
      /* Overly conservative.  */

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);

      endregno = END_REGNO (x);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
            if (reg_overlap_mentioned_p (x, XEXP (in, i)))
            else if (fmt[i] == 'E')
                for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                  if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))

      return reg_mentioned_p (x, in);

      /* If any register in here refers to it, we return true.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (x, 0, i), 0) != 0
            && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))

      gcc_assert (CONSTANT_P (x));
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);

        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
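
/* Usage sketch (illustrative only): a typical note_stores callback
   that records every register written by an insn into a regset passed
   through DATA.  */
#if 0
static void
record_store (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  if (REG_P (dest))
    SET_REGNO_REG_SET ((regset) data, REGNO (dest));
}

/* ... then, for some insn and regset WRITTEN:
   note_stores (PATTERN (insn), record_store, (void *) written);  */
#endif
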
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)

  switch (GET_CODE (body))
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For SETs we process everything in the source plus registers in
           the memory expression of the store and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (const_rtx insn, const_rtx x)
  unsigned int regno, end_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  gcc_assert (REG_P (x));

  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

covers_regno_p (const_rtx dest, unsigned int test_regno)
  if (GET_CODE (dest) == PARALLEL)
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))

    return covers_regno_no_parallel_p (dest, test_regno);
/* Utility function for dead_or_set_p to check an individual register.  */

dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == kind)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
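
/* Usage sketch (illustrative only): fetch an insn's REG_EQUAL note,
   if any; the note's datum is in XEXP (note, 0); src is a hypothetical
   variable.  */
#if 0
rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
if (note != NULL_RTX && CONSTANT_P (XEXP (note, 0)))
  /* The insn's result is known to equal this constant.  */
  src = XEXP (note, 0);
#endif
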
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note; otherwise return NULL.  */

find_reg_equal_equiv_note (const_rtx insn)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))

/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

find_constant_src (const_rtx insn)

  set = single_set (insn);
      x = avoid_constant_pool_reference (SET_SRC (set));

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */
      if (regno < FIRST_PSEUDO_REGISTER)
          unsigned int end_regno = END_HARD_REGNO (datum);

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */
  if (regno >= FIRST_PSEUDO_REGISTER

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && REGNO (reg) <= regno
          && END_HARD_REGNO (reg) > regno)

/* Allocate a register note with kind KIND and datum DATUM.  LIST is
   stored as the pointer to the next register note.  */

alloc_reg_note (enum reg_note kind, rtx datum, rtx list)

    case REG_LABEL_TARGET:
    case REG_LABEL_OPERAND:
      /* These types of register notes use an INSN_LIST rather than an
         EXPR_LIST, so that copying is done right and dumps look
         better.  */
      note = alloc_INSN_LIST (datum, list);
      PUT_REG_NOTE_KIND (note, kind);

      note = alloc_EXPR_LIST (kind, datum, list);

/* Add register note with kind KIND and datum DATUM to INSN.  */

add_reg_note (rtx insn, enum reg_note kind, rtx datum)
  REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
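
/* Usage sketch (illustrative only): record that INSN computes a known
   constant by attaching a REG_EQUAL note.  */
#if 0
add_reg_note (insn, REG_EQUAL, GEN_INT (42));
#endif
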
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (rtx insn, const_rtx note)

  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
        XEXP (link, 1) = XEXP (note, 1);

  switch (REG_NOTE_KIND (note))
      df_notes_rescan (insn);

/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

remove_reg_equal_equiv_notes (rtx insn)

  loc = &REG_NOTES (insn);
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
        loc = &XEXP (*loc, 1);

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (const_rtx listp, const_rtx node)

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (const_rtx node, rtx *listp)
  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
          /* Splice the node out of the list.  */
            XEXP (prev, 1) = XEXP (temp, 1);
            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   that may cause unpredictable machine state, and thus no instructions
   should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

volatile_insn_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:
    /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          if (volatile_insn_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

volatile_refs_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *const fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          if (volatile_refs_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

side_effects_p (const_rtx x)
  const RTX_CODE code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
    /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          if (side_effects_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
/* Return nonzero if evaluating rtx X might cause a trap.
   FLAGS controls how to consider MEMs.  A nonzero value means the context
   of the access may have changed from the original, such that the
   address may have become invalid.  */

may_trap_p_1 (const_rtx x, unsigned flags)

  /* We make no distinction currently, but this function is part of
     the internal target-hooks ABI so we keep the parameter as
     "unsigned flags".  */
  bool code_changed = flags != 0;

  code = GET_CODE (x);

  /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:
      return targetm.unspec_may_trap_p (x, flags);

      return MEM_VOLATILE_P (x);

    /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (/* MEM_NOTRAP_P only relates to the actual position of the memory
             reference; moving it out of context such as when moving code
             when optimizing, might cause its address to become invalid.  */
          || !MEM_NOTRAP_P (x))
          HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
          return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
                                        GET_MODE (x), code_changed);

      /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))
      /* Often the comparison is in CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (may_trap_p_1 (XEXP (x, i), flags))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), flags))

/* Return nonzero if evaluating rtx X might cause a trap.  */

may_trap_p (const_rtx x)
  return may_trap_p_1 (x, 0);

/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      (set (reg:SI) (mem:SI (%fp - 7)))

      (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

may_trap_or_fault_p (const_rtx x)
  return may_trap_p_1 (x, 1);
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
        if (inequality_comparisons_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))

/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (rtx x, rtx from, rtx to)

  /* The following prevents a loop when we replace a MEM inside a
     CONST_DOUBLE with the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
      rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new_rtx) == CONST_INT)
          x = simplify_subreg (GET_MODE (x), new_rtx,
                               GET_MODE (SUBREG_REG (x)),

        SUBREG_REG (x) = new_rtx;

  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new_rtx) == CONST_INT)
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new_rtx, GET_MODE (XEXP (x, 0)));

        XEXP (x, 0) = new_rtx;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

replace_label (rtx *x, void *data)
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);

/* When *BODY is equal to X or X is directly referenced by *BODY,
   return nonzero; thus FOR_EACH_RTX stops traversing and returns nonzero
   too.  Otherwise FOR_EACH_RTX continues traversing *BODY.  */

rtx_referenced_p_1 (rtx *body, void *x)

  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to pool constant traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);

/* Return true if X is referenced in BODY.  */

rtx_referenced_p (rtx x, rtx body)
  return for_each_rtx (&body, rtx_referenced_p_1, x);
/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)

      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))

/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (const_rtx x)
  const enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          && computed_jump_p_1 (XEXP (x, i)))

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))

/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (const_rtx insn)

      rtx pat = PATTERN (insn);

      /* If we have a JUMP_LABEL set, we're not a computed jump.  */
      if (JUMP_LABEL (insn) != NULL)

      if (GET_CODE (pat) == PARALLEL)
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */

for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));

  for (; format[n] != '\0'; n++)
        result = (*f) (x, data);
          /* Do not traverse sub-expressions.  */
        else if (result != 0)
          /* Stop the traversal.  */

          /* There are no sub-expressions.  */

        i = non_rtx_starting_operands[GET_CODE (*x)];
            result = for_each_rtx_1 (*x, i, f, data);

        if (XVEC (exp, n) == 0)
        for (j = 0; j < XVECLEN (exp, n); ++j)
            x = &XVECEXP (exp, n, j);
            result = (*f) (x, data);
              /* Do not traverse sub-expressions.  */
            else if (result != 0)
              /* Stop the traversal.  */

              /* There are no sub-expressions.  */

            i = non_rtx_starting_operands[GET_CODE (*x)];
                result = for_each_rtx_1 (*x, i, f, data);

        /* Nothing to do.  */

/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (rtx *x, rtx_function f, void *data)

  result = (*f) (x, data);
    /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */

    /* There are no sub-expressions.  */

  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);
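
/* Usage sketch (illustrative only): count the MEMs in an expression.
   Returning -1 from the callback would skip sub-expressions; any
   other nonzero value stops the walk.  */
#if 0
static int
count_mems_1 (rtx *x, void *data)
{
  if (MEM_P (*x))
    ++*(int *) data;
  return 0;   /* keep walking */
}

/* ... then: int n = 0; for_each_rtx (&pat, count_mems_1, &n);  */
#endif
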
2870 /* Searches X for any reference to REGNO, returning the rtx of the
2871 reference found if any. Otherwise, returns NULL_RTX. */
2874 regno_use_in (unsigned int regno, rtx x)
2880 if (REG_P (x) && REGNO (x) == regno)
2883 fmt = GET_RTX_FORMAT (GET_CODE (x));
2884 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2888 if ((tem = regno_use_in (regno, XEXP (x, i))))
2891 else if (fmt[i] == 'E')
2892 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2893 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2900 /* Return a value indicating whether OP, an operand of a commutative
2901 operation, is preferred as the first or second operand. The higher
2902 the value, the stronger the preference for being the first operand.
2903 We use negative values to indicate a preference for the first operand
2904 and positive values for the second operand. */
2907 commutative_operand_precedence (rtx op)
2909 enum rtx_code code = GET_CODE (op);
2911 /* Constants always become the second operand.  Prefer "nice" constants.  */
2912 if (code == CONST_INT)
2914 if (code == CONST_DOUBLE)
2916 if (code == CONST_FIXED)
2918 op = avoid_constant_pool_reference (op);
2919 code = GET_CODE (op);
2921 switch (GET_RTX_CLASS (code))
2924 if (code == CONST_INT)
2926 if (code == CONST_DOUBLE)
2928 if (code == CONST_FIXED)
2933 /* SUBREGs of objects should come second. */
2934 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2939 /* Complex expressions should come first, so decrease the priority
2940 of objects.  Prefer pointer objects over non-pointer objects.  */
2941 if ((REG_P (op) && REG_POINTER (op))
2942 || (MEM_P (op) && MEM_POINTER (op)))
2946 case RTX_COMM_ARITH:
2947 /* Prefer operands that are themselves commutative to be first.
2948 This helps to make things linear. In particular,
2949 (and (and (reg) (reg)) (not (reg))) is canonical. */
2953 /* If only one operand is a binary expression, it will be the first
2954 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2955 is canonical, although it will usually be further simplified. */
2959 /* Then prefer NEG and NOT. */
2960 if (code == NEG || code == NOT)
2968 /* Return 1 iff it is necessary to swap the operands of a commutative
2969 operation in order to canonicalize the expression.  */
2972 swap_commutative_operands_p (rtx x, rtx y)
2974 return (commutative_operand_precedence (x)
2975 < commutative_operand_precedence (y));
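/* A minimal usage sketch; OP0 and OP1 are hypothetical placeholders.
   Callers canonicalize a commutative pair like so:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   so that, e.g., (plus (const_int 4) (reg:SI 60)) becomes
   (plus (reg:SI 60) (const_int 4)).  */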
2978 /* Return 1 if X is an autoincrement side effect and the register is
2979 not the stack pointer. */
2981 auto_inc_p (const_rtx x)
2983 switch (GET_CODE (x))
2991 /* There are no REG_INC notes for SP. */
2992 if (XEXP (x, 0) != stack_pointer_rtx)
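/* For example (illustrative): auto_inc_p returns 1 for
   (post_inc:SI (reg:SI 60)) but 0 for a POST_INC of the stack
   pointer, since no REG_INC notes are generated for SP.  */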
3000 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3002 loc_mentioned_in_p (rtx *loc, const_rtx in)
3011 code = GET_CODE (in);
3012 fmt = GET_RTX_FORMAT (code);
3013 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3017 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3020 else if (fmt[i] == 'E')
3021 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3022 if (loc == &XVECEXP (in, i, j)
3023 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3029 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3030 and SUBREG_BYTE, return the bit offset where the subreg begins
3031 (counting from the least significant bit of the operand). */
3034 subreg_lsb_1 (enum machine_mode outer_mode,
3035 enum machine_mode inner_mode,
3036 unsigned int subreg_byte)
3038 unsigned int bitpos;
3042 /* A paradoxical subreg begins at bit position 0. */
3043 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3046 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3047 /* If the subreg crosses a word boundary ensure that
3048 it also begins and ends on a word boundary. */
3049 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3050 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3051 && (subreg_byte % UNITS_PER_WORD
3052 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3054 if (WORDS_BIG_ENDIAN)
3055 word = (GET_MODE_SIZE (inner_mode)
3056 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3058 word = subreg_byte / UNITS_PER_WORD;
3059 bitpos = word * BITS_PER_WORD;
3061 if (BYTES_BIG_ENDIAN)
3062 byte = (GET_MODE_SIZE (inner_mode)
3063 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3065 byte = subreg_byte % UNITS_PER_WORD;
3066 bitpos += byte * BITS_PER_UNIT;
3071 /* Given a subreg X, return the bit offset where the subreg begins
3072 (counting from the least significant bit of the reg). */
3075 subreg_lsb (const_rtx x)
3077 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3078 SUBREG_BYTE (x));
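/* Worked example (illustrative): for (subreg:SI (reg:DI 60) 4) on a
   little-endian target, subreg_lsb returns 4 * BITS_PER_UNIT = 32;
   on a big-endian target the same SUBREG_BYTE selects the low-order
   word, so subreg_lsb returns 0.  */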
3081 /* Fill in information about a subreg of a hard register.
3082 xregno - A regno of an inner hard subreg_reg (or what will become one).
3083 xmode - The mode of xregno.
3084 offset - The byte offset.
3085 ymode - The mode of a top level SUBREG (or what may become one).
3086 info - Pointer to structure to fill in. */
3088 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3089 unsigned int offset, enum machine_mode ymode,
3090 struct subreg_info *info)
3092 int nregs_xmode, nregs_ymode;
3093 int mode_multiple, nregs_multiple;
3094 int offset_adj, y_offset, y_offset_adj;
3095 int regsize_xmode, regsize_ymode;
3098 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3102 /* If there are holes in a non-scalar mode in registers, we expect
3103 that it is made up of its units concatenated together. */
3104 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3106 enum machine_mode xmode_unit;
3108 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3109 if (GET_MODE_INNER (xmode) == VOIDmode)
3112 xmode_unit = GET_MODE_INNER (xmode);
3113 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3114 gcc_assert (nregs_xmode
3115 == (GET_MODE_NUNITS (xmode)
3116 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3117 gcc_assert (hard_regno_nregs[xregno][xmode]
3118 == (hard_regno_nregs[xregno][xmode_unit]
3119 * GET_MODE_NUNITS (xmode)));
3121 /* You can only ask for a SUBREG of a value with holes in the middle
3122 if you don't cross the holes. (Such a SUBREG should be done by
3123 picking a different register class, or doing it in memory if
3124 necessary.) An example of a value with holes is XCmode on 32-bit
3125 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3126 3 for each part, but in memory it's two 128-bit parts.
3127 Padding is assumed to be at the end (not necessarily the 'high part')
3128 of a register.  */
3129 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3130 < GET_MODE_NUNITS (xmode))
3131 && (offset / GET_MODE_SIZE (xmode_unit)
3132 != ((offset + GET_MODE_SIZE (ymode) - 1)
3133 / GET_MODE_SIZE (xmode_unit))))
3135 info->representable_p = false;
3140 nregs_xmode = hard_regno_nregs[xregno][xmode];
3142 nregs_ymode = hard_regno_nregs[xregno][ymode];
3144 /* Paradoxical subregs are otherwise valid. */
3147 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3149 info->representable_p = true;
3150 /* If this is a big endian paradoxical subreg, which uses more
3151 actual hard registers than the original register, we must
3152 return a negative offset so that we find the proper highpart
3153 of the register.  */
3154 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3155 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3156 info->offset = nregs_xmode - nregs_ymode;
3159 info->nregs = nregs_ymode;
3163 /* If registers store different numbers of bits in the different
3164 modes, we cannot generally form this subreg. */
3165 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3166 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3167 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3168 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3170 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3171 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3172 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3174 info->representable_p = false;
3176 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3177 info->offset = offset / regsize_xmode;
3180 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3182 info->representable_p = false;
3184 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3185 info->offset = offset / regsize_xmode;
3190 /* Lowpart subregs are otherwise valid. */
3191 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3193 info->representable_p = true;
3196 if (offset == 0 || nregs_xmode == nregs_ymode)
3199 info->nregs = nregs_ymode;
3204 /* This should always pass, otherwise we don't know how to verify
3205 the constraint. These conditions may be relaxed but
3206 subreg_regno_offset would need to be redesigned. */
3207 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3208 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3210 /* The XMODE value can be seen as a vector of NREGS_XMODE
3211 values.  The subreg must represent a lowpart of a given field.
3212 Compute what field it is. */
3213 offset_adj = offset;
3214 offset_adj -= subreg_lowpart_offset (ymode,
3215 mode_for_size (GET_MODE_BITSIZE (xmode)
3219 /* Size of ymode must not be greater than the size of xmode. */
3220 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3221 gcc_assert (mode_multiple != 0);
3223 y_offset = offset / GET_MODE_SIZE (ymode);
3224 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3225 nregs_multiple = nregs_xmode / nregs_ymode;
3227 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3228 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3232 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3235 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3236 info->nregs = nregs_ymode;
3239 /* This function returns the regno offset of a subreg expression.
3240 xregno - A regno of an inner hard subreg_reg (or what will become one).
3241 xmode - The mode of xregno.
3242 offset - The byte offset.
3243 ymode - The mode of a top level SUBREG (or what may become one).
3244 RETURN - The regno offset which would be used. */
3246 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3247 unsigned int offset, enum machine_mode ymode)
3249 struct subreg_info info;
3250 subreg_get_info (xregno, xmode, offset, ymode, &info);
3251 return info.offset;
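/* Example (illustrative): on a target where a DImode value occupies
   two SImode-sized hard registers, subreg_regno_offset (R, DImode, 4,
   SImode) is 1: bytes 4..7 live in the second register of the pair.
   The low part of the value sits at byte offset 0 on a little-endian
   target and at byte offset 4 on a big-endian one.  */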
3254 /* This function returns true when the offset is representable as a
3255 subreg offset within the given regno.
3256 xregno - A regno of an inner hard subreg_reg (or what will become one).
3257 xmode - The mode of xregno.
3258 offset - The byte offset.
3259 ymode - The mode of a top level SUBREG (or what may become one).
3260 RETURN - Whether the offset is representable. */
3262 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3263 unsigned int offset, enum machine_mode ymode)
3265 struct subreg_info info;
3266 subreg_get_info (xregno, xmode, offset, ymode, &info);
3267 return info.representable_p;
3270 /* Return the number of a YMODE register to which
3272 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3274 can be simplified. Return -1 if the subreg can't be simplified.
3276 XREGNO is a hard register number. */
3279 simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3280 unsigned int offset, enum machine_mode ymode)
3282 struct subreg_info info;
3283 unsigned int yregno;
3285 #ifdef CANNOT_CHANGE_MODE_CLASS
3286 /* Give the backend a chance to disallow the mode change. */
3287 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3288 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3289 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3293 /* We shouldn't simplify stack-related registers. */
3294 if ((!reload_completed || frame_pointer_needed)
3295 && (xregno == FRAME_POINTER_REGNUM
3296 || xregno == HARD_FRAME_POINTER_REGNUM))
3299 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3300 && xregno == ARG_POINTER_REGNUM)
3303 if (xregno == STACK_POINTER_REGNUM)
3306 /* Try to get the register offset. */
3307 subreg_get_info (xregno, xmode, offset, ymode, &info);
3308 if (!info.representable_p)
3311 /* Make sure that the offsetted register value is in range. */
3312 yregno = xregno + info.offset;
3313 if (!HARD_REGISTER_NUM_P (yregno))
3316 /* See whether (reg:YMODE YREGNO) is valid.
3318 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3319 This is a kludge to work around how float/complex arguments are passed
3320 on 32-bit SPARC and should be fixed. */
3321 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3322 && HARD_REGNO_MODE_OK (xregno, xmode))
3325 return (int) yregno;
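/* A usage sketch; X here is a hypothetical SUBREG of a hard register.
   When the result is non-negative the whole SUBREG may be replaced by
   a plain REG:

     int r = simplify_subreg_regno (REGNO (SUBREG_REG (x)),
                                    GET_MODE (SUBREG_REG (x)),
                                    SUBREG_BYTE (x), GET_MODE (x));
     if (r >= 0)
       x = gen_rtx_REG (GET_MODE (x), r);  */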
3328 /* Return the final regno that a subreg expression refers to. */
3330 subreg_regno (const_rtx x)
3333 rtx subreg = SUBREG_REG (x);
3334 int regno = REGNO (subreg);
3336 ret = regno + subreg_regno_offset (regno,
3344 /* Return the number of registers that a subreg expression refers
3345 to.  */
3347 subreg_nregs (const_rtx x)
3349 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3352 /* Return the number of registers that subreg expression X, whose
3353 inner register has number REGNO, refers to.  This is a copy of
3354 subreg_nregs, changed so that the regno can be passed in.  */
3357 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3359 struct subreg_info info;
3360 rtx subreg = SUBREG_REG (x);
3362 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3368 struct parms_set_data
3369 {
3370 int nregs;
3371 HARD_REG_SET regs;
3372 };
3374 /* Helper function for noticing stores to parameter registers. */
3376 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3378 struct parms_set_data *const d = (struct parms_set_data *) data;
3379 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3380 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3382 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3387 /* Look backward for the first parameter to be loaded.
3388 Note that loads of all parameters will not necessarily be
3389 found if CSE has eliminated some of them (e.g., an argument
3390 to the outer function is passed down as a parameter).
3391 Do not skip BOUNDARY. */
3393 find_first_parameter_load (rtx call_insn, rtx boundary)
3395 struct parms_set_data parm;
3396 rtx p, before, first_set;
3398 /* Since different machines initialize their parameter registers
3399 in different orders, assume nothing. Collect the set of all
3400 parameter registers. */
3401 CLEAR_HARD_REG_SET (parm.regs);
3403 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3404 if (GET_CODE (XEXP (p, 0)) == USE
3405 && REG_P (XEXP (XEXP (p, 0), 0)))
3407 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3409 /* We only care about registers which can hold function
3410 arguments.  */
3411 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3414 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3418 first_set = call_insn;
3420 /* Search backward for the first set of a register in this set. */
3421 while (parm.nregs && before != boundary)
3423 before = PREV_INSN (before);
3425 /* It is possible that some loads got CSEed from one call to
3426 another. Stop in that case. */
3427 if (CALL_P (before))
3430 /* Our caller must either ensure that we will find all sets
3431 (in case the code has not been optimized yet), or account for
3432 possible labels by setting BOUNDARY to the preceding CODE_LABEL.  */
3434 if (LABEL_P (before))
3436 gcc_assert (before == boundary);
3440 if (INSN_P (before))
3442 int nregs_old = parm.nregs;
3443 note_stores (PATTERN (before), parms_set, &parm);
3444 /* If we found something that did not set a parameter reg,
3445 we're done. Do not keep going, as that might result
3446 in hoisting an insn before the setting of a pseudo
3447 that is used by the hoisted insn. */
3448 if (nregs_old != parm.nregs)
3449 first_set = before;
3450 else
3451 break;
3455 return first_set;
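/* A usage sketch with a hypothetical caller: code that must execute
   before a call and before its argument setup can be placed in front
   of the insn this function returns:

     rtx first = find_first_parameter_load (call_insn, BB_HEAD (bb));
     emit_insn_before (seq, first);  */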
3457 /* Return true if we should avoid inserting code between INSN and
3458 the preceding call instruction.  */
3461 keep_with_call_p (const_rtx insn)
3465 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3467 if (REG_P (SET_DEST (set))
3468 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3469 && fixed_regs[REGNO (SET_DEST (set))]
3470 && general_operand (SET_SRC (set), VOIDmode))
3472 if (REG_P (SET_SRC (set))
3473 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3474 && REG_P (SET_DEST (set))
3475 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3477 /* There may be a stack pop just after the call and before the store
3478 of the return register. Search for the actual store when deciding
3479 if we can break or not. */
3480 if (SET_DEST (set) == stack_pointer_rtx)
3482 /* This CONST_CAST is okay because next_nonnote_insn just
3483 returns its argument and we assign it to a const_rtx
3484 variable.  */
3485 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3486 if (i2 && keep_with_call_p (i2))
3493 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3494 to non-complex jumps. That is, direct unconditional, conditional,
3495 and tablejumps, but not computed jumps or returns. It also does
3496 not apply to the fallthru case of a conditional jump. */
3499 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3501 rtx tmp = JUMP_LABEL (jump_insn);
3506 if (tablejump_p (jump_insn, NULL, &tmp))
3508 rtvec vec = XVEC (PATTERN (tmp),
3509 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3510 int i, veclen = GET_NUM_ELEM (vec);
3512 for (i = 0; i < veclen; ++i)
3513 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3517 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3524 /* Return an estimate of the cost of computing rtx X.
3525 One use is in cse, to decide which expression to keep in the hash table.
3526 Another is in rtl generation, to pick the cheapest way to multiply.
3527 Other uses like the latter are expected in the future.
3529 The SPEED parameter specifies whether costs optimized for speed or size
3530 should be used.  */
3533 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED, bool speed)
3543 /* Compute the default costs of certain things.
3544 Note that targetm.rtx_costs can override the defaults. */
3546 code = GET_CODE (x);
3550 total = COSTS_N_INSNS (5);
3556 total = COSTS_N_INSNS (7);
3559 /* Used in combine.c as a marker. */
3563 total = COSTS_N_INSNS (1);
3573 /* If we can't tie these modes, make this expensive. The larger
3574 the mode, the more expensive it is. */
3575 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3576 return COSTS_N_INSNS (2
3577 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3581 if (targetm.rtx_costs (x, code, outer_code, &total, speed))
3586 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3587 which is already in total.  */
3589 fmt = GET_RTX_FORMAT (code);
3590 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3592 total += rtx_cost (XEXP (x, i), code, speed);
3593 else if (fmt[i] == 'E')
3594 for (j = 0; j < XVECLEN (x, i); j++)
3595 total += rtx_cost (XVECEXP (x, i, j), code, speed);
3597 return total;
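/* A usage sketch; A and B are hypothetical candidate expressions.
   Keep the cheaper one when optimizing for speed:

     if (rtx_cost (a, SET, true) <= rtx_cost (b, SET, true))
       ... prefer A ...  */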
3600 /* Return cost of address expression X.
3601 Expect that X is a properly formed address reference.
3603 The SPEED parameter specifies whether costs optimized for speed or size
3604 should be returned.  */
3607 address_cost (rtx x, enum machine_mode mode, bool speed)
3609 /* We may be asked for the cost of various unusual addresses, such as
3610 the operands of a push instruction.  It is not worthwhile to
3611 complicate the target hook for such cases.  */
3613 if (!memory_address_p (mode, x))
3616 return targetm.address_cost (x, speed);
3619 /* If the target doesn't override, compute the cost as with arithmetic. */
3622 default_address_cost (rtx x, bool speed)
3624 return rtx_cost (x, MEM, speed);
3628 unsigned HOST_WIDE_INT
3629 nonzero_bits (const_rtx x, enum machine_mode mode)
3631 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3635 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3637 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
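/* Worked examples (illustrative): for
     (and:SI (reg:SI 60) (const_int 255))
   nonzero_bits yields 0xff, since only the low byte can be set, and
   for
     (sign_extend:DI (reg:SI 60))
   num_sign_bit_copies in DImode is at least 33: the 32 extension bits
   plus the original sign bit.  */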
3640 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3641 It avoids exponential behavior in nonzero_bits1 when X has
3642 identical subexpressions on the first or the second level. */
3644 static unsigned HOST_WIDE_INT
3645 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3646 enum machine_mode known_mode,
3647 unsigned HOST_WIDE_INT known_ret)
3649 if (x == known_x && mode == known_mode)
3652 /* Try to find identical subexpressions.  If found, call
3653 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3654 precomputed value for the subexpression as KNOWN_RET. */
3656 if (ARITHMETIC_P (x))
3658 rtx x0 = XEXP (x, 0);
3659 rtx x1 = XEXP (x, 1);
3661 /* Check the first level. */
3663 return nonzero_bits1 (x, mode, x0, mode,
3664 cached_nonzero_bits (x0, mode, known_x,
3665 known_mode, known_ret));
3667 /* Check the second level. */
3668 if (ARITHMETIC_P (x0)
3669 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3670 return nonzero_bits1 (x, mode, x1, mode,
3671 cached_nonzero_bits (x1, mode, known_x,
3672 known_mode, known_ret));
3674 if (ARITHMETIC_P (x1)
3675 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3676 return nonzero_bits1 (x, mode, x0, mode,
3677 cached_nonzero_bits (x0, mode, known_x,
3678 known_mode, known_ret));
3681 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3684 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3685 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3686 is less useful. We can't allow both, because that results in exponential
3687 run time recursion. There is a nullstone testcase that triggered
3688 this. This macro avoids accidental uses of num_sign_bit_copies. */
3689 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3691 /* Given an expression, X, compute which bits in X can be nonzero.
3692 We don't care about bits outside of those defined in MODE.
3694 For most X this is simply GET_MODE_MASK (MODE), but if X is
3695 an arithmetic operation, we can do better. */
3697 static unsigned HOST_WIDE_INT
3698 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3699 enum machine_mode known_mode,
3700 unsigned HOST_WIDE_INT known_ret)
3702 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3703 unsigned HOST_WIDE_INT inner_nz;
3705 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3707 /* For floating-point and vector values, assume all bits are needed. */
3708 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3709 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3712 /* If X is wider than MODE, use its mode instead. */
3713 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3715 mode = GET_MODE (x);
3716 nonzero = GET_MODE_MASK (mode);
3717 mode_width = GET_MODE_BITSIZE (mode);
3720 if (mode_width > HOST_BITS_PER_WIDE_INT)
3721 /* Our only callers in this case look for single bit values. So
3722 just return the mode mask. Those tests will then be false. */
3725 #ifndef WORD_REGISTER_OPERATIONS
3726 /* If MODE is wider than X, but both are a single word for both the host
3727 and target machines, we can compute this from which bits of the
3728 object might be nonzero in its own mode, taking into account the fact
3729 that on many CISC machines, accessing an object in a wider mode
3730 causes the high-order bits to become undefined. So they are
3731 not known to be zero. */
3733 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3734 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3735 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3736 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3738 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3739 known_x, known_mode, known_ret);
3740 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3745 code = GET_CODE (x);
3749 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3750 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3751 all the bits above ptr_mode are known to be zero. */
3752 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3754 nonzero &= GET_MODE_MASK (ptr_mode);
3757 /* Include declared information about alignment of pointers. */
3758 /* ??? We don't properly preserve REG_POINTER changes across
3759 pointer-to-integer casts, so we can't trust it except for
3760 things that we know must be pointers. See execute/960116-1.c. */
3761 if ((x == stack_pointer_rtx
3762 || x == frame_pointer_rtx
3763 || x == arg_pointer_rtx)
3764 && REGNO_POINTER_ALIGN (REGNO (x)))
3766 unsigned HOST_WIDE_INT alignment
3767 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3769 #ifdef PUSH_ROUNDING
3770 /* If PUSH_ROUNDING is defined, it is possible for the
3771 stack to be momentarily aligned only to that amount,
3772 so we pick the least alignment. */
3773 if (x == stack_pointer_rtx && PUSH_ARGS)
3774 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3778 nonzero &= ~(alignment - 1);
3782 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3783 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3784 known_mode, known_ret,
3788 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
3789 known_mode, known_ret);
3791 return nonzero_for_hook;
3795 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3796 /* If X is negative in MODE, sign-extend the value. */
3797 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3798 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3799 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3805 #ifdef LOAD_EXTEND_OP
3806 /* On many, if not most, RISC machines, reading a byte from memory
3807 zeros the rest of the register. Noticing that fact saves a lot
3808 of extra zero-extends. */
3809 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3810 nonzero &= GET_MODE_MASK (GET_MODE (x));
3815 case UNEQ: case LTGT:
3816 case GT: case GTU: case UNGT:
3817 case LT: case LTU: case UNLT:
3818 case GE: case GEU: case UNGE:
3819 case LE: case LEU: case UNLE:
3820 case UNORDERED: case ORDERED:
3821 /* If this produces an integer result, we know which bits are set.
3822 Code here used to clear bits outside the mode of X, but that is
3823 wrong.  */
3824 /* Mind that MODE is the mode the caller wants to look at this
3825 operation in, and not the actual operation mode. We can wind
3826 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3827 that describes the results of a vector compare. */
3828 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3829 && mode_width <= HOST_BITS_PER_WIDE_INT)
3830 nonzero = STORE_FLAG_VALUE;
3835 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3836 and num_sign_bit_copies. */
3837 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3838 == GET_MODE_BITSIZE (GET_MODE (x)))
3842 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3843 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3848 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3849 and num_sign_bit_copies. */
3850 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3851 == GET_MODE_BITSIZE (GET_MODE (x)))
3857 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3858 known_x, known_mode, known_ret)
3859 & GET_MODE_MASK (mode));
3863 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3864 known_x, known_mode, known_ret);
3865 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3866 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3870 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3871 Otherwise, show all the bits in the outer mode but not the inner
3872 may be nonzero.  */
3873 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3874 known_x, known_mode, known_ret);
3875 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3877 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3879 & (((HOST_WIDE_INT) 1
3880 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3881 inner_nz |= (GET_MODE_MASK (mode)
3882 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3885 nonzero &= inner_nz;
3889 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3890 known_x, known_mode, known_ret)
3891 & cached_nonzero_bits (XEXP (x, 1), mode,
3892 known_x, known_mode, known_ret);
3896 case UMIN: case UMAX: case SMIN: case SMAX:
3898 unsigned HOST_WIDE_INT nonzero0 =
3899 cached_nonzero_bits (XEXP (x, 0), mode,
3900 known_x, known_mode, known_ret);
3902 /* Don't call nonzero_bits for the second time if it cannot change
3903 anything.  */
3904 if ((nonzero & nonzero0) != nonzero)
3906 | cached_nonzero_bits (XEXP (x, 1), mode,
3907 known_x, known_mode, known_ret);
3911 case PLUS: case MINUS:
3913 case DIV: case UDIV:
3914 case MOD: case UMOD:
3915 /* We can apply the rules of arithmetic to compute the number of
3916 high- and low-order zero bits of these operations. We start by
3917 computing the width (position of the highest-order nonzero bit)
3918 and the number of low-order zero bits for each value. */
3920 unsigned HOST_WIDE_INT nz0 =
3921 cached_nonzero_bits (XEXP (x, 0), mode,
3922 known_x, known_mode, known_ret);
3923 unsigned HOST_WIDE_INT nz1 =
3924 cached_nonzero_bits (XEXP (x, 1), mode,
3925 known_x, known_mode, known_ret);
3926 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3927 int width0 = floor_log2 (nz0) + 1;
3928 int width1 = floor_log2 (nz1) + 1;
3929 int low0 = floor_log2 (nz0 & -nz0);
3930 int low1 = floor_log2 (nz1 & -nz1);
3931 HOST_WIDE_INT op0_maybe_minusp
3932 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3933 HOST_WIDE_INT op1_maybe_minusp
3934 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3935 unsigned int result_width = mode_width;
3941 result_width = MAX (width0, width1) + 1;
3942 result_low = MIN (low0, low1);
3945 result_low = MIN (low0, low1);
3948 result_width = width0 + width1;
3949 result_low = low0 + low1;
3954 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3955 result_width = width0;
3960 result_width = width0;
3965 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3966 result_width = MIN (width0, width1);
3967 result_low = MIN (low0, low1);
3972 result_width = MIN (width0, width1);
3973 result_low = MIN (low0, low1);
3979 if (result_width < mode_width)
3980 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3983 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3985 #ifdef POINTERS_EXTEND_UNSIGNED
3986 /* If pointers extend unsigned and this is an addition or subtraction
3987 to a pointer in Pmode, all the bits above ptr_mode are known to be
3989 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3990 && (code == PLUS || code == MINUS)
3991 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3992 nonzero &= GET_MODE_MASK (ptr_mode);
3998 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3999 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4000 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4004 /* If this is a SUBREG formed for a promoted variable that has
4005 been zero-extended, we know that at least the high-order bits
4006 are zero, though others might be too. */
4008 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4009 nonzero = GET_MODE_MASK (GET_MODE (x))
4010 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4011 known_x, known_mode, known_ret);
4013 /* If the inner mode is a single word for both the host and target
4014 machines, we can compute this from which bits of the inner
4015 object might be nonzero. */
4016 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4017 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4018 <= HOST_BITS_PER_WIDE_INT))
4020 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4021 known_x, known_mode, known_ret);
4023 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4024 /* If this is a typical RISC machine, we only have to worry
4025 about the way loads are extended. */
4026 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4028 & (((unsigned HOST_WIDE_INT) 1
4029 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4031 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4032 || !MEM_P (SUBREG_REG (x)))
4035 /* On many CISC machines, accessing an object in a wider mode
4036 causes the high-order bits to become undefined. So they are
4037 not known to be zero. */
4038 if (GET_MODE_SIZE (GET_MODE (x))
4039 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4040 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4041 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4050 /* The nonzero bits are in two classes: any bits within MODE
4051 that aren't in GET_MODE (x) are always significant. The rest of the
4052 nonzero bits are those that are significant in the operand of
4053 the shift when shifted the appropriate number of bits. This
4054 shows that high-order bits are cleared by the right shift and
4055 low-order bits by left shifts. */
4056 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4057 && INTVAL (XEXP (x, 1)) >= 0
4058 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4059 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4061 enum machine_mode inner_mode = GET_MODE (x);
4062 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4063 int count = INTVAL (XEXP (x, 1));
4064 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4065 unsigned HOST_WIDE_INT op_nonzero =
4066 cached_nonzero_bits (XEXP (x, 0), mode,
4067 known_x, known_mode, known_ret);
4068 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4069 unsigned HOST_WIDE_INT outer = 0;
4071 if (mode_width > width)
4072 outer = (op_nonzero & nonzero & ~mode_mask);
4074 if (code == LSHIFTRT)
4076 else if (code == ASHIFTRT)
4080 /* If the sign bit may have been nonzero before the shift, we
4081 need to mark all the places it could have been copied to
4082 by the shift as possibly nonzero. */
4083 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4084 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4086 else if (code == ASHIFT)
4089 inner = ((inner << (count % width)
4090 | (inner >> (width - (count % width)))) & mode_mask);
4092 nonzero &= (outer | inner);
4098 /* This is at most the number of bits in the mode. */
4099 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4103 /* If CLZ has a known value at zero, then the nonzero bits are
4104 that value, plus the number of bits in the mode minus one. */
4105 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4106 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4112 /* If CTZ has a known value at zero, then the nonzero bits are
4113 that value, plus the number of bits in the mode minus one. */
4114 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4115 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4126 unsigned HOST_WIDE_INT nonzero_true =
4127 cached_nonzero_bits (XEXP (x, 1), mode,
4128 known_x, known_mode, known_ret);
4130 /* Don't call nonzero_bits for the second time if it cannot change
4131 anything.  */
4132 if ((nonzero & nonzero_true) != nonzero)
4133 nonzero &= nonzero_true
4134 | cached_nonzero_bits (XEXP (x, 2), mode,
4135 known_x, known_mode, known_ret);
4146 /* See the macro definition above. */
4147 #undef cached_num_sign_bit_copies
4150 /* The function cached_num_sign_bit_copies is a wrapper around
4151 num_sign_bit_copies1. It avoids exponential behavior in
4152 num_sign_bit_copies1 when X has identical subexpressions on the
4153 first or the second level. */
4156 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4157 enum machine_mode known_mode,
4158 unsigned int known_ret)
4160 if (x == known_x && mode == known_mode)
4163 /* Try to find identical subexpressions.  If found, call
4164 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4165 the precomputed value for the subexpression as KNOWN_RET. */
4167 if (ARITHMETIC_P (x))
4169 rtx x0 = XEXP (x, 0);
4170 rtx x1 = XEXP (x, 1);
4172 /* Check the first level. */
4175 num_sign_bit_copies1 (x, mode, x0, mode,
4176 cached_num_sign_bit_copies (x0, mode, known_x,
4180 /* Check the second level. */
4181 if (ARITHMETIC_P (x0)
4182 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4184 num_sign_bit_copies1 (x, mode, x1, mode,
4185 cached_num_sign_bit_copies (x1, mode, known_x,
4189 if (ARITHMETIC_P (x1)
4190 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4192 num_sign_bit_copies1 (x, mode, x0, mode,
4193 cached_num_sign_bit_copies (x0, mode, known_x,
4198 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4201 /* Return the number of bits at the high-order end of X that are known to
4202 be equal to the sign bit. X will be used in mode MODE; if MODE is
4203 VOIDmode, X will be used in its own mode. The returned value will always
4204 be between 1 and the number of bits in MODE. */
4207 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4208 enum machine_mode known_mode,
4209 unsigned int known_ret)
4211 enum rtx_code code = GET_CODE (x);
4212 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4213 int num0, num1, result;
4214 unsigned HOST_WIDE_INT nonzero;
4216 /* If we weren't given a mode, use the mode of X. If the mode is still
4217 VOIDmode, we don't know anything.  Likewise if one of the modes is
4218 floating-point.  */
4220 if (mode == VOIDmode)
4221 mode = GET_MODE (x);
4223 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4224 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4227 /* For a smaller object, just ignore the high bits. */
4228 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4230 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4231 known_x, known_mode, known_ret);
4233 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4236 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4238 #ifndef WORD_REGISTER_OPERATIONS
4239 /* If this machine does not do all register operations on the entire
4240 register and MODE is wider than the mode of X, we can say nothing
4241 at all about the high-order bits. */
4244 /* Likewise on machines that do, if the mode of the object is smaller
4245 than a word and loads of that size don't sign extend, we can say
4246 nothing about the high order bits. */
4247 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4248 #ifdef LOAD_EXTEND_OP
4249 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4260 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4261 /* If pointers extend signed and this is a pointer in Pmode, say that
4262 all the bits above ptr_mode are known to be sign bit copies. */
4263 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4265 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4269 unsigned int copies_for_hook = 1, copies = 1;
4270 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4271 known_mode, known_ret,
4275 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4276 known_mode, known_ret);
4278 if (copies > 1 || copies_for_hook > 1)
4279 return MAX (copies, copies_for_hook);
4281 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4286 #ifdef LOAD_EXTEND_OP
4287 /* Some RISC machines sign-extend all loads smaller than a word.  */
4288 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4289 return MAX (1, ((int) bitwidth
4290 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4295 /* If the constant is negative, take its 1's complement and remask.
4296 Then see how many zero bits we have. */
4297 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4298 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4299 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4300 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4302 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4305 /* If this is a SUBREG for a promoted object that is sign-extended
4306 and we are looking at it in a wider mode, we know that at least the
4307 high-order bits are sign bit copies.  */
4309 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4311 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4312 known_x, known_mode, known_ret);
4313 return MAX ((int) bitwidth
4314 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4318 /* For a smaller object, just ignore the high bits. */
4319 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4321 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4322 known_x, known_mode, known_ret);
4323 return MAX (1, (num0
4324 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4328 #ifdef WORD_REGISTER_OPERATIONS
4329 #ifdef LOAD_EXTEND_OP
4330 /* For paradoxical SUBREGs on machines where all register operations
4331 affect the entire register, just look inside. Note that we are
4332 passing MODE to the recursive call, so the number of sign bit copies
4333 will remain relative to that mode, not the inner mode. */
4335 /* This works only if loads sign extend. Otherwise, if we get a
4336 reload for the inner part, it may be loaded from the stack, and
4337 then we lose all sign bit copies that existed before the store
4338 to the stack.  */
4340 if ((GET_MODE_SIZE (GET_MODE (x))
4341 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4342 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4343 && MEM_P (SUBREG_REG (x)))
4344 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4345 known_x, known_mode, known_ret);
4351 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4352 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4356 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4357 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4358 known_x, known_mode, known_ret));
4361 /* For a smaller object, just ignore the high bits. */
4362 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4363 known_x, known_mode, known_ret);
4364 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4368 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4369 known_x, known_mode, known_ret);
4371 case ROTATE: case ROTATERT:
4372 /* If we are rotating left by a number of bits less than the number
4373 of sign bit copies, we can just subtract that amount from the
4374 number.  */
4375 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4376 && INTVAL (XEXP (x, 1)) >= 0
4377 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4379 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4380 known_x, known_mode, known_ret);
4381 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4382 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4387 /* In general, this subtracts one sign bit copy. But if the value
4388 is known to be positive, the number of sign bit copies is the
4389 same as that of the input. Finally, if the input has just one bit
4390 that might be nonzero, all the bits are copies of the sign bit. */
4391 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4392 known_x, known_mode, known_ret);
4393 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4394 return num0 > 1 ? num0 - 1 : 1;
4396 nonzero = nonzero_bits (XEXP (x, 0), mode);
4401 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4406 case IOR: case AND: case XOR:
4407 case SMIN: case SMAX: case UMIN: case UMAX:
4408 /* Logical operations will preserve the number of sign-bit copies.
4409 MIN and MAX operations always return one of the operands. */
4410 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4411 known_x, known_mode, known_ret);
4412 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4413 known_x, known_mode, known_ret);
4415 /* If num1 is clearing some of the top bits then regardless of
4416 the other term, we are guaranteed to have at least that many
4417 high-order zero bits. */
4420 && bitwidth <= HOST_BITS_PER_WIDE_INT
4421 && GET_CODE (XEXP (x, 1)) == CONST_INT
4422 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4425 /* Similarly for IOR when setting high-order bits. */
4428 && bitwidth <= HOST_BITS_PER_WIDE_INT
4429 && GET_CODE (XEXP (x, 1)) == CONST_INT
4430 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4433 return MIN (num0, num1);
4435 case PLUS: case MINUS:
4436 /* For addition and subtraction, we can have a 1-bit carry. However,
4437 if we are subtracting 1 from a positive number, there will not
4438 be such a carry. Furthermore, if the positive number is known to
4439 be 0 or 1, we know the result is either -1 or 0. */
4441 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4442 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4444 nonzero = nonzero_bits (XEXP (x, 0), mode);
4445 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4446 return (nonzero == 1 || nonzero == 0 ? bitwidth
4447 : bitwidth - floor_log2 (nonzero) - 1);
4450 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4451 known_x, known_mode, known_ret);
4452 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4453 known_x, known_mode, known_ret);
4454 result = MAX (1, MIN (num0, num1) - 1);
4456 #ifdef POINTERS_EXTEND_UNSIGNED
4457 /* If pointers extend signed and this is an addition or subtraction
4458 to a pointer in Pmode, all the bits above ptr_mode are known to be
4460 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4461 && (code == PLUS || code == MINUS)
4462 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4463 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4464 - GET_MODE_BITSIZE (ptr_mode) + 1),
4470 /* The number of bits of the product is the sum of the number of
4471 bits of both terms.  However, unless one of the terms is known
4472 to be positive, we must allow for an additional bit since negating
4473 a negative number can remove one sign bit copy. */
4475 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4476 known_x, known_mode, known_ret);
4477 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4478 known_x, known_mode, known_ret);
4480 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4482 && (bitwidth > HOST_BITS_PER_WIDE_INT
4483 || (((nonzero_bits (XEXP (x, 0), mode)
4484 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4485 && ((nonzero_bits (XEXP (x, 1), mode)
4486 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4489 return MAX (1, result);
4492 /* The result must be <= the first operand. If the first operand
4493 has the high bit set, we know nothing about the number of sign
4494 bit copies.  */
4495 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4497 else if ((nonzero_bits (XEXP (x, 0), mode)
4498 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4501 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4502 known_x, known_mode, known_ret);
4505 /* The result must be <= the second operand. */
4506 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4507 known_x, known_mode, known_ret);
4510 /* Similar to unsigned division, except that we have to worry about
4511 the case where the divisor is negative, in which case we have
4512 to add 1.  */
4513 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4514 known_x, known_mode, known_ret);
4516 && (bitwidth > HOST_BITS_PER_WIDE_INT
4517 || (nonzero_bits (XEXP (x, 1), mode)
4518 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4524 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4525 known_x, known_mode, known_ret);
4527 && (bitwidth > HOST_BITS_PER_WIDE_INT
4528 || (nonzero_bits (XEXP (x, 1), mode)
4529 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4535 /* Shifts by a constant add to the number of bits equal to the
4536 sign bit.  */
4537 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4538 known_x, known_mode, known_ret);
4539 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4540 && INTVAL (XEXP (x, 1)) > 0
4541 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4542 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4547 /* Left shifts destroy copies. */
4548 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4549 || INTVAL (XEXP (x, 1)) < 0
4550 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4551 || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
4554 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4555 known_x, known_mode, known_ret);
4556 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4559 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4560 known_x, known_mode, known_ret);
4561 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4562 known_x, known_mode, known_ret);
4563 return MIN (num0, num1);
4565 case EQ: case NE: case GE: case GT: case LE: case LT:
4566 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4567 case GEU: case GTU: case LEU: case LTU:
4568 case UNORDERED: case ORDERED:
4569 /* If the constant is negative, take its 1's complement and remask.
4570 Then see how many zero bits we have. */
4571 nonzero = STORE_FLAG_VALUE;
4572 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4573 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4574 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4576 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4582 /* If we haven't been able to figure it out by one of the above rules,
4583 see if some of the high-order bits are known to be zero. If so,
4584 count those bits and return one less than that amount. If we can't
4585 safely compute the mask for this mode, always return BITWIDTH. */
4587 bitwidth = GET_MODE_BITSIZE (mode);
4588 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4591 nonzero = nonzero_bits (x, mode);
4592 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4593 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
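/* Example (illustrative): (ashiftrt:SI (reg:SI 60) (const_int 8)) has
   at least 9 sign-bit copies; the arithmetic shift adds 8 copies to
   the one that every value is guaranteed to have.  */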
4596 /* Calculate the rtx_cost of a single instruction. A return value of
4597 zero indicates an instruction pattern without a known cost. */
4600 insn_rtx_cost (rtx pat, bool speed)
4605 /* Extract the single set rtx from the instruction pattern.
4606 We can't use single_set since we only have the pattern. */
4607 if (GET_CODE (pat) == SET)
4609 else if (GET_CODE (pat) == PARALLEL)
4612 for (i = 0; i < XVECLEN (pat, 0); i++)
4614 rtx x = XVECEXP (pat, 0, i);
4615 if (GET_CODE (x) == SET)
4628 cost = rtx_cost (SET_SRC (set), SET, speed);
4629 return cost > 0 ? cost : COSTS_N_INSNS (1);
4632 /* Given an insn INSN and condition COND, return the condition in a
4633 canonical form to simplify testing by callers. Specifically:
4635 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4636 (2) Both operands will be machine operands; (cc0) will have been replaced.
4637 (3) If an operand is a constant, it will be the second operand.
4638 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4639 for GE, GEU, and LEU.
4641 If the condition cannot be understood, or is an inequality floating-point
4642 comparison which needs to be reversed, 0 will be returned.
4644 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4646 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4647 insn used in locating the condition was found. If a replacement test
4648 of the condition is desired, it should be placed in front of that
4649 insn and we will be sure that the inputs are still valid.
4651 If WANT_REG is nonzero, we wish the condition to be relative to that
4652 register, if possible. Therefore, do not canonicalize the condition
4653 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4654 to be a compare to a CC mode register.
4656 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4657 and at INSN.  */
4660 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4661 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4668 int reverse_code = 0;
4669 enum machine_mode mode;
4670 basic_block bb = BLOCK_FOR_INSN (insn);
4672 code = GET_CODE (cond);
4673 mode = GET_MODE (cond);
4674 op0 = XEXP (cond, 0);
4675 op1 = XEXP (cond, 1);
4678 code = reversed_comparison_code (cond, insn);
4679 if (code == UNKNOWN)
4685 /* If we are comparing a register with zero, see if the register is set
4686 in the previous insn to a COMPARE or a comparison operation. Perform
4687 the same tests, as a function of STORE_FLAG_VALUE, as find_comparison_args
4688 in cse.c does.  */
4690 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4691 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4692 && op1 == CONST0_RTX (GET_MODE (op0))
4695 /* Set nonzero when we find something of interest. */
4699 /* If comparison with cc0, import the actual comparison from the compare
4700 insn.  */
4701 if (op0 == cc0_rtx)
4703 if ((prev = prev_nonnote_insn (prev)) == 0
4704 || !NONJUMP_INSN_P (prev)
4705 || (set = single_set (prev)) == 0
4706 || SET_DEST (set) != cc0_rtx)
4709 op0 = SET_SRC (set);
4710 op1 = CONST0_RTX (GET_MODE (op0));
4716 /* If this is a COMPARE, pick up the two things being compared. */
4717 if (GET_CODE (op0) == COMPARE)
4719 op1 = XEXP (op0, 1);
4720 op0 = XEXP (op0, 0);
4723 else if (!REG_P (op0))
4726 /* Go back to the previous insn. Stop if it is not an INSN. We also
4727 stop if it isn't a single set or if it has a REG_INC note because
4728 we don't want to bother dealing with it. */
4730 if ((prev = prev_nonnote_insn (prev)) == 0
4731 || !NONJUMP_INSN_P (prev)
4732 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4733 /* In cfglayout mode, there do not have to be labels at the
4734 beginning of a block, or jumps at the end, so the previous
4735 conditions would not stop us when we reach the bb boundary.  */
4736 || BLOCK_FOR_INSN (prev) != bb)
4739 set = set_of (op0, prev);
4742 && (GET_CODE (set) != SET
4743 || !rtx_equal_p (SET_DEST (set), op0)))
4746 /* If this is setting OP0, get what it sets it to if it looks
4747 relevant.  */
4748 if (set)
4750 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4751 #ifdef FLOAT_STORE_FLAG_VALUE
4752 REAL_VALUE_TYPE fsfv;
4755 /* ??? We may not combine comparisons done in a CCmode with
4756 comparisons not done in a CCmode. This is to aid targets
4757 like Alpha that have an IEEE compliant EQ instruction, and
4758 a non-IEEE compliant BEQ instruction. The use of CCmode is
4759 actually artificial, simply to prevent the combination, but
4760 should not affect other platforms.
4762 However, we must allow VOIDmode comparisons to match either
4763 CCmode or non-CCmode comparison, because some ports have
4764 modeless comparisons inside branch patterns.
4766 ??? This mode check should perhaps look more like the mode check
4767 in simplify_comparison in combine. */
4769 if ((GET_CODE (SET_SRC (set)) == COMPARE
4772 && GET_MODE_CLASS (inner_mode) == MODE_INT
4773 && (GET_MODE_BITSIZE (inner_mode)
4774 <= HOST_BITS_PER_WIDE_INT)
4775 && (STORE_FLAG_VALUE
4776 & ((HOST_WIDE_INT) 1
4777 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4778 #ifdef FLOAT_STORE_FLAG_VALUE
4780 && SCALAR_FLOAT_MODE_P (inner_mode)
4781 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4782 REAL_VALUE_NEGATIVE (fsfv)))
4785 && COMPARISON_P (SET_SRC (set))))
4786 && (((GET_MODE_CLASS (mode) == MODE_CC)
4787 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4788 || mode == VOIDmode || inner_mode == VOIDmode))
4790 else if (((code == EQ
4792 && (GET_MODE_BITSIZE (inner_mode)
4793 <= HOST_BITS_PER_WIDE_INT)
4794 && GET_MODE_CLASS (inner_mode) == MODE_INT
4795 && (STORE_FLAG_VALUE
4796 & ((HOST_WIDE_INT) 1
4797 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4798 #ifdef FLOAT_STORE_FLAG_VALUE
4800 && SCALAR_FLOAT_MODE_P (inner_mode)
4801 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4802 REAL_VALUE_NEGATIVE (fsfv)))
4805 && COMPARISON_P (SET_SRC (set))
4806 && (((GET_MODE_CLASS (mode) == MODE_CC)
4807 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4808 || mode == VOIDmode || inner_mode == VOIDmode))
4818 else if (reg_set_p (op0, prev))
4819 /* If this sets OP0, but not directly, we have to give up. */
4824 /* If the caller is expecting the condition to be valid at INSN,
4825 make sure X doesn't change before INSN. */
4826 if (valid_at_insn_p)
4827 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4829 if (COMPARISON_P (x))
4830 code = GET_CODE (x);
4833 code = reversed_comparison_code (x, prev);
4834 if (code == UNKNOWN)
4839 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4845 /* If the constant is first, put it last.  */
4846 if (CONSTANT_P (op0))
4847 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4849 /* If OP0 is the result of a comparison, we weren't able to find what
4850 was really being compared, so fail. */
4852 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4855 /* Canonicalize any ordered comparison with integers involving equality
4856 if we can do computations in the relevant mode and we do not
4857 overflow.  */
4859 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4860 && GET_CODE (op1) == CONST_INT
4861 && GET_MODE (op0) != VOIDmode
4862 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4864 HOST_WIDE_INT const_val = INTVAL (op1);
4865 unsigned HOST_WIDE_INT uconst_val = const_val;
4866 unsigned HOST_WIDE_INT max_val
4867 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4872 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4873 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4876 /* When cross-compiling, const_val might be sign-extended from
4877 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
4879 if ((HOST_WIDE_INT) (const_val & max_val)
4880 != (((HOST_WIDE_INT) 1
4881 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4882 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4886 if (uconst_val < max_val)
4887 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4891 if (uconst_val != 0)
4892 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4900 /* Never return CC0; return zero instead. */
4904 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
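/* Examples (illustrative) of the rules above: rule (3) turns
   (gt (const_int 0) (reg:SI 60)) into (lt (reg:SI 60) (const_int 0)),
   and rule (4) turns (le (reg:SI 60) (const_int 4)) into
   (lt (reg:SI 60) (const_int 5)).  */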
4907 /* Given a jump insn JUMP, return the condition that will cause it to branch
4908 to its JUMP_LABEL. If the condition cannot be understood, or is an
4909 inequality floating-point comparison which needs to be reversed, 0 will
4912 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4913 insn used in locating the condition was found. If a replacement test
4914 of the condition is desired, it should be placed in front of that
4915 insn and we will be sure that the inputs are still valid. If EARLIEST
4916 is null, the returned condition will be valid at INSN.
4918 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4919 compare CC mode register.
4921 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4924 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4930 /* If this is not a standard conditional jump, we can't parse it. */
4932 || ! any_condjump_p (jump))
4934 set = pc_set (jump);
4936 cond = XEXP (SET_SRC (set), 0);
4938 /* If this branches to JUMP_LABEL when the condition is false, reverse
4939 the condition.  */
4940 reverse
4941 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4942 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4944 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4945 allow_cc_mode, valid_at_insn_p);
4948 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4949 TARGET_MODE_REP_EXTENDED.
4951 Note that we assume that the property of
4952 TARGET_MODE_REP_EXTENDED (B, C) carries over to the integral modes
4953 narrower than mode B.  I.e., if A is a mode narrower than B then in
4954 order to be able to operate on it in mode B, mode A needs to
4955 satisfy the requirements set by the representation of mode B. */
4958 init_num_sign_bit_copies_in_rep (void)
4960 enum machine_mode mode, in_mode;
4962 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4963 in_mode = GET_MODE_WIDER_MODE (mode))
4964 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4965 mode = GET_MODE_WIDER_MODE (mode))
4967 enum machine_mode i;
4969 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4970 extends to the next widest mode. */
4971 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4972 || GET_MODE_WIDER_MODE (mode) == in_mode);
4974 /* We are in in_mode. Count how many bits outside of mode
4975 have to be copies of the sign-bit. */
4976 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4978 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4980 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4981 /* We can only check sign-bit copies starting from the
4982 top-bit. In order to be able to check the bits we
4983 have already seen we pretend that subsequent bits
4984 have to be sign-bit copies too. */
4985 || num_sign_bit_copies_in_rep [in_mode][mode])
4986 num_sign_bit_copies_in_rep [in_mode][mode]
4987 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
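/* Example (illustrative): if TARGET_MODE_REP_EXTENDED (SImode, DImode)
   is SIGN_EXTEND, the loop above records
   num_sign_bit_copies_in_rep[DImode][SImode]
     = GET_MODE_BITSIZE (DImode) - GET_MODE_BITSIZE (SImode) = 32.  */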
4992 /* Suppose that truncation from the machine mode of X to MODE is not a
4993 no-op. See if there is anything special about X so that we can
4994 assume it already contains a truncated value of MODE. */
4997 truncated_to_mode (enum machine_mode mode, const_rtx x)
4999 /* This register has already been used in MODE without explicit
5000 truncation.  */
5001 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5002 return true;
5004 /* See if we already satisfy the requirements of MODE.  If yes,
5005 we can just switch to MODE.  */
5006 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5007 && (num_sign_bit_copies (x, GET_MODE (x))
5008 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5009 return true;
5011 return false;
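/* Example (illustrative): if num_sign_bit_copies_in_rep[DImode][SImode]
   is 32, truncating a DImode X to SImode is free whenever
   num_sign_bit_copies (x, DImode) >= 33; the register can simply be
   reinterpreted in SImode.  */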
5014 /* Initialize non_rtx_starting_operands, which is used to speed up
5015 for_each_rtx.  */
5016 void
5017 init_rtlanal (void)
5018 {
5019 int i;
5020 for (i = 0; i < NUM_RTX_CODE; i++)
5022 const char *format = GET_RTX_FORMAT (i);
5023 const char *first = strpbrk (format, "eEV");
5024 non_rtx_starting_operands[i] = first ? first - format : -1;
5027 init_num_sign_bit_copies_in_rep ();
5030 /* Check whether this is a constant pool constant. */
5032 constant_pool_constant_p (rtx x)
5034 x = avoid_constant_pool_reference (x);
5035 return GET_CODE (x) == CONST_DOUBLE;