1 /* Procedure integration for GCC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Michael Tiemann (tiemann@cygnus.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
34 #include "insn-config.h"
38 #include "integrate.h"
47 #include "langhooks.h"
49 /* Round to the next highest integer that meets the alignment. */
50 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
53 /* Private type used by {get/has}_func_hard_reg_initial_val. */
/* NOTE(review): this listing has lines elided -- the members of
   initial_value_pair and most of initial_value_struct are not visible.
   Code kept verbatim.  */
54 typedef struct initial_value_pair GTY(()) {
58 typedef struct initial_value_struct GTY(()) {
/* Growable array of (hard_reg, pseudo) pairs; the GTY length marker ties
   its garbage-collected size to num_entries.  */
61 initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
62 } initial_value_struct;
/* Forward declarations for static helpers defined later in this file.  */
64 static void subst_constants (rtx *, rtx, struct inline_remap *, int);
65 static void set_block_origin_self (tree);
66 static void set_block_abstract_flags (tree, int);
67 static void mark_stores (rtx, rtx, void *);
69 /* Returns the Ith entry in the label_map contained in MAP. If the
70 Ith entry has not yet been set, return a fresh label. This function
71 performs a lazy initialization of label_map, thereby avoiding huge memory
72 explosions when the label_map gets very large. */
/* NOTE(review): lines elided from this listing (return type, braces).  */
75 get_label_from_map (struct inline_remap *map, int i)
77 rtx x = map->label_map[i];
/* First access to slot I: allocate a fresh CODE_LABEL and cache it.  */
80 x = map->label_map[i] = gen_label_rtx ();
85 /* Return false if the function FNDECL cannot be inlined on account of its
86 attributes, true otherwise. */
88 function_attribute_inlinable_p (tree fndecl)
/* Only target-known attributes can veto inlining; with no target
   attribute table there is nothing to check.  */
90 if (targetm.attribute_table)
/* Walk every attribute attached to FNDECL.  */
94 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
96 tree name = TREE_PURPOSE (a);
/* If the attribute is one the target defines, defer the final
   inlinability decision to the target hook.  */
99 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
100 if (is_attribute_p (targetm.attribute_table[i].name, name))
101 return targetm.function_attribute_inlinable_p (fndecl);
108 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
109 but now it will be in the TO_FN. */
/* NOTE(review): lines elided from this listing; code kept verbatim.  */
112 copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
116 /* Copy the declaration. */
117 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
122 /* See if the frontend wants to pass this by invisible reference. */
123 if (TREE_CODE (decl) == PARM_DECL
124 && DECL_ARG_TYPE (decl) != TREE_TYPE (decl)
125 && POINTER_TYPE_P (DECL_ARG_TYPE (decl))
126 && TREE_TYPE (DECL_ARG_TYPE (decl)) == TREE_TYPE (decl))
/* Invisible-reference case: the copy gets the pointer type actually
   used for argument passing.  */
129 type = DECL_ARG_TYPE (decl);
132 type = TREE_TYPE (decl);
134 /* For a parameter or result, we must make an equivalent VAR_DECL, not a
136 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
/* Carry over the qualifiers that affect code generation.  */
139 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
140 TREE_READONLY (copy) = TREE_READONLY (decl);
141 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
/* NOTE(review): the condition guarding these overrides is elided;
   presumably this is the invisible-reference branch, where the pointer
   temporary is a plain read-only value -- confirm against full source.  */
145 TREE_ADDRESSABLE (copy) = 0;
146 TREE_READONLY (copy) = 1;
147 TREE_THIS_VOLATILE (copy) = 0;
/* Non-parameter, non-result DECLs are duplicated wholesale.  */
152 copy = copy_node (decl);
153 /* The COPY is not abstract; it will be generated in TO_FN. */
154 DECL_ABSTRACT (copy) = 0;
155 lang_hooks.dup_lang_specific_decl (copy);
157 /* TREE_ADDRESSABLE isn't used to indicate that a label's
158 address has been taken; it's for internal bookkeeping in
159 expand_goto_internal. */
160 if (TREE_CODE (copy) == LABEL_DECL)
162 TREE_ADDRESSABLE (copy) = 0;
163 DECL_TOO_LATE (copy) = 0;
167 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
168 declaration inspired this copy. */
169 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
171 /* The new variable/label has no RTL, yet. */
172 if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
173 SET_DECL_RTL (copy, NULL_RTX);
175 /* These args would always appear unused, if not for this. */
176 TREE_USED (copy) = 1;
178 /* Set the context for the new declaration. */
179 if (!DECL_CONTEXT (decl))
180 /* Globals stay global. */
182 else if (DECL_CONTEXT (decl) != from_fn)
183 /* Things that weren't in the scope of the function we're inlining
184 from aren't in the scope we're inlining to, either. */
186 else if (TREE_STATIC (decl))
187 /* Function-scoped static variables should stay in the original
191 /* Ordinary automatic local variables are now in the scope of the
193 DECL_CONTEXT (copy) = to_fn;
198 /* Unfortunately, we need a global copy of const_equiv map for communication
199 with a function called from note_stores. Be *very* careful that this
200 is used properly in the presence of recursion. */
/* Read and cleared per-register by mark_stores below, which runs as a
   note_stores callback and cannot receive the remap structure directly.  */
202 varray_type global_const_equiv_varray;
204 /* Create a new copy of an rtx. Recursively copies the operands of the rtx,
205 except for those few rtx codes that are sharable.
207 We always return an rtx that is similar to that incoming rtx, with the
208 exception of possibly changing a REG to a SUBREG or vice versa. No
211 If FOR_LHS is nonzero, if means we are processing something that will
212 be the LHS of a SET. In that case, we copy RTX_UNCHANGING_P even if
213 inlining since we need to be conservative in how it is set for
216 Handle constants that need to be placed in the constant pool by
217 calling `force_const_mem'. */
/* NOTE(review): this listing is heavily elided -- the switch statement,
   its case labels, several locals and the closing braces are not visible.
   Code kept verbatim; the comments below only mark the visible arms of
   what is presumably a switch on GET_CODE (orig).  */
220 copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
225 enum machine_mode mode;
226 const char *format_ptr;
232 code = GET_CODE (orig);
233 mode = GET_MODE (orig);
/* --- REG arm (case label elided) --- */
238 /* If the stack pointer register shows up, it must be part of
239 stack-adjustments (*not* because we eliminated the frame pointer!).
240 Small hard registers are returned as-is. Pseudo-registers
241 go through their `reg_map'. */
242 regno = REGNO (orig);
243 if (regno <= LAST_VIRTUAL_REGISTER)
245 /* Some hard registers are also mapped,
246 but others are not translated. */
247 if (map->reg_map[regno] != 0)
248 return map->reg_map[regno];
250 /* If this is the virtual frame pointer, make space in current
251 function's stack frame for the stack frame of the inline function.
253 Copy the address of this area into a pseudo. Map
254 virtual_stack_vars_rtx to this pseudo and set up a constant
255 equivalence for it to be the address. This will substitute the
256 address into insns where it can be substituted and use the new
257 pseudo where it can't. */
258 else if (regno == VIRTUAL_STACK_VARS_REGNUM)
262 = get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl))
263 #ifdef FRAME_GROWS_DOWNWARD
265 = (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
268 /* In this case, virtual_stack_vars_rtx points to one byte
269 higher than the top of the frame area. So make sure we
270 allocate a big enough chunk to keep the frame pointer
271 aligned like a real one. */
273 size = CEIL_ROUND (size, alignment);
276 loc = assign_stack_temp (BLKmode, size, 1);
278 #ifdef FRAME_GROWS_DOWNWARD
279 /* In this case, virtual_stack_vars_rtx points to one byte
280 higher than the top of the frame area. So compute the offset
281 to one byte higher than our substitute frame. */
282 loc = plus_constant (loc, size);
284 map->reg_map[regno] = temp
285 = force_reg (Pmode, force_operand (loc, NULL_RTX));
287 #ifdef STACK_BOUNDARY
288 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
291 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
295 emit_insn_after (seq, map->insns_at_start);
/* Same dance for the virtual incoming-arguments pointer.  */
298 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
300 /* Do the same for a block to contain any arguments referenced
303 int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;
306 loc = assign_stack_temp (BLKmode, size, 1);
308 /* When arguments grow downward, the virtual incoming
309 args pointer points to the top of the argument block,
310 so the remapped location better do the same. */
311 #ifdef ARGS_GROW_DOWNWARD
312 loc = plus_constant (loc, size);
314 map->reg_map[regno] = temp
315 = force_reg (Pmode, force_operand (loc, NULL_RTX));
317 #ifdef STACK_BOUNDARY
318 mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
321 SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);
325 emit_insn_after (seq, map->insns_at_start);
328 else if (REG_FUNCTION_VALUE_P (orig))
330 if (rtx_equal_function_value_matters)
331 /* This is an ignored return value. We must not
332 leave it in with REG_FUNCTION_VALUE_P set, since
333 that would confuse subsequent inlining of the
334 current function into a later function. */
335 return gen_rtx_REG (GET_MODE (orig), regno);
337 /* Must be unrolling loops or replicating code if we
338 reach here, so return the register unchanged. */
/* Pseudo register: allocate a fresh pseudo on first use and copy the
   bookkeeping flags from the original.  */
346 if (map->reg_map[regno] == NULL)
348 map->reg_map[regno] = gen_reg_rtx (mode);
349 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
350 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
351 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
352 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
354 if (REG_POINTER (map->x_regno_reg_rtx[regno]))
355 mark_reg_pointer (map->reg_map[regno],
356 map->regno_pointer_align[regno]);
358 return map->reg_map[regno];
/* --- SUBREG arm (case label elided) --- */
361 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
362 return simplify_gen_subreg (GET_MODE (orig), copy,
363 GET_MODE (SUBREG_REG (orig)),
/* --- USE/CLOBBER arm (case labels elided) --- */
368 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
369 to (use foo) if the original insn didn't have a subreg.
370 Removing the subreg distorts the VAX movmemhi pattern
371 by changing the mode of an operand. */
372 copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
373 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
374 copy = SUBREG_REG (copy);
375 return gen_rtx_fmt_e (code, VOIDmode, copy);
/* --- CODE_LABEL arm (case label elided) --- */
377 /* We need to handle "deleted" labels that appear in the DECL_RTL
380 if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
385 LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
386 = LABEL_PRESERVE_P (orig);
387 return get_label_from_map (map, CODE_LABEL_NUMBER (orig));
/* --- LABEL_REF arm (case label elided) --- */
393 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
394 : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));
396 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
398 /* The fact that this label was previously nonlocal does not mean
399 it still is, so we must check if it is within the range of
400 this function's labels. */
401 LABEL_REF_NONLOCAL_P (copy)
402 = (LABEL_REF_NONLOCAL_P (orig)
403 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
404 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
/* --- SYMBOL_REF arm (case label elided) --- */
415 /* Symbols which represent the address of a label stored in the constant
416 pool must be modified to point to a constant pool entry for the
417 remapped label. Otherwise, symbols are returned unchanged. */
418 if (CONSTANT_POOL_ADDRESS_P (orig))
420 struct function *f = cfun;
421 rtx constant = get_pool_constant_for_function (f, orig);
422 if (GET_CODE (constant) == LABEL_REF)
423 return XEXP (force_const_mem
425 copy_rtx_and_substitute (constant, map, for_lhs)),
/* --- CONST_DOUBLE arm (case label elided) --- */
431 /* We have to make a new copy of this CONST_DOUBLE because don't want
432 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
433 duplicate of a CONST_DOUBLE we have already seen. */
434 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
438 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
439 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
442 return immed_double_const (CONST_DOUBLE_LOW (orig),
443 CONST_DOUBLE_HIGH (orig), VOIDmode);
/* --- ASM_OPERANDS arm (case label elided) --- */
449 /* If a single asm insn contains multiple output operands then
450 it contains multiple ASM_OPERANDS rtx's that share the input
451 and constraint vecs. We must make sure that the copied insn
452 continues to share it. */
453 if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
455 copy = rtx_alloc (ASM_OPERANDS);
456 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
457 PUT_MODE (copy, GET_MODE (orig));
458 ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
459 ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
460 = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
461 ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
462 ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
463 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
464 = map->copy_asm_constraints_vector;
465 #ifdef USE_MAPPED_LOCATION
466 ASM_OPERANDS_SOURCE_LOCATION (copy)
467 = ASM_OPERANDS_SOURCE_LOCATION (orig);
469 ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
470 ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
/* --- CALL arm (case label elided) --- */
477 /* This is given special treatment because the first
478 operand of a CALL is a (MEM ...) which may get
479 forced into a register for cse. This is undesirable
480 if function-address cse isn't wanted or if we won't do cse. */
481 #ifndef NO_FUNCTION_CSE
482 if (! (optimize && ! flag_no_function_cse))
486 = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
487 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
490 MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));
493 gen_rtx_CALL (GET_MODE (orig), copy,
494 copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
/* --- SET arm (case label elided) --- */
499 /* Must be ifdefed out for loop unrolling to work. */
500 /* ??? Is this for the old or the new unroller? */
506 /* If this is setting fp or ap, it means that we have a nonlocal goto.
507 Adjust the setting by the offset of the area we made.
508 If the nonlocal goto is into the current function,
509 this will result in unnecessarily bad code, but should work. */
510 if (SET_DEST (orig) == virtual_stack_vars_rtx
511 || SET_DEST (orig) == virtual_incoming_args_rtx)
513 /* In case a translation hasn't occurred already, make one now. */
516 HOST_WIDE_INT loc_offset;
518 copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
519 equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
520 equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
521 REGNO (equiv_reg)).rtx;
523 = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));
525 return gen_rtx_SET (VOIDmode, SET_DEST (orig),
528 (copy_rtx_and_substitute (SET_SRC (orig),
/* Ordinary SET: copy destination as an LHS, source as an RHS.  */
534 return gen_rtx_SET (VOIDmode,
535 copy_rtx_and_substitute (SET_DEST (orig), map, 1),
536 copy_rtx_and_substitute (SET_SRC (orig), map, 0));
/* --- MEM arm (case label elided) --- */
540 copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
542 MEM_COPY_ATTRIBUTES (copy, orig);
/* Generic fallthrough: shallow-allocate a node of the same code and
   copy each operand according to its rtx format character.  */
549 copy = rtx_alloc (code);
550 PUT_MODE (copy, mode);
551 RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
552 RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
553 RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
555 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
557 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
559 switch (*format_ptr++)
562 X0ANY (copy, i) = X0ANY (orig, i);
567 = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
571 /* Change any references to old-insns to point to the
572 corresponding copied insns. */
573 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
577 XVEC (copy, i) = XVEC (orig, i);
578 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
580 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
581 for (j = 0; j < XVECLEN (copy, i); j++)
583 = copy_rtx_and_substitute (XVECEXP (orig, i, j),
589 XWINT (copy, i) = XWINT (orig, i);
593 XINT (copy, i) = XINT (orig, i);
597 XSTR (copy, i) = XSTR (orig, i);
601 XTREE (copy, i) = XTREE (orig, i);
/* Record the asm vectors of the first ASM_OPERANDS copied, so later
   siblings can share them (see the ASM_OPERANDS arm above).  */
609 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
611 map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
612 map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
613 map->copy_asm_constraints_vector
614 = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
620 /* Substitute known constant values into INSN, if that is valid. */
/* NOTE(review): lines elided from this listing; code kept verbatim.  */
623 try_constants (rtx insn, struct inline_remap *map)
629 /* First try just updating addresses, then other things. This is
630 important when we have something like the store of a constant
631 into memory and we can update the memory address but the machine
632 does not support a constant source. */
633 subst_constants (&PATTERN (insn), insn, map, 1);
634 apply_change_group ();
635 subst_constants (&PATTERN (insn), insn, map, 0);
636 apply_change_group ();
638 /* Enforce consistency between the addresses in the regular insn flow
639 and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
640 if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
642 subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
643 apply_change_group ();
646 /* Show we don't know the value of anything stored or clobbered. */
647 note_stores (PATTERN (insn), mark_stores, NULL);
648 map->last_pc_value = 0;
650 map->last_cc0_value = 0;
653 /* Set up any constant equivalences made in this insn. */
654 for (i = 0; i < map->num_sets; i++)
656 if (REG_P (map->equiv_sets[i].dest))
658 int regno = REGNO (map->equiv_sets[i].dest);
660 MAYBE_EXTEND_CONST_EQUIV_VARRAY (map, regno);
661 if (VARRAY_CONST_EQUIV (map->const_equiv_varray, regno).rtx == 0
662 /* Following clause is a hack to make case work where GNU C++
663 reassigns a variable to make cse work right. */
664 || ! rtx_equal_p (VARRAY_CONST_EQUIV (map->const_equiv_varray,
666 map->equiv_sets[i].equiv))
667 SET_CONST_EQUIV_DATA (map, map->equiv_sets[i].dest,
668 map->equiv_sets[i].equiv, map->const_age);
/* Equivalences whose destination is pc or cc0 are remembered in
   dedicated slots rather than in the varray.  */
670 else if (map->equiv_sets[i].dest == pc_rtx)
671 map->last_pc_value = map->equiv_sets[i].equiv;
673 else if (map->equiv_sets[i].dest == cc0_rtx)
674 map->last_cc0_value = map->equiv_sets[i].equiv;
679 /* Substitute known constants for pseudo regs in the contents of LOC,
680 which are part of INSN.
681 If INSN is zero, the substitution should always be done (this is used to
683 These changes are taken out by try_constants if the result is not valid.
685 Note that we are more concerned with determining when the result of a SET
686 is a constant, for further propagation, than actually inserting constants
687 into insns; cse will do the latter task better.
689 This function is also used to adjust address of items previously addressed
690 via the virtual stack variable or virtual incoming arguments registers.
692 If MEMONLY is nonzero, only make changes inside a MEM. */
/* NOTE(review): this listing is heavily elided -- the dispatch switch,
   several case labels, locals and braces are not visible.  Code kept
   verbatim; comments below mark the visible arms only.  */
695 subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
700 const char *format_ptr;
701 int num_changes = num_validated_changes ();
703 enum machine_mode op0_mode = MAX_MACHINE_MODE;
/* --- cc0 arm (case label elided): substitute the last known cc0 value.  */
722 validate_change (insn, loc, map->last_cc0_value, 1);
728 /* The only thing we can do with a USE or CLOBBER is possibly do
729 some substitutions in a MEM within it. */
730 if (MEM_P (XEXP (x, 0)))
731 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
735 /* Substitute for parms and known constants. Don't replace
736 hard regs used as user variables with constants. */
739 int regno = REGNO (x);
740 struct const_equiv_data *p;
/* A recorded equivalence is only usable if it is recent enough
   (age >= const_age) and the register isn't a user-variable hard reg.  */
742 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
743 && (size_t) regno < VARRAY_SIZE (map->const_equiv_varray)
744 && (p = &VARRAY_CONST_EQUIV (map->const_equiv_varray, regno),
746 && p->age >= map->const_age)
747 validate_change (insn, loc, p->rtx, 1);
752 /* SUBREG applied to something other than a reg
753 should be treated as ordinary, since that must
754 be a special hack and we don't know how to treat it specially.
755 Consider for example mulsidi3 in m68k.md.
756 Ordinary SUBREG of a REG needs this special treatment. */
757 if (! memonly && REG_P (SUBREG_REG (x)))
759 rtx inner = SUBREG_REG (x);
762 /* We can't call subst_constants on &SUBREG_REG (x) because any
763 constant or SUBREG wouldn't be valid inside our SUBEG. Instead,
764 see what is inside, try to form the new SUBREG and see if that is
765 valid. We handle two cases: extracting a full word in an
766 integral mode and extracting the low part. */
767 subst_constants (&inner, NULL_RTX, map, 0);
768 new = simplify_gen_subreg (GET_MODE (x), inner,
769 GET_MODE (SUBREG_REG (x)),
773 validate_change (insn, loc, new, 1);
775 cancel_changes (num_changes);
/* --- MEM arm (case label elided): substitute inside the address.  */
782 subst_constants (&XEXP (x, 0), insn, map, 0);
784 /* If a memory address got spoiled, change it back. */
785 if (! memonly && insn != 0 && num_validated_changes () != num_changes
786 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
787 cancel_changes (num_changes);
/* --- SET arm (case label elided) --- */
792 /* Substitute constants in our source, and in any arguments to a
793 complex (e..g, ZERO_EXTRACT) destination, but not in the destination
795 rtx *dest_loc = &SET_DEST (x);
796 rtx dest = *dest_loc;
798 enum machine_mode compare_mode = VOIDmode;
800 /* If SET_SRC is a COMPARE which subst_constants would turn into
801 COMPARE of 2 VOIDmode constants, note the mode in which comparison
803 if (GET_CODE (SET_SRC (x)) == COMPARE)
806 if (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
809 compare_mode = GET_MODE (XEXP (src, 0));
810 if (compare_mode == VOIDmode)
811 compare_mode = GET_MODE (XEXP (src, 1));
815 subst_constants (&SET_SRC (x), insn, map, memonly);
/* Peel wrappers off the destination until the actual location.  */
818 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
819 || GET_CODE (*dest_loc) == SUBREG
820 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
822 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
824 subst_constants (&XEXP (*dest_loc, 1), insn, map, memonly);
825 subst_constants (&XEXP (*dest_loc, 2), insn, map, memonly);
827 dest_loc = &XEXP (*dest_loc, 0);
830 /* Do substitute in the address of a destination in memory. */
831 if (MEM_P (*dest_loc))
832 subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
834 /* Check for the case of DEST a SUBREG, both it and the underlying
835 register are less than one word, and the SUBREG has the wider mode.
836 In the case, we are really setting the underlying register to the
837 source converted to the mode of DEST. So indicate that. */
838 if (GET_CODE (dest) == SUBREG
839 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
840 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
841 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
842 <= GET_MODE_SIZE (GET_MODE (dest)))
843 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
845 src = tem, dest = SUBREG_REG (dest);
847 /* If storing a recognizable value save it for later recording. */
848 if ((map->num_sets < MAX_RECOG_OPERANDS)
851 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
852 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
853 || (GET_CODE (src) == PLUS
854 && REG_P (XEXP (src, 0))
855 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
856 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
857 && CONSTANT_P (XEXP (src, 1)))
858 || GET_CODE (src) == COMPARE
861 && (src == pc_rtx || GET_CODE (src) == RETURN
862 || GET_CODE (src) == LABEL_REF))))
864 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
865 it will cause us to save the COMPARE with any constants
866 substituted, which is what we want for later. */
867 rtx src_copy = copy_rtx (src);
868 map->equiv_sets[map->num_sets].equiv = src_copy;
869 map->equiv_sets[map->num_sets++].dest = dest;
870 if (compare_mode != VOIDmode
871 && GET_CODE (src) == COMPARE
872 && (GET_MODE_CLASS (GET_MODE (src)) == MODE_CC
874 && GET_MODE (XEXP (src, 0)) == VOIDmode
875 && GET_MODE (XEXP (src, 1)) == VOIDmode)
877 map->compare_src = src_copy;
878 map->compare_mode = compare_mode;
/* Generic fallthrough: recurse over operands by rtx format.  */
888 format_ptr = GET_RTX_FORMAT (code);
890 /* If the first operand is an expression, save its mode for later. */
891 if (*format_ptr == 'e')
892 op0_mode = GET_MODE (XEXP (x, 0));
894 for (i = 0; i < GET_RTX_LENGTH (code); i++)
896 switch (*format_ptr++)
903 subst_constants (&XEXP (x, i), insn, map, memonly);
916 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
917 for (j = 0; j < XVECLEN (x, i); j++)
918 subst_constants (&XVECEXP (x, i, j), insn, map, memonly);
927 /* If this is a commutative operation, move a constant to the second
928 operand unless the second operand is already a CONST_INT. */
930 && (GET_RTX_CLASS (code) == RTX_COMM_ARITH
931 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
932 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
934 rtx tem = XEXP (x, 0);
935 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
936 validate_change (insn, &XEXP (x, 1), tem, 1);
939 /* Simplify the expression in case we put in some constants. */
941 switch (GET_RTX_CLASS (code))
944 if (op0_mode == MAX_MACHINE_MODE)
946 new = simplify_unary_operation (code, GET_MODE (x),
947 XEXP (x, 0), op0_mode);
951 case RTX_COMM_COMPARE:
953 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
955 if (op_mode == VOIDmode)
956 op_mode = GET_MODE (XEXP (x, 1));
958 new = simplify_relational_operation (code, GET_MODE (x), op_mode,
959 XEXP (x, 0), XEXP (x, 1));
965 new = simplify_binary_operation (code, GET_MODE (x),
966 XEXP (x, 0), XEXP (x, 1));
969 case RTX_BITFIELD_OPS:
971 if (op0_mode == MAX_MACHINE_MODE)
974 if (code == IF_THEN_ELSE)
976 rtx op0 = XEXP (x, 0);
978 if (COMPARISON_P (op0)
979 && GET_MODE (op0) == VOIDmode
980 && ! side_effects_p (op0)
981 && XEXP (op0, 0) == map->compare_src
982 && GET_MODE (XEXP (op0, 1)) == VOIDmode)
984 /* We have compare of two VOIDmode constants for which
985 we recorded the comparison mode. */
987 simplify_gen_relational (GET_CODE (op0), GET_MODE (op0),
988 map->compare_mode, XEXP (op0, 0),
991 if (GET_CODE (tem) != CONST_INT)
992 new = simplify_ternary_operation (code, GET_MODE (x),
993 op0_mode, tem, XEXP (x, 1),
995 else if (tem == const0_rtx)
1002 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
1003 XEXP (x, 0), XEXP (x, 1),
/* Queue the simplified replacement for validation by the caller.  */
1012 validate_change (insn, loc, new, 1);
1015 /* Show that register modified no longer contain known constants. We are
1016 called from note_stores with parts of the new insn. */
/* NOTE(review): lines elided from this listing; code kept verbatim.  */
1019 mark_stores (rtx dest, rtx x ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1022 enum machine_mode mode = VOIDmode;
1024 /* DEST is always the innermost thing set, except in the case of
1025 SUBREGs of hard registers. */
1028 regno = REGNO (dest), mode = GET_MODE (dest);
1029 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1031 regno = REGNO (SUBREG_REG (dest));
/* For hard-register SUBREGs, adjust to the actual hard reg touched.  */
1032 if (regno < FIRST_PSEUDO_REGISTER)
1033 regno += subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1034 GET_MODE (SUBREG_REG (dest)),
1037 mode = GET_MODE (SUBREG_REG (dest));
1042 unsigned int uregno = regno;
/* A hard register store may span several consecutive registers.  */
1043 unsigned int last_reg = (uregno >= FIRST_PSEUDO_REGISTER ? uregno
1044 : uregno + hard_regno_nregs[uregno][mode] - 1);
1047 /* Ignore virtual stack var or virtual arg register since those
1048 are handled separately. */
1049 if (uregno != VIRTUAL_INCOMING_ARGS_REGNUM
1050 && uregno != VIRTUAL_STACK_VARS_REGNUM)
1051 for (i = uregno; i <= last_reg; i++)
1052 if ((size_t) i < VARRAY_SIZE (global_const_equiv_varray))
1053 VARRAY_CONST_EQUIV (global_const_equiv_varray, i).rtx = 0;
1057 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
1058 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
1059 that it points to the node itself, thus indicating that the node is its
1060 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
1061 the given node is NULL, recursively descend the decl/block tree which
1062 it is the root of, and for each other ..._DECL or BLOCK node contained
1063 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
1064 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
1065 values to point to themselves. */
1068 set_block_origin_self (tree stmt)
1070 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
1072 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
/* Process all local declarations of this block.  */
1077 for (local_decl = BLOCK_VARS (stmt);
1078 local_decl != NULL_TREE;
1079 local_decl = TREE_CHAIN (local_decl))
1080 set_decl_origin_self (local_decl); /* Potential recursion. */
/* Then descend into all nested blocks.  */
1086 for (subblock = BLOCK_SUBBLOCKS (stmt);
1087 subblock != NULL_TREE;
1088 subblock = BLOCK_CHAIN (subblock))
1089 set_block_origin_self (subblock); /* Recurse. */
1094 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
1095 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
1096 node to so that it points to the node itself, thus indicating that the
1097 node represents its own (abstract) origin. Additionally, if the
1098 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
1099 the decl/block tree of which the given node is the root of, and for
1100 each other ..._DECL or BLOCK node contained therein whose
1101 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
1102 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
1103 point to themselves. */
1106 set_decl_origin_self (tree decl)
1108 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
1110 DECL_ABSTRACT_ORIGIN (decl) = decl;
/* For functions, also mark the arguments and descend into the body's
   outermost BLOCK (DECL_INITIAL).  */
1111 if (TREE_CODE (decl) == FUNCTION_DECL)
1115 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1116 DECL_ABSTRACT_ORIGIN (arg) = arg;
1117 if (DECL_INITIAL (decl) != NULL_TREE
1118 && DECL_INITIAL (decl) != error_mark_node)
1119 set_block_origin_self (DECL_INITIAL (decl));
1124 /* Given a pointer to some BLOCK node, and a boolean value to set the
1125 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
1126 the given block, and for all local decls and all local sub-blocks
1127 (recursively) which are contained therein. */
1130 set_block_abstract_flags (tree stmt, int setting)
1135 BLOCK_ABSTRACT (stmt) = setting;
/* Flag every local declaration of this block ...  */
1137 for (local_decl = BLOCK_VARS (stmt);
1138 local_decl != NULL_TREE;
1139 local_decl = TREE_CHAIN (local_decl))
1140 set_decl_abstract_flags (local_decl, setting);
/* ... then recurse into nested blocks.  */
1142 for (subblock = BLOCK_SUBBLOCKS (stmt);
1143 subblock != NULL_TREE;
1144 subblock = BLOCK_CHAIN (subblock))
1145 set_block_abstract_flags (subblock, setting);
1148 /* Given a pointer to some ..._DECL node, and a boolean value to set the
1149 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
1150 given decl, and (in the case where the decl is a FUNCTION_DECL) also
1151 set the abstract flags for all of the parameters, local vars, local
1152 blocks and sub-blocks (recursively) to the same setting. */
1155 set_decl_abstract_flags (tree decl, int setting)
1157 DECL_ABSTRACT (decl) = setting;
/* For functions, propagate SETTING to arguments and to the body's
   outermost BLOCK (DECL_INITIAL).  */
1158 if (TREE_CODE (decl) == FUNCTION_DECL)
1162 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
1163 DECL_ABSTRACT (arg) = setting;
1164 if (DECL_INITIAL (decl) != NULL_TREE
1165 && DECL_INITIAL (decl) != error_mark_node)
1166 set_block_abstract_flags (DECL_INITIAL (decl), setting);
1170 /* Functions to keep track of the values hard regs had at the start of
/* Look up in FUN's table which hard register the pseudo REG was
   allocated for; linear search over recorded pairs.  NOTE(review):
   the not-found return path is elided from this listing.  */
1174 get_hard_reg_initial_reg (struct function *fun, rtx reg)
1176 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1182 for (i = 0; i < ivs->num_entries; i++)
1183 if (rtx_equal_p (ivs->entries[i].pseudo, reg))
1184 return ivs->entries[i].hard_reg;
/* Return the pseudo recorded for hard register REG in FUN, searching the
   table in the opposite direction from get_hard_reg_initial_reg.
   NOTE(review): the not-found return path is elided from this listing.  */
1190 has_func_hard_reg_initial_val (struct function *fun, rtx reg)
1192 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1198 for (i = 0; i < ivs->num_entries; i++)
1199 if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
1200 return ivs->entries[i].pseudo;
/* Return the pseudo holding the entry value of hard register REG in FUN,
   creating and recording a fresh pseudo if none exists yet.  */
1206 get_func_hard_reg_initial_val (struct function *fun, rtx reg)
1208 struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
1209 rtx rv = has_func_hard_reg_initial_val (fun, reg);
/* Lazily allocate the table on first use, starting with 5 slots.  */
1216 fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
1217 ivs = fun->hard_reg_initial_vals;
1218 ivs->num_entries = 0;
1219 ivs->max_entries = 5;
1220 ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
/* Grow the table in increments of 5 entries when full.  */
1223 if (ivs->num_entries >= ivs->max_entries)
1225 ivs->max_entries += 5;
1226 ivs->entries = ggc_realloc (ivs->entries,
1228 * sizeof (initial_value_pair));
/* Record the new pair and hand back the freshly made pseudo.  */
1231 ivs->entries[ivs->num_entries].hard_reg = reg;
1232 ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));
1234 return ivs->entries[ivs->num_entries++].pseudo;
/* Convenience wrapper: record/fetch the initial value of hard register
   REGNO in MODE for the current function (cfun).  */
1238 get_hard_reg_initial_val (enum machine_mode mode, int regno)
1240 return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
/* Convenience wrapper: query (without creating) the initial-value pseudo
   for hard register REGNO in MODE in the current function (cfun).  */
1244 has_hard_reg_initial_val (enum machine_mode mode, int regno)
1246 return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
/* Emit, at the start of the current function, one move per recorded
   entry copying each hard register into its initial-value pseudo.
   NOTE(review): sequence start/end bookkeeping is elided from this
   listing; code kept verbatim.  */
1250 emit_initial_value_sets (void)
1252 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1260 for (i = 0; i < ivs->num_entries; i++)
1261 emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
1265 emit_insn_after (seq, entry_of_function ());
1268 /* If the backend knows where to allocate pseudos for hard
1269 register initial values, register these allocations now. */
/* NOTE(review): lines elided from this listing (including the branch
   structure around reg_equiv_memory_loc vs. reg_renumber); code kept
   verbatim.  Entire body is compiled only when the target defines
   ALLOCATE_INITIAL_VALUE.  */
1271 allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
1273 #ifdef ALLOCATE_INITIAL_VALUE
1274 struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
1280 for (i = 0; i < ivs->num_entries; i++)
1282 int regno = REGNO (ivs->entries[i].pseudo);
1283 rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);
/* Skip pseudos the target cannot place or that are set more than
   once (their value is no longer simply the entry value).  */
1285 if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
1288 reg_equiv_memory_loc[regno] = x;
1291 reg_renumber[regno] = REGNO (x);
1292 /* Poke the regno right into regno_reg_rtx
1293 so that even fixed regs are accepted. */
1294 REGNO (ivs->entries[i].pseudo) = REGNO (x);
1301 #include "gt-integrate.h"