X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Freorg.c;h=4453ccabca6e678a8c9f451bd8d4b4401a3a80e6;hb=d0aaf3990d011abe5d7c65905f240a60d2fdccb6;hp=c32d9bc0f939d12fa40af95d54a8b3ded3d79685;hpb=77fce4cd57cbc9db7cdbc15bba96e178dbd0f879;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/reorg.c b/gcc/reorg.c
index c32d9bc0f93..4453ccabca6 100644
--- a/gcc/reorg.c
+++ b/gcc/reorg.c
@@ -1,6 +1,7 @@
 /* Perform instruction reorganizations for delay slot filling.
-   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+   Free Software Foundation, Inc.
    Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
    Hacked by Michael Tiemann (tiemann@cygnus.com).
 
@@ -8,7 +9,7 @@ This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify it under
 the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
 version.
 
 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
@@ -17,9 +18,8 @@ FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 for more details.
 
 You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING.  If not, write to the Free
-Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
-02110-1301, USA.  */
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
 
 /* Instruction reorganization pass.
 
@@ -173,8 +173,8 @@ static int max_uid;
 
 static int stop_search_p (rtx, int);
 static int resource_conflicts_p (struct resources *, struct resources *);
-static int insn_references_resource_p (rtx, struct resources *, int);
-static int insn_sets_resource_p (rtx, struct resources *, int);
+static int insn_references_resource_p (rtx, struct resources *, bool);
+static int insn_sets_resource_p (rtx, struct resources *, bool);
 static rtx find_end_label (void);
 static rtx emit_delay_sequence (rtx, rtx, int);
 static rtx add_to_delay_list (rtx, rtx);
@@ -211,7 +211,8 @@ static void update_reg_dead_notes (rtx, rtx);
 static void fix_reg_dead_note (rtx, rtx);
 static void update_reg_unused_notes (rtx, rtx);
 static void fill_simple_delay_slots (int);
-static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx, int, int, int, int,
+static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
+                                   int, int, int, int,
                                    int *, rtx);
 static void fill_eager_delay_slots (void);
 static void relax_delay_slots (rtx);
@@ -296,7 +297,7 @@ resource_conflicts_p (struct resources *res1, struct resources *res2)
 
 static int
 insn_references_resource_p (rtx insn, struct resources *res,
-                            int include_delayed_effects)
+                            bool include_delayed_effects)
 {
   struct resources insn_res;
 
@@ -312,12 +313,15 @@ insn_references_resource_p (rtx insn, struct resources *res,
 
 static int
 insn_sets_resource_p (rtx insn, struct resources *res,
-                      int include_delayed_effects)
+                      bool include_delayed_effects)
 {
   struct resources insn_sets;
 
   CLEAR_RESOURCE (&insn_sets);
-  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
+  mark_set_resources (insn, &insn_sets, 0,
+                      (include_delayed_effects
+                       ? MARK_SRC_DEST_CALL
+                       : MARK_SRC_DEST));
   return resource_conflicts_p (&insn_sets, res);
 }
 
@@ -427,7 +431,7 @@ find_end_label (void)
      epilogue has filled delay-slots; we would have to try and move
      the delay-slot fillers to the delay-slots for the new return
      insn or in front of the new return insn.  */
-  if (current_function_epilogue_delay_list == NULL
+  if (crtl->epilogue_delay_list == NULL
       && HAVE_return)
     {
       /* The return we make may have delay slots too.  */
@@ -511,6 +515,8 @@ emit_delay_sequence (rtx insn, rtx list, int length)
   INSN_DELETED_P (delay_insn) = 0;
   PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
 
+  INSN_LOCATOR (seq_insn) = INSN_LOCATOR (delay_insn);
+
   for (li = list; li; li = XEXP (li, 1), i++)
     {
       rtx tem = XEXP (li, 0);
@@ -540,7 +546,8 @@ emit_delay_sequence (rtx insn, rtx list, int length)
               remove_note (tem, note);
               break;
 
-            case REG_LABEL:
+            case REG_LABEL_OPERAND:
+            case REG_LABEL_TARGET:
               /* Keep the label reference count up to date.  */
               if (LABEL_P (XEXP (note, 0)))
                 LABEL_NUSES (XEXP (note, 0)) ++;
@@ -638,7 +645,7 @@ delete_from_delay_slot (rtx insn)
   prev = PREV_INSN (seq_insn);
   trial = XVECEXP (seq, 0, 0);
   delete_related_insns (seq_insn);
-  add_insn_after (trial, prev);
+  add_insn_after (trial, prev, NULL);
 
   /* If there was a barrier after the old SEQUENCE, remit it.  */
   if (had_barrier)
@@ -788,7 +795,7 @@ optimize_skip (rtx insn)
      In both of these cases, inverting the jump and annulling the delay slot
      give the same effect in fewer insns.  */
   if ((next_trial == next_active_insn (JUMP_LABEL (insn))
-       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
+       && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
       || (next_trial != 0
           && JUMP_P (next_trial)
           && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
@@ -963,9 +970,8 @@ static int
 mostly_true_jump (rtx jump_insn, rtx condition)
 {
   rtx target_label = JUMP_LABEL (jump_insn);
-  rtx insn, note;
-  int rare_dest = rare_destination (target_label);
-  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
+  rtx note;
+  int rare_dest, rare_fallthrough;
 
   /* If branch probabilities are available, then use that number since it
      always gives a correct answer.  */
@@ -984,32 +990,10 @@ mostly_true_jump (rtx jump_insn, rtx condition)
       return -1;
     }
 
-  /* ??? Ought to use estimate_probability instead.  */
-
-  /* If this is a branch outside a loop, it is highly unlikely.  */
-  if (GET_CODE (PATTERN (jump_insn)) == SET
-      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
-      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
-           && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
-          || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
-              && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
-    return -1;
-
-  if (target_label)
-    {
-      /* If this is the test of a loop, it is very likely true.  We scan
-         backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
-         before the next real insn, we assume the branch is to the top of
-         the loop.  */
-      for (insn = PREV_INSN (target_label);
-           insn && NOTE_P (insn);
-           insn = PREV_INSN (insn))
-        if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
-          return 2;
-    }
-
   /* Look at the relative rarities of the fallthrough and destination.  If
      they differ, we can predict the branch that way.  */
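   /* [Editorial illustration, not part of the patch: rare_destination
      returns 0, 1 or 2 (an ordinary destination, one that falls off the
      end of the function, or one that soon hits a BARRIER, e.g. after a
      noreturn call).  So for a hypothetical branch whose fallthrough
      path runs into a BARRIER while its target looks ordinary,
      rare_fallthrough == 2 and rare_dest == 0, the difference below is
      2, and the branch is classified as very likely taken; the
      mirror-image case gives -2, very unlikely taken.]  */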
+  rare_dest = rare_destination (target_label);
+  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
 
   switch (rare_fallthrough - rare_dest)
     {
@@ -1030,33 +1014,6 @@ mostly_true_jump (rtx jump_insn, rtx condition)
   if (condition == 0)
     return 0;
 
-  /* EQ tests are usually false and NE tests are usually true.  Also,
-     most quantities are positive, so we can make the appropriate guesses
-     about signed comparisons against zero.  */
-  switch (GET_CODE (condition))
-    {
-    case CONST_INT:
-      /* Unconditional branch.  */
-      return 1;
-    case EQ:
-      return 0;
-    case NE:
-      return 1;
-    case LE:
-    case LT:
-      if (XEXP (condition, 1) == const0_rtx)
-        return 0;
-      break;
-    case GE:
-    case GT:
-      if (XEXP (condition, 1) == const0_rtx)
-        return 1;
-      break;
-
-    default:
-      break;
-    }
-
   /* Predict backward branches usually take, forward branches usually not.  If
      we don't know whether this is forward or backward, assume the branch
      will be taken, since most are.  */
@@ -1289,7 +1246,7 @@ steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
       rtx trial = XEXP (temp, 0);
 
       mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
-      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
+      if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, false))
         return delay_list;
     }
 
@@ -1311,9 +1268,9 @@ steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
       rtx trial = XVECEXP (seq, 0, i);
       int flags;
 
-      if (insn_references_resource_p (trial, sets, 0)
-          || insn_sets_resource_p (trial, needed, 0)
-          || insn_sets_resource_p (trial, sets, 0)
+      if (insn_references_resource_p (trial, sets, false)
+          || insn_sets_resource_p (trial, needed, false)
+          || insn_sets_resource_p (trial, sets, false)
 #ifdef HAVE_cc0
           /* If TRIAL sets CC0, we can't copy it, so we can't steal this
              delay list.  */
@@ -1336,8 +1293,8 @@ steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
 
       if (! must_annul
           && ((condition == const_true_rtx
-               || (! insn_sets_resource_p (trial, other_needed, 0)
-                   && ! may_trap_p (PATTERN (trial)))))
+               || (! insn_sets_resource_p (trial, other_needed, false)
+                   && ! may_trap_or_fault_p (PATTERN (trial)))))
           ? eligible_for_delay (insn, total_slots_filled, trial, flags)
           : (must_annul || (delay_list == NULL && new_delay_list == NULL))
             && (must_annul = 1,
@@ -1411,9 +1368,9 @@ steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
 
       /* If TRIAL sets CC0, stealing it will move it too far from the use
          of CC0.  */
-      if (insn_references_resource_p (trial, sets, 0)
-          || insn_sets_resource_p (trial, needed, 0)
-          || insn_sets_resource_p (trial, sets, 0)
+      if (insn_references_resource_p (trial, sets, false)
+          || insn_sets_resource_p (trial, needed, false)
+          || insn_sets_resource_p (trial, sets, false)
 #ifdef HAVE_cc0
           || sets_cc0_p (PATTERN (trial))
 #endif
@@ -1430,8 +1387,8 @@ steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
 
       if (! must_annul
           && ((condition == const_true_rtx
-               || (! insn_sets_resource_p (trial, other_needed, 0)
-                   && ! may_trap_p (PATTERN (trial)))))
+               || (! insn_sets_resource_p (trial, other_needed, false)
+                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, *pslots_filled, trial, flags)
          : (must_annul || delay_list == NULL) && (must_annul = 1,
            check_annul_list_true_false (1, delay_list)
@@ -1491,7 +1448,8 @@ try_merge_delay_insns (rtx insn, rtx thread)
 
   if (! annul_p)
     for (i = 1 ; i < num_slots; i++)
       if (XVECEXP (PATTERN (insn), 0, i))
-        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);
+        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
+                                   true);
 
   for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
     {
@@ -1510,9 +1468,9 @@ try_merge_delay_insns (rtx insn, rtx thread)
           /* We can't share an insn that sets cc0.  */
           && ! sets_cc0_p (pat)
 #endif
-          && ! insn_references_resource_p (trial, &set, 1)
-          && ! insn_sets_resource_p (trial, &set, 1)
-          && ! insn_sets_resource_p (trial, &needed, 1)
+          && ! insn_references_resource_p (trial, &set, true)
+          && ! insn_sets_resource_p (trial, &set, true)
+          && ! insn_sets_resource_p (trial, &needed, true)
           && (trial = try_split (pat, trial, 0)) != 0
           /* Update next_trial, in case try_split succeeded.  */
           && (next_trial = next_nonnote_insn (trial))
@@ -1543,7 +1501,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
         }
 
       mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
-      mark_referenced_resources (trial, &needed, 1);
+      mark_referenced_resources (trial, &needed, true);
     }
 
   /* See if we stopped on a filled insn.  If we did, try to see if its
@@ -1558,15 +1516,15 @@ try_merge_delay_insns (rtx insn, rtx thread)
 
       /* Account for resources set/needed by the filled insn.  */
       mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
-      mark_referenced_resources (filled_insn, &needed, 1);
+      mark_referenced_resources (filled_insn, &needed, true);
 
       for (i = 1; i < XVECLEN (pat, 0); i++)
         {
           rtx dtrial = XVECEXP (pat, 0, i);
 
-          if (! insn_references_resource_p (dtrial, &set, 1)
-              && ! insn_sets_resource_p (dtrial, &set, 1)
-              && ! insn_sets_resource_p (dtrial, &needed, 1)
+          if (! insn_references_resource_p (dtrial, &set, true)
+              && ! insn_sets_resource_p (dtrial, &set, true)
+              && ! insn_sets_resource_p (dtrial, &needed, true)
 #ifdef HAVE_cc0
               && ! sets_cc0_p (PATTERN (dtrial))
 #endif
@@ -1575,12 +1533,12 @@ try_merge_delay_insns (rtx insn, rtx thread)
             {
               if (! annul_p)
                 {
-                  rtx new;
+                  rtx new_rtx;
 
                   update_block (dtrial, thread);
-                  new = delete_from_delay_slot (dtrial);
+                  new_rtx = delete_from_delay_slot (dtrial);
                   if (INSN_DELETED_P (thread))
-                    thread = new;
+                    thread = new_rtx;
                   INSN_FROM_TARGET_P (next_to_match) = 0;
                 }
               else
@@ -1597,7 +1555,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
               /* Keep track of the set/referenced resources for the delay
                  slots of any trial insns we encounter.  */
               mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
-              mark_referenced_resources (dtrial, &needed, 1);
+              mark_referenced_resources (dtrial, &needed, true);
             }
         }
     }
@@ -1613,12 +1571,12 @@ try_merge_delay_insns (rtx insn, rtx thread)
         {
           if (GET_MODE (merged_insns) == SImode)
             {
-              rtx new;
+              rtx new_rtx;
 
               update_block (XEXP (merged_insns, 0), thread);
-              new = delete_from_delay_slot (XEXP (merged_insns, 0));
+              new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
               if (INSN_DELETED_P (thread))
-                thread = new;
+                thread = new_rtx;
             }
           else
             {
@@ -1733,7 +1691,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
   CLEAR_RESOURCE (&needed);
   CLEAR_RESOURCE (&set);
   mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
-  mark_referenced_resources (insn, &needed, 1);
+  mark_referenced_resources (insn, &needed, true);
 
   /* If TARGET is a SEQUENCE, get the main insn.  */
   if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
@@ -1745,8 +1703,8 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
 #endif
       /* The insn requiring the delay may not set anything needed or set by
          INSN.  */
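      /* [Editorial example: if INSN is (set (reg R) (mem A)), a TARGET
         whose main insn writes R or A cannot make INSN redundant;
         R and A are hypothetical names.]  */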
-      || insn_sets_resource_p (target_main, &needed, 1)
-      || insn_sets_resource_p (target_main, &set, 1))
+      || insn_sets_resource_p (target_main, &needed, true)
+      || insn_sets_resource_p (target_main, &set, true))
     return 0;
 
   /* Insns we pass may not set either NEEDED or SET, so merge them for
@@ -1760,14 +1718,15 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
 
   while (delay_list)
     {
-      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
+      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
         return 0;
       delay_list = XEXP (delay_list, 1);
     }
 
   if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
     for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
-      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
+      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
+                                true))
         return 0;
 
   /* Scan backwards until we reach a label or an insn that uses something
@@ -1826,13 +1785,13 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
                  we must stop if it sets anything needed or set by INSN.  */
               if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
                    || ! INSN_FROM_TARGET_P (candidate))
-                  && insn_sets_resource_p (candidate, &needed, 1))
+                  && insn_sets_resource_p (candidate, &needed, true))
                 return 0;
             }
 
           /* If the insn requiring the delay slot conflicts with INSN, we
              must stop.  */
-          if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
+          if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, true))
             return 0;
         }
       else
@@ -1843,7 +1802,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
             return trial;
 
           /* Can't go any further if TRIAL conflicts with INSN.  */
-          if (insn_sets_resource_p (trial, &needed, 1))
+          if (insn_sets_resource_p (trial, &needed, true))
             return 0;
         }
     }
@@ -1934,7 +1893,7 @@ reorg_redirect_jump (rtx jump, rtx nlabel)
    that reference values used in INSN.  If we find one, then we move the
    REG_DEAD note to INSN.
 
-   This is needed to handle the case where an later insn (after INSN) has a
+   This is needed to handle the case where a later insn (after INSN) has a
    REG_DEAD note for a register used by INSN, and this later insn subsequently
    gets moved before a CODE_LABEL because it is a redundant insn.  In this
    case, mark_target_live_regs may be confused into thinking the register
@@ -2022,6 +1981,28 @@ update_reg_unused_notes (rtx insn, rtx redundant_insn)
         }
     }
 }
 
+/* Return the label before INSN, or put a new label there.  */
+
+static rtx
+get_label_before (rtx insn)
+{
+  rtx label;
+
+  /* Find an existing label at this point
+     or make a new one if there is none.  */
+  label = prev_nonnote_insn (insn);
+
+  if (label == 0 || !LABEL_P (label))
+    {
+      rtx prev = PREV_INSN (insn);
+
+      label = gen_label_rtx ();
+      emit_label_after (label, prev);
+      LABEL_NUSES (label) = 0;
+    }
+  return label;
+}
+
 /* Scan a function looking for insns that need a delay slot and find insns to
    put into the delay slot.
 
@@ -2159,7 +2140,7 @@ fill_simple_delay_slots (int non_jumps_p)
       CLEAR_RESOURCE (&needed);
       CLEAR_RESOURCE (&set);
       mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
-      mark_referenced_resources (insn, &needed, 0);
+      mark_referenced_resources (insn, &needed, false);
 
       for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
            trial = next_trial)
@@ -2175,9 +2156,9 @@ fill_simple_delay_slots (int non_jumps_p)
 
           /* Check for resource conflict first, to avoid unnecessary
              splitting.  */
-          if (! insn_references_resource_p (trial, &set, 1)
-              && ! insn_sets_resource_p (trial, &set, 1)
-              && ! insn_sets_resource_p (trial, &needed, 1)
+          if (! insn_references_resource_p (trial, &set, true)
+              && ! insn_sets_resource_p (trial, &set, true)
+              && ! insn_sets_resource_p (trial, &needed, true)
 #ifdef HAVE_cc0
               /* Can't separate set of cc0 from its use.  */
               && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
 #endif
@@ -2205,7 +2186,7 @@ fill_simple_delay_slots (int non_jumps_p)
                 }
 
               mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
-              mark_referenced_resources (trial, &needed, 1);
+              mark_referenced_resources (trial, &needed, true);
             }
         }
 
@@ -2276,13 +2257,13 @@ fill_simple_delay_slots (int non_jumps_p)
           if (CALL_P (insn))
             {
               mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
-              mark_referenced_resources (insn, &needed, 1);
+              mark_referenced_resources (insn, &needed, true);
               maybe_never = 1;
             }
           else
             {
               mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
-              mark_referenced_resources (insn, &needed, 1);
+              mark_referenced_resources (insn, &needed, true);
               if (JUMP_P (insn))
                 target = JUMP_LABEL (insn);
             }
@@ -2317,13 +2298,13 @@ fill_simple_delay_slots (int non_jumps_p)
                   /* See if we have a resource problem before we try to
                      split.  */
                   if (GET_CODE (pat) != SEQUENCE
-                      && ! insn_references_resource_p (trial, &set, 1)
-                      && ! insn_sets_resource_p (trial, &set, 1)
-                      && ! insn_sets_resource_p (trial, &needed, 1)
+                      && ! insn_references_resource_p (trial, &set, true)
+                      && ! insn_sets_resource_p (trial, &set, true)
+                      && ! insn_sets_resource_p (trial, &needed, true)
 #ifdef HAVE_cc0
                       && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
 #endif
-                      && ! (maybe_never && may_trap_p (pat))
+                      && ! (maybe_never && may_trap_or_fault_p (pat))
                       && (trial = try_split (pat, trial, 0))
                       && eligible_for_delay (insn, slots_filled, trial, flags)
                       && ! can_throw_internal(trial))
@@ -2343,7 +2324,7 @@ fill_simple_delay_slots (int non_jumps_p)
                     }
 
                   mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
-                  mark_referenced_resources (trial, &needed, 1);
+                  mark_referenced_resources (trial, &needed, true);
 
                   /* Ensure we don't put insns between the setting of cc and the
                      comparison by moving a setting of cc into an earlier delay
@@ -2370,13 +2351,13 @@ fill_simple_delay_slots (int non_jumps_p)
               && ! (NONJUMP_INSN_P (next_trial)
                     && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
               && !JUMP_P (next_trial)
-              && ! insn_references_resource_p (next_trial, &set, 1)
-              && ! insn_sets_resource_p (next_trial, &set, 1)
-              && ! insn_sets_resource_p (next_trial, &needed, 1)
+              && ! insn_references_resource_p (next_trial, &set, true)
+              && ! insn_sets_resource_p (next_trial, &set, true)
+              && ! insn_sets_resource_p (next_trial, &needed, true)
 #ifdef HAVE_cc0
               && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
 #endif
-              && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
+              && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
              && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
              && eligible_for_delay (insn, slots_filled, next_trial, flags)
              && ! can_throw_internal (trial))
@@ -2434,7 +2415,7 @@ fill_simple_delay_slots (int non_jumps_p)
      The only thing we can do is scan backwards from the end of the
      function.  If we did this in a previous pass, it is incorrect to
      do it again.  */
-  if (current_function_epilogue_delay_list)
+  if (crtl->epilogue_delay_list)
     return;
 
   slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
@@ -2479,9 +2460,9 @@ fill_simple_delay_slots (int non_jumps_p)
       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
         continue;
 
-      if (! insn_references_resource_p (trial, &set, 1)
-          && ! insn_sets_resource_p (trial, &needed, 1)
-          && ! insn_sets_resource_p (trial, &set, 1)
+      if (! insn_references_resource_p (trial, &set, true)
+          && ! insn_sets_resource_p (trial, &needed, true)
+          && ! insn_sets_resource_p (trial, &set, true)
 #ifdef HAVE_cc0
           /* Don't want to mess with cc0 here.  */
           && ! reg_mentioned_p (cc0_rtx, pat)
 #endif
@@ -2494,10 +2475,10 @@ fill_simple_delay_slots (int non_jumps_p)
 
           /* Here as well we are searching backward, so put the
              insns we find on the head of the list.  */
-          current_function_epilogue_delay_list
+          crtl->epilogue_delay_list
             = gen_rtx_INSN_LIST (VOIDmode, trial,
-                                 current_function_epilogue_delay_list);
+                                 crtl->epilogue_delay_list);
-          mark_end_of_function_resources (trial, 1);
+          mark_end_of_function_resources (trial, true);
           update_block (trial, trial);
           delete_related_insns (trial);
@@ -2511,13 +2492,57 @@ fill_simple_delay_slots (int non_jumps_p)
         }
 
       mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
-      mark_referenced_resources (trial, &needed, 1);
+      mark_referenced_resources (trial, &needed, true);
     }
 
   note_delay_statistics (slots_filled, 0);
 #endif
 }
 
+/* Follow any unconditional jump at LABEL;
+   return the ultimate label reached by any such chain of jumps.
+   Return null if the chain ultimately leads to a return instruction.
+   If LABEL is not followed by a jump, return LABEL.
+   If the chain loops or we can't find end, return LABEL,
+   since that tells caller to avoid changing the insn.  */
+
+static rtx
+follow_jumps (rtx label)
+{
+  rtx insn;
+  rtx next;
+  rtx value = label;
+  int depth;
+
+  for (depth = 0;
+       (depth < 10
+        && (insn = next_active_insn (value)) != 0
+        && JUMP_P (insn)
+        && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
+             && onlyjump_p (insn))
+            || GET_CODE (PATTERN (insn)) == RETURN)
+        && (next = NEXT_INSN (insn))
+        && BARRIER_P (next));
+       depth++)
+    {
+      rtx tem;
+
+      /* If we have found a cycle, make the insn jump to itself.  */
+      if (JUMP_LABEL (insn) == label)
+        return label;
+
+      tem = next_active_insn (JUMP_LABEL (insn));
+      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
+                  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
        break;
+
+      value = JUMP_LABEL (insn);
+    }
+  if (depth == 10)
+    return label;
+  return value;
+}
+
 /* Try to find insns to place in delay slots.
 
    INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
@@ -2612,9 +2637,9 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
       /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
          don't separate or copy insns that set and use CC0.  */
-      if (! insn_references_resource_p (trial, &set, 1)
-          && ! insn_sets_resource_p (trial, &set, 1)
-          && ! insn_sets_resource_p (trial, &needed, 1)
+      if (! insn_references_resource_p (trial, &set, true)
+          && ! insn_sets_resource_p (trial, &set, true)
+          && ! insn_sets_resource_p (trial, &needed, true)
 #ifdef HAVE_cc0
           && ! (reg_mentioned_p (cc0_rtx, pat)
                 && (! own_thread || ! sets_cc0_p (pat)))
@@ -2655,8 +2680,8 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
                  go into an annulled delay slot.  */
               if (!must_annul
                   && (condition == const_true_rtx
-                      || (! insn_sets_resource_p (trial, &opposite_needed, 1)
-                          && ! may_trap_p (pat))))
+                      || (! insn_sets_resource_p (trial, &opposite_needed, true)
+                          && ! may_trap_or_fault_p (pat))))
                 {
                   old_trial = trial;
                   trial = try_split (pat, trial, 0);
@@ -2719,15 +2744,41 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
                  /* We are moving this insn, not deleting it.  We must
                     temporarily increment the use count on any referenced
                     label lest it be deleted by delete_related_insns.  */
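                  /* [Editorial example: if TRIAL carries a
                     REG_LABEL_OPERAND note for a label L whose only
                     other reference sits in code about to be removed,
                     bumping LABEL_NUSES (L) keeps L alive while TRIAL
                     is moved rather than deleted; L is a hypothetical
                     label.]  */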
-                  note = find_reg_note (trial, REG_LABEL, 0);
-                  /* REG_LABEL could be NOTE_INSN_DELETED_LABEL too.  */
-                  if (note && LABEL_P (XEXP (note, 0)))
-                    LABEL_NUSES (XEXP (note, 0))++;
+                  for (note = REG_NOTES (trial);
+                       note != NULL_RTX;
+                       note = XEXP (note, 1))
+                    if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
+                        || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
+                      {
+                        /* REG_LABEL_OPERAND could be
+                           NOTE_INSN_DELETED_LABEL too.  */
+                        if (LABEL_P (XEXP (note, 0)))
+                          LABEL_NUSES (XEXP (note, 0))++;
+                        else
+                          gcc_assert (REG_NOTE_KIND (note)
+                                      == REG_LABEL_OPERAND);
+                      }
+                  if (JUMP_P (trial) && JUMP_LABEL (trial))
+                    LABEL_NUSES (JUMP_LABEL (trial))++;
 
                   delete_related_insns (trial);
 
-                  if (note && LABEL_P (XEXP (note, 0)))
-                    LABEL_NUSES (XEXP (note, 0))--;
+                  for (note = REG_NOTES (trial);
+                       note != NULL_RTX;
+                       note = XEXP (note, 1))
+                    if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
+                        || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
+                      {
+                        /* REG_LABEL_OPERAND could be
+                           NOTE_INSN_DELETED_LABEL too.  */
+                        if (LABEL_P (XEXP (note, 0)))
+                          LABEL_NUSES (XEXP (note, 0))--;
+                        else
+                          gcc_assert (REG_NOTE_KIND (note)
+                                      == REG_LABEL_OPERAND);
+                      }
+                  if (JUMP_P (trial) && JUMP_LABEL (trial))
+                    LABEL_NUSES (JUMP_LABEL (trial))--;
                 }
               else
                 new_thread = next_active_insn (trial);
@@ -2744,10 +2795,11 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
              may be branching to a location that has a redundant insn.
              Skip any if so.  */
           while (new_thread && ! own_thread
-                 && ! insn_sets_resource_p (new_thread, &set, 1)
-                 && ! insn_sets_resource_p (new_thread, &needed, 1)
+                 && ! insn_sets_resource_p (new_thread, &set, true)
+                 && ! insn_sets_resource_p (new_thread, &needed,
+                                            true)
                  && ! insn_references_resource_p (new_thread,
-                                                  &set, 1)
+                                                  &set, true)
                  && (prior_insn
                     = redundant_insn (new_thread, insn,
                                       delay_list)))
@@ -2769,7 +2821,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
       /* This insn can't go into a delay slot.  */
       lose = 1;
       mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
-      mark_referenced_resources (trial, &needed, 1);
+      mark_referenced_resources (trial, &needed, true);
 
       /* Ensure we don't put insns between the setting of cc and the comparison
          by moving a setting of cc into an earlier delay slot since these insns
@@ -2867,6 +2919,8 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
       dest = SET_DEST (pat), src = SET_SRC (pat);
       if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
           && rtx_equal_p (XEXP (src, 0), dest)
+          && (!FLOAT_MODE_P (GET_MODE (src))
+              || flag_unsafe_math_optimizations)
          && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
          && ! side_effects_p (pat))
        {
@@ -2877,7 +2931,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
 
          /* If this is a constant adjustment, use the same code with
             the negated constant.  Otherwise, reverse the sense of the
             arithmetic.  */
-         if (GET_CODE (other) == CONST_INT)
+         if (CONST_INT_P (other))
            new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
                                        negate_rtx (GET_MODE (src), other));
          else
@@ -3085,6 +3139,186 @@ fill_eager_delay_slots (void)
     }
 }
 
+static void delete_computation (rtx insn);
+
+/* Recursively delete prior insns that compute the value (used only by INSN
+   which the caller is deleting) stored in the register mentioned by NOTE
+   which is a REG_DEAD note associated with INSN.  */
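/* [Editorial illustration, not part of the patch.  Hypothetical RTL:

        (insn 1 (set (reg 100) (const_int 4)))
        (insn 2 (set (reg 101) (reg 100)))   ;; REG_DEAD note for reg 100

   When insn 2 is deleted, its REG_DEAD note for reg 100 leads back to
   insn 1; since insn 1 feeds nothing else, delete_prior_computation
   deletes it as well, recursing through delete_computation.]  */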
+
+static void
+delete_prior_computation (rtx note, rtx insn)
+{
+  rtx our_prev;
+  rtx reg = XEXP (note, 0);
+
+  for (our_prev = prev_nonnote_insn (insn);
+       our_prev && (NONJUMP_INSN_P (our_prev)
+                    || CALL_P (our_prev));
+       our_prev = prev_nonnote_insn (our_prev))
+    {
+      rtx pat = PATTERN (our_prev);
+
+      /* If we reach a CALL which is not calling a const function
+         or the callee pops the arguments, then give up.  */
+      if (CALL_P (our_prev)
+          && (! RTL_CONST_CALL_P (our_prev)
+              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
+        break;
+
+      /* If we reach a SEQUENCE, it is too complex to try to
+         do anything with it, so give up.  We can be run during
+         and after reorg, so SEQUENCE rtl can legitimately show
+         up here.  */
+      if (GET_CODE (pat) == SEQUENCE)
+        break;
+
+      if (GET_CODE (pat) == USE
+          && NONJUMP_INSN_P (XEXP (pat, 0)))
+        /* reorg creates USEs that look like this.  We leave them
+           alone because reorg needs them for its own purposes.  */
+        break;
+
+      if (reg_set_p (reg, pat))
+        {
+          if (side_effects_p (pat) && !CALL_P (our_prev))
+            break;
+
+          if (GET_CODE (pat) == PARALLEL)
+            {
+              /* If we find a SET of something else, we can't
+                 delete the insn.  */
+
+              int i;
+
+              for (i = 0; i < XVECLEN (pat, 0); i++)
+                {
+                  rtx part = XVECEXP (pat, 0, i);
+
+                  if (GET_CODE (part) == SET
+                      && SET_DEST (part) != reg)
+                    break;
+                }
+
+              if (i == XVECLEN (pat, 0))
+                delete_computation (our_prev);
+            }
+          else if (GET_CODE (pat) == SET
+                   && REG_P (SET_DEST (pat)))
+            {
+              int dest_regno = REGNO (SET_DEST (pat));
+              int dest_endregno = END_REGNO (SET_DEST (pat));
+              int regno = REGNO (reg);
+              int endregno = END_REGNO (reg);
+
+              if (dest_regno >= regno
+                  && dest_endregno <= endregno)
+                delete_computation (our_prev);
+
+              /* We may have a multi-word hard register and some, but not
+                 all, of the words of the register are needed in subsequent
+                 insns.  Write REG_UNUSED notes for those parts that were not
+                 needed.  */
+              else if (dest_regno <= regno
+                       && dest_endregno >= endregno)
+                {
+                  int i;
+
+                  add_reg_note (our_prev, REG_UNUSED, reg);
+
+                  for (i = dest_regno; i < dest_endregno; i++)
+                    if (! find_regno_note (our_prev, REG_UNUSED, i))
+                      break;
+
+                  if (i == dest_endregno)
+                    delete_computation (our_prev);
+                }
+            }
+
+          break;
+        }
+
+      /* If PAT references the register that dies here, it is an
+         additional use.  Hence any prior SET isn't dead.  However, this
+         insn becomes the new place for the REG_DEAD note.  */
+      if (reg_overlap_mentioned_p (reg, pat))
+        {
+          XEXP (note, 1) = REG_NOTES (our_prev);
+          REG_NOTES (our_prev) = note;
+          break;
+        }
+    }
+}
+
+/* Delete INSN and recursively delete insns that compute values used only
+   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
+   If we are running before flow.c, we need do nothing since flow.c will
+   delete dead code.  We also can't know if the registers being used are
+   dead or not at this point.
+
+   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
+   nothing other than set a register that dies in this insn, we can delete
+   that insn as well.
+
+   On machines with CC0, if CC0 is used in this insn, we may be able to
+   delete the insn that set it.  */
+
+static void
+delete_computation (rtx insn)
+{
+  rtx note, next;
+
+#ifdef HAVE_cc0
+  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
+    {
+      rtx prev = prev_nonnote_insn (insn);
+      /* We assume that at this stage
+         CC's are always set explicitly
+         and always immediately before the jump that
+         will use them.  So if the previous insn
+         exists to set the CC's, delete it
+         (unless it performs auto-increments, etc.).  */
+      if (prev && NONJUMP_INSN_P (prev)
+          && sets_cc0_p (PATTERN (prev)))
+        {
+          if (sets_cc0_p (PATTERN (prev)) > 0
+              && ! side_effects_p (PATTERN (prev)))
+            delete_computation (prev);
+          else
+            /* Otherwise, show that cc0 won't be used.  */
+            add_reg_note (prev, REG_UNUSED, cc0_rtx);
+        }
+    }
+#endif
+
+  for (note = REG_NOTES (insn); note; note = next)
+    {
+      next = XEXP (note, 1);
+
+      if (REG_NOTE_KIND (note) != REG_DEAD
+          /* Verify that the REG_NOTE is legitimate.  */
+          || !REG_P (XEXP (note, 0)))
+        continue;
+
+      delete_prior_computation (note, insn);
+    }
+
+  delete_related_insns (insn);
+}
+
+/* If all INSN does is set the pc, delete it,
+   and delete the insn that set the condition codes for it
+   if that's what the previous thing was.  */
+
+static void
+delete_jump (rtx insn)
+{
+  rtx set = single_set (insn);
+
+  if (set && GET_CODE (SET_DEST (set)) == PC)
+    delete_computation (insn);
+}
+
+
 /* Once we have tried two ways to fill a delay slot, make a pass over the
    code to try to improve the results and to do such things as more jump
    threading.  */
 
@@ -3123,10 +3357,11 @@ relax_delay_slots (rtx first)
       if (target_label && target_label != JUMP_LABEL (insn))
         reorg_redirect_jump (insn, target_label);
 
-      /* See if this jump branches around an unconditional jump.
-         If so, invert this jump and point it to the target of the
+      /* See if this jump conditionally branches around an unconditional
+         jump.  If so, invert this jump and point it to the target of the
         second jump.  */
       if (next && JUMP_P (next)
+          && any_condjump_p (insn)
          && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && target_label
          && next_active_insn (target_label) == next_active_insn (next)
@@ -3172,7 +3407,7 @@ relax_delay_slots (rtx first)
       if (JUMP_P (insn)
          && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
          && (other = prev_active_insn (insn)) != 0
-         && (condjump_p (other) || condjump_in_parallel_p (other))
+         && any_condjump_p (other)
          && no_labels_between_p (other, insn)
          && 0 > mostly_true_jump (other,
                                   get_branch_condition (other,
@@ -3210,7 +3445,7 @@ relax_delay_slots (rtx first)
         Only do so if optimizing for size since this results in slower, but
         smaller code.  */
-      if (optimize_size
+      if (optimize_function_for_size_p (cfun)
          && GET_CODE (PATTERN (delay_insn)) == RETURN
          && next
          && JUMP_P (next)
@@ -3238,7 +3473,7 @@ relax_delay_slots (rtx first)
          for (i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx this_insn = XVECEXP (pat, 0, i);
-             add_insn_after (this_insn, after);
+             add_insn_after (this_insn, after, NULL);
              after = this_insn;
            }
          delete_scheduled_jump (delay_insn);
@@ -3356,7 +3591,7 @@ relax_delay_slots (rtx first)
          for (i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx this_insn = XVECEXP (pat, 0, i);
-             add_insn_after (this_insn, after);
+             add_insn_after (this_insn, after, NULL);
              after = this_insn;
            }
          delete_scheduled_jump (delay_insn);
@@ -3376,11 +3611,11 @@ relax_delay_slots (rtx first)
          continue;
        }
 
-      /* See if this jump (with its delay slots) branches around another
-        jump (without delay slots).  If so, invert this jump and point
-        it to the target of the second jump.  We cannot do this for
-        annulled jumps, though.  Again, don't convert a jump to a RETURN
-        here.  */
+      /* See if this jump (with its delay slots) conditionally branches
+        around an unconditional jump (without delay slots).  If so, invert
+        this jump and point it to the target of the second jump.  We cannot
+        do this for annulled jumps, though.  Again, don't convert a jump to
+        a RETURN here.  */
       if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && any_condjump_p (delay_insn)
          && next && JUMP_P (next)
@@ -3463,7 +3698,7 @@ make_return_insns (rtx first)
      delay slot filler insns.  It is also unknown whether such a
      transformation would actually be profitable.  Note that the existing
      code only cares for branches with (some) filled delay slots.  */
-  if (current_function_epilogue_delay_list != NULL)
+  if (crtl->epilogue_delay_list != NULL)
     return;
 #endif
@@ -3581,14 +3816,14 @@ make_return_insns (rtx first)
 /* Try to find insns to place in delay slots.  */
 
 void
-dbr_schedule (rtx first, FILE *file)
+dbr_schedule (rtx first)
 {
   rtx insn, next, epilogue_insn = 0;
   int i;
 
   /* If the current function has no insns other than the prologue and
      epilogue, then do not try to fill any delay slots.  */
-  if (n_basic_blocks == 0)
+  if (n_basic_blocks == NUM_FIXED_BLOCKS)
     return;
 
   /* Find the highest INSN_UID and allocate and initialize our map from
@@ -3598,11 +3833,11 @@ dbr_schedule (rtx first, FILE *file)
       if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
       if (NOTE_P (insn)
-         && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
+         && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
     }
 
-  uid_to_ruid = xmalloc ((max_uid + 1) * sizeof (int));
+  uid_to_ruid = XNEWVEC (int, max_uid + 1);
   for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
     uid_to_ruid[INSN_UID (insn)] = i;
 
@@ -3610,7 +3845,7 @@ dbr_schedule (rtx first, FILE *file)
   if (unfilled_firstobj == 0)
     {
       gcc_obstack_init (&unfilled_slots_obstack);
-      unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
+      unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
     }
 
   for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
@@ -3621,9 +3856,7 @@ dbr_schedule (rtx first, FILE *file)
       INSN_FROM_TARGET_P (insn) = 0;
 
       /* Skip vector tables.  We can't get attributes for them.  */
-      if (JUMP_P (insn)
-         && (GET_CODE (PATTERN (insn)) == ADDR_VEC
-             || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
+      if (JUMP_TABLE_DATA_P (insn))
        continue;
 
       if (num_delay_slots (insn) > 0)
@@ -3660,17 +3893,6 @@ dbr_schedule (rtx first, FILE *file)
        relax_delay_slots (first);
     }
 
-  /* Delete any USE insns made by update_block; subsequent passes don't need
-     them or know how to deal with them.  */
-  for (insn = first; insn; insn = next)
-    {
-      next = NEXT_INSN (insn);
-
-      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
-         && INSN_P (XEXP (PATTERN (insn), 0)))
-       next = delete_related_insns (insn);
-    }
-
   /* If we made an end of function label, indicate that it is now safe to
      delete it by undoing our prior adjustment to LABEL_NUSES.  If it is now
      unused, delete it.  */
@@ -3682,12 +3904,23 @@ dbr_schedule (rtx first, FILE *file)
     make_return_insns (first);
 #endif
 
+  /* Delete any USE insns made by update_block; subsequent passes don't need
+     them or know how to deal with them.  */
+  for (insn = first; insn; insn = next)
+    {
+      next = NEXT_INSN (insn);
+
+      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
+         && INSN_P (XEXP (PATTERN (insn), 0)))
+       next = delete_related_insns (insn);
+    }
+
   obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
 
   /* It is not clear why the line below is needed, but it does seem to
      be.  */
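  /* [Editorial note: a plausible reason is that the obstack_free above
     releases everything back to and including the zero-length object
     unfilled_firstobj points at, so a fresh zero-length allocation is
     needed to give later invocations a valid base marker on
     unfilled_slots_obstack.]  */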
-  unfilled_firstobj = obstack_alloc (&unfilled_slots_obstack, 0);
+  unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
 
-  if (file)
+  if (dump_file)
     {
       int i, j, need_comma;
       int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
@@ -3697,25 +3930,25 @@ dbr_schedule (rtx first, FILE *file)
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
-         fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
+         fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
 
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
-             fprintf (file, ";; Reorg function #%d\n", i);
+             fprintf (dump_file, ";; Reorg function #%d\n", i);
 
-             fprintf (file, ";; %d insns needing delay slots\n;; ",
+             fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);
 
              for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
-                     fprintf (file, ", ");
+                     fprintf (dump_file, ", ");
                    need_comma = 1;
-                   fprintf (file, "%d got %d delays",
+                   fprintf (dump_file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
-             fprintf (file, "\n");
+             fprintf (dump_file, "\n");
            }
        }
       memset (total_delay_slots, 0, sizeof total_delay_slots);
@@ -3741,35 +3974,35 @@ dbr_schedule (rtx first, FILE *file)
              total_delay_slots[0]++;
          }
        }
-      fprintf (file, ";; Reorg totals: ");
+      fprintf (dump_file, ";; Reorg totals: ");
       need_comma = 0;
       for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_delay_slots[j])
            {
              if (need_comma)
-               fprintf (file, ", ");
+               fprintf (dump_file, ", ");
              need_comma = 1;
-             fprintf (file, "%d got %d delays", total_delay_slots[j], j);
+             fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
            }
        }
-      fprintf (file, "\n");
+      fprintf (dump_file, "\n");
 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
-      fprintf (file, ";; Reorg annuls: ");
+      fprintf (dump_file, ";; Reorg annuls: ");
       need_comma = 0;
       for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
       {
         if (total_annul_slots[j])
           {
             if (need_comma)
-              fprintf (file, ", ");
+              fprintf (dump_file, ", ");
             need_comma = 1;
-             fprintf (file, "%d got %d delays", total_annul_slots[j], j);
+             fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
           }
       }
-      fprintf (file, "\n");
+      fprintf (dump_file, "\n");
 #endif
-      fprintf (file, "\n");
+      fprintf (dump_file, "\n");
     }
 
   /* For all JUMP insns, fill in branch prediction notes, so that during
@@ -3792,9 +4025,7 @@ dbr_schedule (rtx first, FILE *file)
        continue;
 
       pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
-      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
-                                           GEN_INT (pred_flags),
-                                           REG_NOTES (insn));
+      add_reg_note (insn, REG_BR_PRED, GEN_INT (pred_flags));
     }
   free_resource_info ();
   free (uid_to_ruid);
@@ -3804,12 +4035,14 @@ dbr_schedule (rtx first, FILE *file)
     {
       rtx link;
 
-      for (link = current_function_epilogue_delay_list;
+      for (link = crtl->epilogue_delay_list;
           link;
           link = XEXP (link, 1))
        INSN_LOCATOR (XEXP (link, 0)) = 0;
     }
+
 #endif
+
+  crtl->dbr_scheduled_p = true;
 }
 #endif /* DELAY_SLOTS */
 
@@ -3817,23 +4050,27 @@ static bool
 gate_handle_delay_slots (void)
 {
 #ifdef DELAY_SLOTS
-  return flag_delayed_branch;
-#else
+  /* At -O0 dataflow info isn't updated after RA.  */
+  return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
+#else
   return 0;
 #endif
 }
 
 /* Run delay slot optimization.  */
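/* [Editorial note: under the pass manager targeted by this patch, an
   execute hook returns a word of TODO_* flags; the "return 0" added
   below simply requests no extra work.]  */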
 
-static void
+static unsigned int
 rest_of_handle_delay_slots (void)
 {
 #ifdef DELAY_SLOTS
-  dbr_schedule (get_insns (), dump_file);
+  dbr_schedule (get_insns ());
 #endif
-}
+  return 0;
+}
 
-struct tree_opt_pass pass_delay_slots =
+struct rtl_opt_pass pass_delay_slots =
 {
+ {
+  RTL_PASS,
  "dbr",                                /* name */
  gate_handle_delay_slots,              /* gate */
  rest_of_handle_delay_slots,           /* execute */
@@ -3846,8 +4083,8 @@ struct tree_opt_pass pass_delay_slots =
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
-  TODO_ggc_collect,                     /* todo_flags_finish */
-  'd'                                   /* letter */
+  TODO_ggc_collect                      /* todo_flags_finish */
+ }
 };
 
 /* Machine dependent reorg pass.  */
@@ -3858,14 +4095,17 @@ gate_handle_machine_reorg (void)
 }
 
 
-static void
+static unsigned int
 rest_of_handle_machine_reorg (void)
 {
   targetm.machine_dependent_reorg ();
+  return 0;
 }
 
-struct tree_opt_pass pass_machine_reorg =
+struct rtl_opt_pass pass_machine_reorg =
 {
+ {
+  RTL_PASS,
  "mach",                               /* name */
  gate_handle_machine_reorg,            /* gate */
  rest_of_handle_machine_reorg,         /* execute */
@@ -3878,7 +4118,6 @@ struct tree_opt_pass pass_machine_reorg =
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
-  TODO_ggc_collect,                     /* todo_flags_finish */
-  'M'                                   /* letter */
+  TODO_ggc_collect                      /* todo_flags_finish */
+ }
 };
-
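
[Editorial appendix, not part of the patch: a minimal, self-contained C
sketch of the jump-chain idea behind the follow_jumps function added
above.  The toy_insn type and all names here are made up for
illustration; the real code walks RTL with next_active_insn and
JUMP_LABEL, and additionally refuses to follow jumps into dispatch
tables.]

#include <stdio.h>

/* Toy stand-in for an insn: when jump_target is non-null the insn is
   an unconditional jump to that insn, otherwise it is ordinary code.  */
struct toy_insn
{
  const char *name;
  struct toy_insn *jump_target;
};

/* Follow a chain of unconditional jumps starting at LABEL, mirroring
   follow_jumps: stop at the first non-jump, and give up (returning
   LABEL unchanged) on a cycle or once the chain is 10 deep.  */
static struct toy_insn *
toy_follow_jumps (struct toy_insn *label)
{
  struct toy_insn *value = label;
  int depth;

  for (depth = 0; depth < 10; depth++)
    {
      struct toy_insn *target = value->jump_target;

      if (target == NULL)      /* Not a jump; the chain ends here.  */
        return value;
      if (target == label)     /* Cycle back to the start; give up.  */
        return label;
      value = target;
    }
  return label;                /* Chain too deep; leave the insn alone.  */
}

int
main (void)
{
  struct toy_insn c = { "C", NULL };
  struct toy_insn b = { "B", &c };
  struct toy_insn a = { "A", &b };

  /* A jumps to B, which jumps to C; redirecting A straight to C is the
     kind of simplification relax_delay_slots performs.  */
  printf ("A ultimately reaches %s\n", toy_follow_jumps (&a)->name);
  return 0;
}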