+ if (l->in_libcall)
+ continue;
+
+ if (gcse_constant_p (this_rtx))
+ newcnst = this_rtx;
+ if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
+ /* Don't copy propagate if it has attached REG_EQUIV note.
+ At this point only function parameters should have
+ REG_EQUIV notes and if the argument slot is used somewhere
+ explicitly, it means address of parameter has been taken,
+ so we should not extend the lifetime of the pseudo. */
+ && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
+ || GET_CODE (XEXP (note, 0)) != MEM))
+ newreg = this_rtx;
+ }
+ if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
+ {
+ /* If we find a case where we can't fix the retval REG_EQUAL notes
+ match the new register, we either have to abandon this replacement
+ or fix delete_trivially_dead_insns to preserve the setting insn,
+ or make it delete the REG_EQUAL note, and fix up all passes that
+ require the REG_EQUAL note there. */
+ if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
+ abort ();
+ if (gcse_file != NULL)
+ {
+ fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
+ REGNO (x));
+ fprintf (gcse_file, "insn %d with constant ",
+ INSN_UID (insn));
+ print_rtl (gcse_file, newcnst);
+ fprintf (gcse_file, "\n");
+ }
+ const_prop_count++;
+ return true;
+ }
+ else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
+ {
+ adjust_libcall_notes (x, newreg, insn, libcall_sp);
+ if (gcse_file != NULL)
+ {
+ fprintf (gcse_file,
+ "LOCAL COPY-PROP: Replacing reg %d in insn %d",
+ REGNO (x), INSN_UID (insn));
+ fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
+ }
+ copy_prop_count++;
+ return true;
+ }
+ }
+ return false;
+}
+
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
{
  rtx end;

  /* Walk the stack of enclosing libcall end insns, innermost first.
     The stack is zero-terminated, so the while condition stops us.  */
  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
	continue;

      if (REG_P (newval))
	{
	  /* If NEWVAL is a register that is set again between INSN and
	     this libcall end, substituting it into the note would record
	     a stale value.  In that case scan the remaining libcall
	     levels without substituting: the adjustment only fails if
	     some note still mentions NEWVAL.  */
	  if (reg_set_between_p (newval, PREV_INSN (insn), end))
	    {
	      do
		{
		  note = find_reg_equal_equiv_note (end);
		  if (! note)
		    continue;  /* In a do/while, continue re-tests the
				  loop condition, advancing LIBCALL_SP.  */
		  if (reg_mentioned_p (newval, XEXP (note, 0)))
		    return false;
		}
	      while ((end = *libcall_sp++));
	      return true;
	    }
	}
      /* Safe to substitute: rewrite OLDREG to NEWVAL in this note.  */
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      /* Subsequent reg_set_between_p checks start from this level.  */
      insn = end;
    }
  return true;
}
+
+#define MAX_NESTED_LIBCALLS 9
+
+static void
+local_cprop_pass (int alter_jumps)
+{
+ rtx insn;
+ struct reg_use *reg_used;
+ rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
+ bool changed = false;
+
+ cselib_init ();
+ libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
+ *libcall_sp = 0;
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (INSN_P (insn))
+ {
+ rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
+
+ if (note)
+ {
+ if (libcall_sp == libcall_stack)
+ abort ();
+ *--libcall_sp = XEXP (note, 0);
+ }
+ note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
+ if (note)
+ libcall_sp++;
+ note = find_reg_equal_equiv_note (insn);
+ do
+ {
+ reg_use_count = 0;
+ note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
+ if (note)
+ local_cprop_find_used_regs (&XEXP (note, 0), NULL);
+
+ for (reg_used = ®_use_table[0]; reg_use_count > 0;
+ reg_used++, reg_use_count--)
+ if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
+ libcall_sp))
+ {
+ changed = true;
+ break;
+ }
+ if (INSN_DELETED_P (insn))
+ break;
+ }
+ while (reg_use_count);
+ }
+ cselib_process_insn (insn);
+ }
+ cselib_finish ();
+ /* Global analysis may get into infinite loops for unreachable blocks. */
+ if (changed && alter_jumps)
+ {
+ delete_unreachable_blocks ();
+ free_reg_set_mem ();
+ alloc_reg_set_mem (max_reg_num ());
+ compute_sets (get_insns ());
+ }
+}
+
/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (int alter_jumps)
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      /* Empty function body; nothing to propagate.  */
      if (gcse_file != NULL)
	fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  /* Iterate from the second real block: the first block is skipped
     (see the "start at block 1" note above).  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      /* Walk the insns of BB from head to one past the end insn.  */
      for (insn = BB_HEAD (bb);
	   insn != NULL && insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    changed |= cprop_insn (insn, alter_jumps);

	    /* Keep track of everything modified by this insn.  */
	    /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
	       call mark_oprs_set if we turned the insn into a NOTE.  */
	    if (GET_CODE (insn) != NOTE)
	      mark_oprs_set (insn);
	  }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
+
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */

rtx
fis_get_condition (rtx jump)
{
  rtx cond, set, tmp, insn, earliest;
  bool reverse;

  if (! any_condjump_p (jump))
    return NULL_RTX;

  set = pc_set (jump);
  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false,
     reverse the condition.  */
  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));

  /* Use canonicalize_condition to do the dirty work of manipulating
     MODE_CC values and COMPARE rtx codes.  */
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
				false);
  if (!tmp)
    return NULL_RTX;

  /* Verify that the given condition is valid at JUMP by virtue of not
     having been modified since EARLIEST.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      break;
  if (insn == jump)
    return tmp;

  /* The condition was modified.  See if we can get a partial result
     that doesn't follow all the reversals.  Perhaps combine can fold
     them together later.  */
  tmp = XEXP (tmp, 0);
  if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
    return NULL_RTX;
  /* Retry, passing the condition's first operand (a pseudo of integral
     mode) — presumably restricting the result to that register;
     confirm against canonicalize_condition's contract.  */
  tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
				false);
  if (!tmp)
    return NULL_RTX;

  /* For sanity's sake, re-validate the new result.  */
  for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && modified_in_p (tmp, insn))
      return NULL_RTX;

  return tmp;
}
+
+/* Check the comparison COND to see if we can safely form an implicit set from
+ it. COND is either an EQ or NE comparison. */
+
+static bool
+implicit_set_cond_p (rtx cond)
+{
+ enum machine_mode mode = GET_MODE (XEXP (cond, 0));
+ rtx cst = XEXP (cond, 1);
+
+ /* We can't perform this optimization if either operand might be or might
+ contain a signed zero. */
+ if (HONOR_SIGNED_ZEROS (mode))
+ {
+ /* It is sufficient to check if CST is or contains a zero. We must
+ handle float, complex, and vector. If any subpart is a zero, then
+ the optimization can't be performed. */
+ /* ??? The complex and vector checks are not implemented yet. We just
+ always return zero for them. */
+ if (GET_CODE (cst) == CONST_DOUBLE)
+ {
+ REAL_VALUE_TYPE d;
+ REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
+ if (REAL_VALUES_EQUAL (d, dconst0))
+ return 0;
+ }
+ else
+ return 0;
+ }
+
+ return gcse_constant_p (cst);
+}
+
+/* Find the implicit sets of a function. An "implicit set" is a constraint
+ on the value of a variable, implied by a conditional jump. For example,
+ following "if (x == 2)", the then branch may be optimized as though the
+ conditional performed an "explicit set", in this example, "x = 2". This
+ function records the set patterns that are implicit at the start of each
+ basic block. */
+
+static void
+find_implicit_sets (void)
+{
+ basic_block bb, dest;
+ unsigned int count;
+ rtx cond, new;
+
+ count = 0;
+ FOR_EACH_BB (bb)
+ /* Check for more than one successor. */
+ if (bb->succ && bb->succ->succ_next)
+ {
+ cond = fis_get_condition (BB_END (bb));
+
+ if (cond
+ && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
+ && GET_CODE (XEXP (cond, 0)) == REG
+ && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
+ && implicit_set_cond_p (cond))
+ {
+ dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
+ : FALLTHRU_EDGE (bb)->dest;
+
+ if (dest && ! dest->pred->pred_next
+ && dest != EXIT_BLOCK_PTR)
+ {
+ new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
+ XEXP (cond, 1));
+ implicit_sets[dest->index] = new;
+ if (gcse_file)
+ {
+ fprintf(gcse_file, "Implicit set of reg %d in ",
+ REGNO (XEXP (cond, 0)));
+ fprintf(gcse_file, "basic block %d\n", dest->index);
+ }
+ count++;
+ }
+ }
+ }
+
+ if (gcse_file)
+ fprintf (gcse_file, "Found %d implicit sets\n", count);
+}
+
/* Perform one copy/constant propagation pass.
   PASS is the pass count.  If CPROP_JUMPS is true, perform constant
   propagation into conditional jumps.  If BYPASS_JUMPS is true,
   perform conditional jump bypassing optimizations.
   Returns nonzero if the global propagation changed anything.  */

static int
one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
{
  int changed = 0;

  /* Statistics reported in the dump file at the end of the pass.  */
  const_prop_count = 0;
  copy_prop_count = 0;

  /* Local propagation runs first, over the whole insn stream.  */
  local_cprop_pass (cprop_jumps);

  /* Determine implicit sets.  */
  implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
  find_implicit_sets ();

  alloc_hash_table (max_cuid, &set_hash_table, 1);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (gcse_file)
    dump_hash_table (gcse_file, "SET", &set_hash_table);
  /* Only run the dataflow-based global pass if there is something in
     the hash table to propagate.  */
  if (set_hash_table.n_elems > 0)
    {
      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();
      changed = cprop (cprop_jumps);
      if (bypass_jumps)
	changed |= bypass_conditional_jumps ();
      free_cprop_mem ();
    }

  free_hash_table (&set_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
	       current_function_name (), pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
	       const_prop_count, copy_prop_count);
    }
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && cprop_jumps)
    delete_unreachable_blocks ();

  return changed;
}
+\f
+/* Bypass conditional jumps. */
+
+/* The value of last_basic_block at the beginning of the jump_bypass
+ pass. The use of redirect_edge_and_branch_force may introduce new
+ basic blocks, but the data flow analysis is only valid for basic
+ block indices less than bypass_last_basic_block. */
+
+static int bypass_last_basic_block;
+
+/* Find a set of REGNO to a constant that is available at the end of basic
+ block BB. Returns NULL if no such set is found. Based heavily upon
+ find_avail_set. */
+
+static struct expr *
+find_bypass_set (int regno, int bb)
+{
+ struct expr *result = 0;