Contributed by Michael Tiemann (tiemann@cygnus.com) Enhanced by,
and currently maintained by, Jim Wilson (wilson@cygnus.com)
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify it
-under the terms of the GNU General Public License as published by the
-Free Software Foundation; either version 2, or (at your option) any
-later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful, but WITHOUT
-ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to the Free
-the Free Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
/* This pass implements list scheduling within basic blocks. It is
static void init_regions PARAMS ((void));
static void schedule_region PARAMS ((int));
-static void propagate_deps PARAMS ((int, struct deps *, int));
+static void propagate_deps PARAMS ((int, struct deps *));
static void free_pending_lists PARAMS ((void));
/* Functions for construction of the control flow graph. */
code = GET_CODE (insn);
if (GET_RTX_CLASS (code) == 'i' && code != JUMP_INSN)
{
- rtx note = find_reg_note (REG_NOTES (insn), REG_LABEL, NULL_RTX);
+ rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
if (note
&& ! (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
- && find_reg_note (REG_NOTES (NEXT_INSN (insn)),
- REG_LABEL,
+ && find_reg_note (NEXT_INSN (insn), REG_LABEL,
XEXP (note, 0))))
return 1;
}
abort (); \
else \
set[index/HOST_BITS_PER_WIDE_INT] |= \
- 1 << (index % HOST_BITS_PER_WIDE_INT); \
+ ((unsigned HOST_WIDE_INT) 1) << (index % HOST_BITS_PER_WIDE_INT); \
}
/* Turn off the index'th bit in set. */
abort (); \
else \
set[index/HOST_BITS_PER_WIDE_INT] &= \
- ~(1 << (index%HOST_BITS_PER_WIDE_INT)); \
+ ~(((unsigned HOST_WIDE_INT) 1) << (index % HOST_BITS_PER_WIDE_INT)); \
}
/* Check if the index'th bit in bitset set is on. */
{
if (index >= HOST_BITS_PER_WIDE_INT * len)
abort ();
- return (set[index / HOST_BITS_PER_WIDE_INT] &
- 1 << (index % HOST_BITS_PER_WIDE_INT)) ? 1 : 0;
+ return ((set[index / HOST_BITS_PER_WIDE_INT] &
+ ((unsigned HOST_WIDE_INT) 1) << (index % HOST_BITS_PER_WIDE_INT))
+ ? 1 : 0);
}
/* Translate a bit-set SET to a list BL of the bit-set members. */
|| GET_CODE (reg) == STRICT_LOW_PART)
reg = XEXP (reg, 0);
- if (GET_CODE (reg) == PARALLEL
- && GET_MODE (reg) == BLKmode)
+ if (GET_CODE (reg) == PARALLEL)
{
register int i;
+
for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
- if (check_live_1 (src, XVECEXP (reg, 0, i)))
- return 1;
+ if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
+ if (check_live_1 (src, XEXP (XVECEXP (reg, 0, i), 0)))
+ return 1;
+
return 0;
}
|| GET_CODE (reg) == STRICT_LOW_PART)
reg = XEXP (reg, 0);
- if (GET_CODE (reg) == PARALLEL
- && GET_MODE (reg) == BLKmode)
+ if (GET_CODE (reg) == PARALLEL)
{
register int i;
+
for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
- update_live_1 (src, XVECEXP (reg, 0, i));
+ if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
+ update_live_1 (src, XEXP (XVECEXP (reg, 0, i), 0));
+
return;
}
if (tmp_class == TRAP_RISKY)
break;
/* Test if it is a load. */
- tmp_class =
- WORST_CLASS (tmp_class,
- may_trap_exp (SET_SRC (XVECEXP (pat, 0, i)), 0));
+ tmp_class
+ = WORST_CLASS (tmp_class,
+ may_trap_exp (SET_SRC (XVECEXP (pat, 0, i)),
+ 0));
break;
case COND_EXEC:
case TRAP_IF:
tmp_class = TRAP_RISKY;
break;
- default:;
+ default:
+ ;
}
insn_class = WORST_CLASS (insn_class, tmp_class);
if (insn_class == TRAP_RISKY || insn_class == IRISKY)
NULL, NULL,
NULL, NULL,
- 0
+ 0, 0
};
/* Add dependences so that branches are scheduled to run last in their
static struct deps *bb_deps;
/* After computing the dependencies for block BB, propagate the dependencies
- found in TMP_DEPS to the successors of the block. MAX_REG is the number
- of registers. */
+ found in TMP_DEPS to the successors of the block. */
static void
-propagate_deps (bb, tmp_deps, max_reg)
+propagate_deps (bb, tmp_deps)
int bb;
struct deps *tmp_deps;
- int max_reg;
{
int b = BB_TO_BLOCK (bb);
int e, first_edge;
continue;
}
- for (reg = 0; reg < max_reg; reg++)
+ /* The reg_last lists are inherited by bb_succ. */
+ EXECUTE_IF_SET_IN_REG_SET (&tmp_deps->reg_last_in_use, 0, reg,
{
- /* reg-last-uses lists are inherited by bb_succ. */
- for (u = tmp_deps->reg_last_uses[reg]; u; u = XEXP (u, 1))
- {
- if (find_insn_list (XEXP (u, 0),
- succ_deps->reg_last_uses[reg]))
- continue;
-
- succ_deps->reg_last_uses[reg]
- = alloc_INSN_LIST (XEXP (u, 0),
- succ_deps->reg_last_uses[reg]);
- }
-
- /* reg-last-defs lists are inherited by bb_succ. */
- for (u = tmp_deps->reg_last_sets[reg]; u; u = XEXP (u, 1))
- {
- if (find_insn_list (XEXP (u, 0),
- succ_deps->reg_last_sets[reg]))
- continue;
-
- succ_deps->reg_last_sets[reg]
- = alloc_INSN_LIST (XEXP (u, 0),
- succ_deps->reg_last_sets[reg]);
- }
-
- for (u = tmp_deps->reg_last_clobbers[reg]; u; u = XEXP (u, 1))
- {
- if (find_insn_list (XEXP (u, 0),
- succ_deps->reg_last_clobbers[reg]))
- continue;
-
- succ_deps->reg_last_clobbers[reg]
- = alloc_INSN_LIST (XEXP (u, 0),
- succ_deps->reg_last_clobbers[reg]);
- }
- }
+ struct deps_reg *tmp_deps_reg = &tmp_deps->reg_last[reg];
+ struct deps_reg *succ_deps_reg = &succ_deps->reg_last[reg];
+
+ for (u = tmp_deps_reg->uses; u; u = XEXP (u, 1))
+ if (! find_insn_list (XEXP (u, 0), succ_deps_reg->uses))
+ succ_deps_reg->uses
+ = alloc_INSN_LIST (XEXP (u, 0), succ_deps_reg->uses);
+
+ for (u = tmp_deps_reg->sets; u; u = XEXP (u, 1))
+ if (! find_insn_list (XEXP (u, 0), succ_deps_reg->sets))
+ succ_deps_reg->sets
+ = alloc_INSN_LIST (XEXP (u, 0), succ_deps_reg->sets);
+
+ for (u = tmp_deps_reg->clobbers; u; u = XEXP (u, 1))
+ if (! find_insn_list (XEXP (u, 0), succ_deps_reg->clobbers))
+ succ_deps_reg->clobbers
+ = alloc_INSN_LIST (XEXP (u, 0), succ_deps_reg->clobbers);
+ });
+ IOR_REG_SET (&succ_deps->reg_last_in_use, &tmp_deps->reg_last_in_use);
/* Mem read/write lists are inherited by bb_succ. */
link_insn = tmp_deps->pending_read_insns;
/* last_function_call is inherited by bb_succ. */
for (u = tmp_deps->last_function_call; u; u = XEXP (u, 1))
- {
- if (find_insn_list (XEXP (u, 0),
- succ_deps->last_function_call))
- continue;
-
+ if (! find_insn_list (XEXP (u, 0), succ_deps->last_function_call))
succ_deps->last_function_call
- = alloc_INSN_LIST (XEXP (u, 0),
- succ_deps->last_function_call);
- }
+ = alloc_INSN_LIST (XEXP (u, 0), succ_deps->last_function_call);
/* last_pending_memory_flush is inherited by bb_succ. */
for (u = tmp_deps->last_pending_memory_flush; u; u = XEXP (u, 1))
- {
- if (find_insn_list (XEXP (u, 0),
+ if (! find_insn_list (XEXP (u, 0),
succ_deps->last_pending_memory_flush))
- continue;
-
succ_deps->last_pending_memory_flush
= alloc_INSN_LIST (XEXP (u, 0),
succ_deps->last_pending_memory_flush);
- }
/* sched_before_next_call is inherited by bb_succ. */
x = LOG_LINKS (tmp_deps->sched_before_next_call);
Specifically for reg-reg data dependences, the block insns are
scanned by sched_analyze () top-to-bottom. Two lists are
- maintained by sched_analyze (): reg_last_sets[] for register DEFs,
- and reg_last_uses[] for register USEs.
+ maintained by sched_analyze (): reg_last[].sets for register DEFs,
+ and reg_last[].uses for register USEs.
When analysis is completed for bb, we update for its successors:
; - DEFS[succ] = Union (DEFS [succ], DEFS [bb])
int bb;
{
rtx head, tail;
- int max_reg = max_reg_num ();
struct deps tmp_deps;
tmp_deps = bb_deps[bb];
add_branch_dependences (head, tail);
if (current_nr_blocks > 1)
- propagate_deps (bb, &tmp_deps, max_reg);
+ propagate_deps (bb, &tmp_deps);
/* Free up the INSN_LISTs. */
free_deps (&tmp_deps);
-
- /* Assert that we won't need bb_reg_last_* for this block anymore.
- The vectors we're zeroing out have just been freed by the call to
- free_deps. */
- bb_deps[bb].reg_last_uses = 0;
- bb_deps[bb].reg_last_sets = 0;
- bb_deps[bb].reg_last_clobbers = 0;
}
+
/* Remove all INSN_LISTs and EXPR_LISTs from the pending lists and add
them to the unused_*_list variables, so that they can be reused. */
or after the last real insn of the block. So if the first insn
has a REG_SAVE_NOTE which would otherwise be emitted before the
insn, it is redundant with the note before the start of the
- block, and so we have to take it out.
-
- FIXME: Probably the same thing should be done with REG_SAVE_NOTEs
- referencing NOTE_INSN_SETJMP at the end of the block. */
+ block, and so we have to take it out. */
if (INSN_P (head))
{
rtx note;
for (note = REG_NOTES (head); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_SAVE_NOTE)
{
- if (INTVAL (XEXP (note, 0)) != NOTE_INSN_SETJMP)
- {
- remove_note (head, note);
- note = XEXP (note, 1);
- remove_note (head, note);
- }
- else
- note = XEXP (note, 1);
+ remove_note (head, note);
+ note = XEXP (note, 1);
+ remove_note (head, note);
}
}
{
rtx head, tail;
get_block_head_tail (BB_TO_BLOCK (bb), &head, &tail);
- restore_line_notes (BB_TO_BLOCK (bb), head, tail);
+ restore_line_notes (head, tail);
}
}