You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* This is the final pass of the compiler.
It looks at the rtl code for a function and outputs assembler code.
#include "debug.h"
#include "expr.h"
#include "cfglayout.h"
+#include "tree-pass.h"
+#include "timevar.h"
+#include "cgraph.h"
+#include "coverage.h"
#ifdef XCOFF_DEBUGGING_INFO
#include "xcoffout.h" /* Needed for external data
#include "dbxout.h"
#endif
+#ifdef SDB_DEBUGGING_INFO
+#include "sdbout.h"
+#endif
+
/* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
null default for it to save conditionalization later. */
#ifndef CC_STATUS_INIT
/* Filename of last NOTE. */
static const char *last_filename;
+/* Whether to force emission of a line note before the next insn. */
+static bool force_source_line = false;
+
extern int length_unit_log; /* This is defined in insn-attrtab.c. */
/* Nonzero while outputting an `asm' with operands.
- This means that inconsistencies are the user's fault, so don't abort.
+ This means that inconsistencies are the user's fault, so don't die.
The precise value is the insn being output, to pass to error_for_asm. */
rtx this_is_asm_operands;
}
/* Obtain the current length of an insn. If branch shortening has been done,
- get its actual length. Otherwise, get its maximum length. */
-
-int
-get_attr_length (rtx insn ATTRIBUTE_UNUSED)
+   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
+ length. */
+static inline int
+get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
+ int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
{
#ifdef HAVE_ATTR_length
rtx body;
return 0;
case CALL_INSN:
- length = insn_default_length (insn);
+ length = fallback_fn (insn);
break;
case JUMP_INSN:
ADDR_VEC_ALIGN. */
}
else
- length = insn_default_length (insn);
+ length = fallback_fn (insn);
break;
case INSN:
return 0;
else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
- length = asm_insn_count (body) * insn_default_length (insn);
+ length = asm_insn_count (body) * fallback_fn (insn);
else if (GET_CODE (body) == SEQUENCE)
for (i = 0; i < XVECLEN (body, 0); i++)
length += get_attr_length (XVECEXP (body, 0, i));
else
- length = insn_default_length (insn);
+ length = fallback_fn (insn);
break;
default:
return length;
#else /* not HAVE_ATTR_length */
return 0;
+#define insn_default_length 0
+#define insn_min_length 0
#endif /* not HAVE_ATTR_length */
}
+
+/* Obtain the current length of an insn.  If branch shortening has been done,
+   get its actual length.  Otherwise, get its maximum length, i.e. fall
+   back to insn_default_length via get_attr_length_1.  */
+int
+get_attr_length (rtx insn)
+{
+  return get_attr_length_1 (insn, insn_default_length);
+}
+
+/* Obtain the current length of an insn.  If branch shortening has been done,
+   get its actual length.  Otherwise, get its minimum length, i.e. fall
+   back to insn_min_length via get_attr_length_1.  */
+int
+get_attr_min_length (rtx insn)
+{
+  return get_attr_length_1 (insn, insn_min_length);
+}
\f
/* Code to handle alignment inside shorten_branches. */
LABEL_TO_MAX_SKIP (label) = max_skip;
}
}
+
+/* Pass descriptor that runs compute_alignments in the RTL pass
+   pipeline.  NOTE(review): name is NULL and tv_id is 0, so the pass is
+   anonymous and not separately timed — confirm this is intended.  */
+struct tree_opt_pass pass_compute_alignments =
+{
+  NULL,                                 /* name */
+  NULL,                                 /* gate */
+  compute_alignments,                   /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  0,                                    /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  0,                                    /* todo_flags_finish */
+  0                                     /* letter */
+};
+
\f
/* Make a pass over all insns and compute their actual lengths by shortening
any branches of variable length if possible. */
label_align = xrealloc (label_align,
n_labels * sizeof (struct label_alignment));
- /* Range of labels grows monotonically in the function. Abort here
+ /* Range of labels grows monotonically in the function. Failing here
means that the initialization of array got lost. */
gcc_assert (n_old_labels <= n_labels);
if (min_align > LABEL_TO_ALIGNMENT (lab))
min_align = LABEL_TO_ALIGNMENT (lab);
}
- XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
- XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
+ XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
+ XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
insn_shuid = INSN_SHUID (insn);
rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
memset (&flags, 0, sizeof (flags));
assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
}
- function_section (current_function_decl);
+ current_function_section (current_function_decl);
#if defined(ASM_OUTPUT_REG_PUSH)
if (sval && svrtx != NULL_RTX && REG_P (svrtx))
}
}
-/* Return boolean indicating if there is a NOTE_INSN_UNLIKELY_EXECUTED_CODE
- note in the instruction chain (going forward) between the current
- instruction, and the next 'executable' instruction. */
-
-bool
-scan_ahead_for_unlikely_executed_note (rtx insn)
-{
- rtx temp;
- int bb_note_count = 0;
-
- for (temp = insn; temp; temp = NEXT_INSN (temp))
- {
- if (NOTE_P (temp)
- && NOTE_LINE_NUMBER (temp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
- return true;
- if (NOTE_P (temp)
- && NOTE_LINE_NUMBER (temp) == NOTE_INSN_BASIC_BLOCK)
- {
- bb_note_count++;
- if (bb_note_count > 1)
- return false;
- }
- if (INSN_P (temp))
- return false;
- }
-
- return false;
-}
-
/* The final scan for one insn, INSN.
Args are same as in `final', except that INSN
is the insn being scanned.
case NOTE_INSN_EXPECTED_VALUE:
break;
- case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
+ case NOTE_INSN_SWITCH_TEXT_SECTIONS:
/* The presence of this note indicates that this basic block
belongs in the "cold" section of the .o file. If we are
not already writing to the cold section we need to change
to it. */
-
- unlikely_text_section ();
+
+ if (last_text_section == in_text)
+ {
+ (*debug_hooks->switch_text_section) ();
+ unlikely_text_section ();
+ }
+ else
+ {
+ (*debug_hooks->switch_text_section) ();
+ text_section ();
+ }
break;
case NOTE_INSN_BASIC_BLOCK:
- /* If we are performing the optimization that partitions
- basic blocks into hot & cold sections of the .o file,
- then at the start of each new basic block, before
- beginning to write code for the basic block, we need to
- check to see whether the basic block belongs in the hot
- or cold section of the .o file, and change the section we
- are writing to appropriately. */
-
- if (flag_reorder_blocks_and_partition
- && !scan_ahead_for_unlikely_executed_note (insn))
- function_section (current_function_decl);
-
#ifdef TARGET_UNWIND_INFO
targetm.asm_out.unwind_emit (asm_out_file, insn);
#endif
if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
{
*seen |= SEEN_EMITTED;
- last_filename = NULL;
+ force_source_line = true;
}
else
*seen |= SEEN_BB;
if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
{
*seen |= SEEN_EMITTED;
- last_filename = NULL;
+ force_source_line = true;
}
else
*seen |= SEEN_NOTE;
if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
{
*seen |= SEEN_EMITTED;
- last_filename = NULL;
+ force_source_line = true;
}
else
*seen |= SEEN_NOTE;
if (LABEL_NAME (insn))
(*debug_hooks->label) (insn);
- /* If we are doing the optimization that partitions hot & cold
- basic blocks into separate sections of the .o file, we need
- to ensure the jump table ends up in the correct section... */
-
- if (flag_reorder_blocks_and_partition
- && targetm.have_named_sections)
- {
- rtx tmp_table, tmp_label;
- if (LABEL_P (insn)
- && tablejump_p (NEXT_INSN (insn), &tmp_label, &tmp_table))
- {
- /* Do nothing; Do NOT change the current section. */
- }
- else if (scan_ahead_for_unlikely_executed_note (insn))
- unlikely_text_section ();
- else if (in_unlikely_text_section ())
- function_section (current_function_decl);
- }
-
if (app_on)
{
fputs (ASM_APP_OFF, file);
ASM_OUTPUT_ALIGN (file, log_align);
}
else
- function_section (current_function_decl);
+ current_function_section (current_function_decl);
#ifdef ASM_OUTPUT_CASE_LABEL
ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
if (! JUMP_TABLES_IN_TEXT_SECTION)
targetm.asm_out.function_rodata_section (current_function_decl);
else
- function_section (current_function_decl);
+ current_function_section (current_function_decl);
if (app_on)
{
#endif
#endif
- function_section (current_function_decl);
+ current_function_section (current_function_decl);
break;
}
/* Get out the operand values. */
string = decode_asm_operands (body, ops, NULL, NULL, NULL);
- /* Inhibit aborts on what would otherwise be compiler bugs. */
+  /* Inhibit dying on what would otherwise be compiler bugs.  */
insn_noperands = noperands;
this_is_asm_operands = insn;
emit them before the peephole. */
if (next != 0 && next != NEXT_INSN (insn))
{
- rtx note;
+ rtx note, prev = PREV_INSN (insn);
for (note = NEXT_INSN (insn); note != next;
note = NEXT_INSN (note))
final_scan_insn (note, file, optimize, nopeepholes, seen);
+
+ /* Put the notes in the proper position for a later
+ rescan. For example, the SH target can do this
+ when generating a far jump in a delayed branch
+ sequence. */
+ note = NEXT_INSN (insn);
+ PREV_INSN (note) = prev;
+ NEXT_INSN (prev) = note;
+ NEXT_INSN (PREV_INSN (next)) = insn;
+ PREV_INSN (insn) = PREV_INSN (next);
+ NEXT_INSN (insn) = next;
+ PREV_INSN (next) = insn;
}
/* PEEPHOLE might have changed this. */
return NEXT_INSN (insn);
}
\f
-/* Output debugging info to the assembler file FILE
- based on the NOTE-insn INSN, assumed to be a line number. */
+/* Return whether a source line note needs to be emitted before INSN. */
static bool
notice_source_line (rtx insn)
const char *filename = insn_file (insn);
int linenum = insn_line (insn);
- if (filename && (filename != last_filename || last_linenum != linenum))
+ if (filename
+ && (force_source_line
+ || filename != last_filename
+ || last_linenum != linenum))
{
+ force_source_line = false;
last_filename = filename;
last_linenum = linenum;
high_block_linenum = MAX (last_linenum, high_block_linenum);
if (new != 0)
*xp = new;
- else
+ else if (REG_P (y))
{
/* Simplify_subreg can't handle some REG cases, but we have to. */
unsigned int regno = subreg_regno (x);
- gcc_assert (REG_P (y));
*xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
}
}
In an `asm', it's the user's fault; otherwise, the compiler's fault. */
void
-output_operand_lossage (const char *msgid, ...)
+output_operand_lossage (const char *cmsgid, ...)
{
char *fmt_string;
char *new_message;
const char *pfx_str;
va_list ap;
- va_start (ap, msgid);
+ va_start (ap, cmsgid);
pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
- asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
+ asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
vasprintf (&new_message, fmt_string, ap);
if (this_is_asm_operands)
if (x && GET_CODE (x) == SUBREG)
x = alter_subreg (&x);
- /* If X is a pseudo-register, abort now rather than writing trash to the
- assembler file. */
+ /* X must not be a pseudo reg. */
gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
PRINT_OPERAND (asm_out_file, x, code);
symbol_queue_size = 0;
}
}
+\f
+/* Turn the RTL into assembly: emit the function header, scan and output
+   every insn, emit the exception table, then hand the function to the
+   debug back end.  */
+static void
+rest_of_handle_final (void)
+{
+  rtx x;
+  const char *fnname;
+
+  /* Get the function's name, as described by its RTL.  This may be
+     different from the DECL_NAME name used in the source file.  */
+
+  x = DECL_RTL (current_function_decl);
+  gcc_assert (MEM_P (x));
+  x = XEXP (x, 0);
+  gcc_assert (GET_CODE (x) == SYMBOL_REF);
+  fnname = XSTR (x, 0);
+
+  assemble_start_function (current_function_decl, fnname);
+  final_start_function (get_insns (), asm_out_file, optimize);
+  final (get_insns (), asm_out_file, optimize);
+  final_end_function ();
+
+#ifdef TARGET_UNWIND_INFO
+  /* ??? The IA-64 ".handlerdata" directive must be issued before
+     the ".endp" directive that closes the procedure descriptor.  */
+  output_function_exception_table ();
+#endif
+
+  assemble_end_function (current_function_decl, fnname);
+
+#ifndef TARGET_UNWIND_INFO
+  /* Otherwise, it feels unclean to switch sections in the middle.  */
+  output_function_exception_table ();
+#endif
+
+  /* Reset per-function state for the next function.  */
+  user_defined_section_attribute = false;
+
+  if (! quiet_flag)
+    fflush (asm_out_file);
+
+  /* Release all memory allocated by flow.  */
+  free_basic_block_vars ();
+
+  /* Write DBX symbols if requested.  */
+
+  /* Note that for those inline functions where we don't initially
+     know for certain that we will be generating an out-of-line copy,
+     the first invocation of this routine (rest_of_compilation) will
+     skip over this code by doing a `goto exit_rest_of_compilation;'.
+     Later on, wrapup_global_declarations will (indirectly) call
+     rest_of_compilation again for those inline functions that need
+     to have out-of-line copies generated.  During that call, we
+     *will* be routed past here.  */
+
+  timevar_push (TV_SYMOUT);
+  (*debug_hooks->function_decl) (current_function_decl);
+  timevar_pop (TV_SYMOUT);
+}
+
+/* Pass descriptor for the final assembly-output pass.  Timed under
+   TV_FINAL; requests a garbage collection when it finishes
+   (TODO_ggc_collect).  */
+struct tree_opt_pass pass_final =
+{
+  NULL,                                 /* name */
+  NULL,                                 /* gate */
+  rest_of_handle_final,                 /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  TV_FINAL,                             /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  TODO_ggc_collect,                     /* todo_flags_finish */
+  0                                     /* letter */
+};
+
+
+/* Pass-manager wrapper that runs shorten_branches over the current
+   function's insn chain.  */
+static void
+rest_of_handle_shorten_branches (void)
+{
+  /* Shorten branches.  */
+  shorten_branches (get_insns ());
+}
+
+/* Pass descriptor for branch shortening.  Named "shorten" so it can be
+   dumped (TODO_dump_func); timed under TV_FINAL.  */
+struct tree_opt_pass pass_shorten_branches =
+{
+  "shorten",                            /* name */
+  NULL,                                 /* gate */
+  rest_of_handle_shorten_branches,      /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  TV_FINAL,                             /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  TODO_dump_func,                       /* todo_flags_finish */
+  0                                     /* letter */
+};
+
+
+/* Tear down all per-function RTL state after final output: unlink the
+   insn chain, reset completion flags, free CFG and function data so the
+   next function starts from a clean slate.  */
+static void
+rest_of_clean_state (void)
+{
+  rtx insn, next;
+
+  /* It is very important to decompose the RTL instruction chain here:
+     debug information keeps pointing into CODE_LABEL insns inside the function
+     body.  If these remain pointing to the other insns, we end up preserving
+     whole RTL chain and attached detailed debug info in memory.  */
+  for (insn = get_insns (); insn; insn = next)
+    {
+      next = NEXT_INSN (insn);
+      NEXT_INSN (insn) = NULL;
+      PREV_INSN (insn) = NULL;
+    }
+
+  /* In case the function was not output,
+     don't leave any temporary anonymous types
+     queued up for sdb output.  */
+#ifdef SDB_DEBUGGING_INFO
+  if (write_symbols == SDB_DEBUG)
+    sdbout_types (NULL_TREE);
+#endif
+
+  /* Reset the phase-completion flags for the next function.  */
+  reload_completed = 0;
+  epilogue_completed = 0;
+  flow2_completed = 0;
+  no_new_pseudos = 0;
+
+  /* Clear out the insn_length contents now that they are no
+     longer valid.  */
+  init_insn_lengths ();
+
+  /* Show no temporary slots allocated.  */
+  init_temp_slots ();
+
+  free_basic_block_vars ();
+  free_bb_for_insn ();
+
+
+  /* Record the incoming stack boundary this function actually needs, so
+     callers of a locally-bound function can rely on it via the cgraph
+     RTL info.  */
+  if (targetm.binds_local_p (current_function_decl))
+    {
+      int pref = cfun->preferred_stack_boundary;
+      if (cfun->stack_alignment_needed > cfun->preferred_stack_boundary)
+        pref = cfun->stack_alignment_needed;
+      cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
+        = pref;
+    }
+
+  /* Make sure volatile mem refs aren't considered valid operands for
+     arithmetic insns.  We must call this here if this is a nested inline
+     function, since the above code leaves us in the init_recog state,
+     and the function context push/pop code does not save/restore volatile_ok.
+
+     ??? Maybe it isn't necessary for expand_start_function to call this
+     anymore if we do it here?  */
+
+  init_recog_no_volatile ();
+
+  /* We're done with this function.  Free up memory if we can.  */
+  free_after_parsing (cfun);
+  free_after_compilation (cfun);
+}
+
+/* Pass descriptor for rest_of_clean_state.  Declares PROP_rtl destroyed,
+   since the insn chain is dismantled by the pass.  */
+struct tree_opt_pass pass_clean_state =
+{
+  NULL,                                 /* name */
+  NULL,                                 /* gate */
+  rest_of_clean_state,                  /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  TV_FINAL,                             /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  PROP_rtl,                             /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  0,                                    /* todo_flags_finish */
+  0                                     /* letter */
+};
+