/* A pass for lowering trees to RTL.
- Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
This file is part of GCC.
of communicating the profile info to the builtin expanders. */
gimple currently_expanding_gimple_stmt;
+static rtx expand_debug_expr (tree);
+
/* Return an expression tree corresponding to the RHS of GIMPLE
statement STMT. */
/* The Variable. */
tree decl;
- /* The offset of the variable. During partitioning, this is the
- offset relative to the partition. After partitioning, this
- is relative to the stack frame. */
- HOST_WIDE_INT offset;
-
/* Initially, the size of the variable. Later, the size of the partition,
if this variable becomes its partition's representative. */
HOST_WIDE_INT size;
smaller than our cutoff threshold. Used for -Wstack-protector. */
static bool has_short_buffer;
-/* Discover the byte alignment to use for DECL. Ignore alignment
+/* Compute the byte alignment to use for DECL. Ignore alignment
we can't do with expected alignment of the stack boundary. */
static unsigned int
-get_decl_align_unit (tree decl)
+align_local_variable (tree decl)
{
unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
+ DECL_ALIGN (decl) = align;
return align / BITS_PER_UNIT;
}
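
The function above hands back DECL_ALIGN's bit units converted to bytes. A quick standalone check of that conversion (illustrative only; it assumes the usual BITS_PER_UNIT of 8):

#include <assert.h>

int
main (void)
{
  const unsigned bits_per_unit = 8;   /* BITS_PER_UNIT on typical targets */
  unsigned decl_align = 32;           /* e.g. DECL_ALIGN of a 32-bit int */
  assert (decl_align / bits_per_unit == 4);   /* alignb, in bytes */
  return 0;
}
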
v = &stack_vars[stack_vars_num];
v->decl = decl;
- v->offset = 0;
v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
/* Ensure that all variables have size, so that &a != &b for any two
variables that are simultaneously live. */
if (v->size == 0)
v->size = 1;
- v->alignb = get_decl_align_unit (SSAVAR (decl));
+ v->alignb = align_local_variable (SSAVAR (decl));
+ /* An alignment of zero can mightily confuse us later. */
+ gcc_assert (v->alignb != 0);
/* All variables are initially in their own partition. */
v->representative = stack_vars_num;
to elements will conflict. In case of unions we have
to be careful as type based aliasing rules may say
access to the same memory does not conflict. So play
- safe and add a conflict in this case. */
- || contains_union)
+ safe and add a conflict in this case when
+ -fstrict-aliasing is used. */
+ || (contains_union && flag_strict_aliasing))
add_stack_var_conflict (i, j);
}
}
return (int)largeb - (int)largea;
/* Secondary compare on size, decreasing. */
- if (sizea < sizeb)
- return -1;
if (sizea > sizeb)
+ return -1;
+ if (sizea < sizeb)
return 1;
/* Tertiary compare on true alignment, decreasing. */
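
The surrounding comparator now orders "large" objects first, then by size and true alignment, all decreasing. A minimal standalone model of that three-key ordering (hypothetical struct fields, not GCC's stack_var_cmp, which also breaks final ties on decl):

#include <stdlib.h>

struct svar { int large; long size; unsigned align; };  /* hypothetical */

static int
svar_cmp (const void *pa, const void *pb)
{
  const struct svar *a = pa, *b = pb;
  if (a->large != b->large)
    return b->large - a->large;            /* primary: large first */
  if (a->size != b->size)
    return a->size > b->size ? -1 : 1;     /* secondary: size, decreasing */
  if (a->align != b->align)
    return a->align > b->align ? -1 : 1;   /* tertiary: align, decreasing */
  return 0;
}

Sorting with such a comparator yields exactly the descending-size order the binpacking loop below relies on.
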
for -O0 where we are preserving even unreferenced variables. */
gcc_assert (DECL_P (decl)
&& (!optimize
- || referenced_var_lookup (DECL_UID (decl))));
+ || referenced_var_lookup (cfun, DECL_UID (decl))));
bitmap_set_bit (part, uid);
*((bitmap *) pointer_map_insert (decls_to_partitions,
(void *)(size_t) uid)) = part;
/* Make the SSA name point to all partition members. */
pi = get_ptr_info (name);
- pt_solution_set (&pi->pt, part, false, false);
+ pt_solution_set (&pi->pt, part, false);
}
/* Make all points-to sets that contain one member of a partition
/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
partitioning algorithm. Partitions A and B are known to be non-conflicting.
- Merge them into a single partition A.
-
- At the same time, add OFFSET to all variables in partition B. At the end
- of the partitioning process we've have a nice block easy to lay out within
- the stack frame. */
+ Merge them into a single partition A. */
static void
-union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
+union_stack_vars (size_t a, size_t b)
{
- size_t i, last;
struct stack_var *vb = &stack_vars[b];
bitmap_iterator bi;
unsigned u;
- /* Update each element of partition B with the given offset,
- and merge them into partition A. */
- for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
- {
- stack_vars[i].offset += offset;
- stack_vars[i].representative = a;
- }
- stack_vars[last].next = stack_vars[a].next;
+ gcc_assert (stack_vars[b].next == EOC);
+ /* Add B to A's partition. */
+ stack_vars[b].next = stack_vars[a].next;
+ stack_vars[b].representative = a;
stack_vars[a].next = b;
/* Update the required alignment of partition A to account for B. */
partitions constrained by the interference graph. The overall
algorithm used is as follows:
- Sort the objects by size.
+ Sort the objects by size in descending order.
For each object A {
- S = size(A)
- O = 0
loop {
- Look for the largest non-conflicting object B with size <= S.
+ Look for the largest non-conflicting object B.
UNION (A, B)
- offset(B) = O
- O += size(B)
- S -= size(B)
}
}
*/
for (si = 0; si < n; ++si)
{
size_t i = stack_vars_sorted[si];
- HOST_WIDE_INT isize = stack_vars[i].size;
unsigned int ialign = stack_vars[i].alignb;
- HOST_WIDE_INT offset = 0;
- for (sj = si; sj-- > 0; )
+ /* Ignore objects that aren't partition representatives. If we
+ see a var that is not a partition representative, it must
+ have been merged earlier. */
+ if (stack_vars[i].representative != i)
+ continue;
+
+ for (sj = si + 1; sj < n; ++sj)
{
size_t j = stack_vars_sorted[sj];
- HOST_WIDE_INT jsize = stack_vars[j].size;
unsigned int jalign = stack_vars[j].alignb;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
continue;
- /* Ignore objects too large for the remaining space. */
- if (isize < jsize)
- continue;
-
/* Ignore conflicting objects. */
if (stack_var_conflict_p (i, j))
continue;
!= (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
continue;
- /* Refine the remaining space check to include alignment. */
- if (offset & (jalign - 1))
- {
- HOST_WIDE_INT toff = offset;
- toff += jalign - 1;
- toff &= -(HOST_WIDE_INT)jalign;
- if (isize - (toff - offset) < jsize)
- continue;
-
- isize -= toff - offset;
- offset = toff;
- }
-
- /* UNION the objects, placing J at OFFSET. */
+ /* UNION the objects. */
- union_stack_vars (i, j, offset);
-
- isize -= jsize;
- if (isize == 0)
- break;
+ union_stack_vars (i, j);
}
}
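
To make the representative/next bookkeeping concrete, here is a self-contained model of the linked partitions that the new union_stack_vars maintains (simplified types; illustrative only):

#include <stdio.h>
#include <stddef.h>

#define EOC ((size_t) -1)   /* end of chain, as in the pass */

struct var { size_t representative, next; };

/* Mirror of the new union_stack_vars: B must be a singleton.  */
static void
union_vars (struct var *v, size_t a, size_t b)
{
  v[b].next = v[a].next;
  v[b].representative = a;
  v[a].next = b;
}

int
main (void)
{
  struct var v[3] = { {0, EOC}, {1, EOC}, {2, EOC} };
  union_vars (v, 0, 2);
  union_vars (v, 0, 1);
  /* Walk partition 0's members after the representative: prints 1, then 2.  */
  for (size_t i = v[0].next; i != EOC; i = v[i].next)
    printf ("member %zu\n", i);
  return 0;
}
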
{
fputc ('\t', dump_file);
print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
- fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
- stack_vars[j].offset);
}
+ fputc ('\n', dump_file);
}
}
partition. */
for (j = i; j != EOC; j = stack_vars[j].next)
{
- gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
expand_one_stack_var_at (stack_vars[j].decl,
base, base_align,
- stack_vars[j].offset + offset);
+ offset);
}
}
unsigned byte_align;
size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
- byte_align = get_decl_align_unit (SSAVAR (var));
+ byte_align = align_local_variable (SSAVAR (var));
/* We handle highly aligned variables in expand_stack_vars. */
gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
mark_user_reg (x);
if (POINTER_TYPE_P (type))
- mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
+ mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
/* Expand all variables at this level. */
for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
- if (TREE_USED (t))
+ if (TREE_USED (t)
+ && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
+ || !DECL_NONSHAREABLE (t)))
expand_one_var (t, toplevel, true);
this_sv_num = stack_vars_num;
for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
/* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
+ if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
+ || !DECL_NONSHAREABLE (t))
TREE_USED (t) = 0;
for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
crtl->stack_protect_guard = guard;
}
-/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
- expanding variables. Those variables that can be put into registers
- are allocated pseudos; those that can't are put on the stack.
-
- TOPLEVEL is true if this is the outermost BLOCK. */
-
-static HOST_WIDE_INT
-account_used_vars_for_block (tree block, bool toplevel)
-{
- tree t;
- HOST_WIDE_INT size = 0;
-
- /* Expand all variables at this level. */
- for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
- if (TREE_USED (t))
- size += expand_one_var (t, toplevel, false);
-
- /* Expand all variables at containing levels. */
- for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
- size += account_used_vars_for_block (t, false);
-
- return size;
-}
-
/* Prepare for expanding variables. */
static void
init_vars_expansion (void)
stack_vars_alloc = stack_vars_num = 0;
}
-/* Make a fair guess for the size of the stack frame of the decl
- passed. This doesn't have to be exact, the result is only used
- in the inline heuristics. So we don't want to run the full stack
- var packing algorithm (which is quadratic in the number of stack
- vars). Instead, we calculate the total size of all stack vars.
- This turns out to be a pretty fair estimate -- packing of stack
- vars doesn't happen very often. */
+/* Make a fair guess for the size of the stack frame of the function
+ in NODE. This doesn't have to be exact; the result is only used in
+ the inline heuristics. So we don't want to run the full stack var
+ packing algorithm (which is quadratic in the number of stack vars).
+ Instead, we calculate the total size of all stack vars. This turns
+ out to be a pretty fair estimate -- packing of stack vars doesn't
+ happen very often. */
HOST_WIDE_INT
-estimated_stack_frame_size (tree decl)
+estimated_stack_frame_size (struct cgraph_node *node)
{
HOST_WIDE_INT size = 0;
size_t i;
- tree var, outer_block = DECL_INITIAL (current_function_decl);
- unsigned ix;
+ tree var;
tree old_cur_fun_decl = current_function_decl;
- current_function_decl = decl;
- push_cfun (DECL_STRUCT_FUNCTION (decl));
+ referenced_var_iterator rvi;
+ struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
- init_vars_expansion ();
+ current_function_decl = node->decl;
+ push_cfun (fn);
- FOR_EACH_LOCAL_DECL (cfun, ix, var)
- {
- if (TREE_USED (var))
- size += expand_one_var (var, true, false);
- TREE_USED (var) = 1;
- }
- size += account_used_vars_for_block (outer_block, true);
+ gcc_checking_assert (gimple_referenced_vars (fn));
+ FOR_EACH_REFERENCED_VAR (fn, var, rvi)
+ size += expand_one_var (var, true, false);
if (stack_vars_num > 0)
{
last2 = last = get_last_insn ();
extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
- if (gimple_has_location (stmt))
- {
- set_curr_insn_source_location (gimple_location (stmt));
- set_curr_insn_block (gimple_block (stmt));
- }
+ set_curr_insn_source_location (gimple_location (stmt));
+ set_curr_insn_block (gimple_block (stmt));
/* These flags have no purpose in RTL land. */
true_edge->flags &= ~EDGE_TRUE_VALUE;
static void
expand_call_stmt (gimple stmt)
{
- tree exp;
- tree lhs = gimple_call_lhs (stmt);
- size_t i;
+ tree exp, decl, lhs;
bool builtin_p;
- tree decl;
+ size_t i;
+
+ if (gimple_call_internal_p (stmt))
+ {
+ expand_internal_call (stmt);
+ return;
+ }
exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
decl = gimple_call_fndecl (stmt);
builtin_p = decl && DECL_BUILT_IN (decl);
+ /* If this is not a builtin function, the function type through which the
+ call is made may be different from the type of the function. */
+ if (!builtin_p)
+ CALL_EXPR_FN (exp)
+ = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
+ CALL_EXPR_FN (exp));
+
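The fold_convert above matters when the type the call is made through differs from the callee's own type, as happens with casts of function pointers. A small illustration (the call itself would be undefined behavior at runtime, but it shows the two distinct types the front end records):

static int callee (int x) { return x + 1; }

int
caller (void)
{
  /* The call site's type is 'int (long)', while the fndecl's own type
     remains 'int (int)'; gimple_call_fntype records the former.  */
  int (*fp) (long) = (int (*) (long)) callee;
  return fp (41L);
}
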
TREE_TYPE (exp) = gimple_call_return_type (stmt);
CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
- CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
+ if (decl
+ && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
+ && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
+ CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
+ else
+ CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
SET_EXPR_LOCATION (exp, gimple_location (stmt));
TREE_BLOCK (exp) = gimple_block (stmt);
+ /* Ensure RTL is created for debug args. */
+ if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
+ {
+ VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
+ unsigned int ix;
+ tree dtemp;
+
+ if (debug_args)
+ for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
+ {
+ gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
+ expand_debug_expr (dtemp);
+ }
+ }
+
+ lhs = gimple_call_lhs (stmt);
if (lhs)
expand_assignment (lhs, exp, false);
else
expand_gimple_stmt_1 (gimple stmt)
{
tree op0;
+
+ set_curr_insn_source_location (gimple_location (stmt));
+ set_curr_insn_block (gimple_block (stmt));
+
switch (gimple_code (stmt))
{
case GIMPLE_GOTO:
static rtx
expand_gimple_stmt (gimple stmt)
{
- int lp_nr = 0;
- rtx last = NULL;
location_t saved_location = input_location;
+ rtx last = get_last_insn ();
+ int lp_nr;
- last = get_last_insn ();
-
- /* If this is an expression of some kind and it has an associated line
- number, then emit the line number before expanding the expression.
-
- We need to save and restore the file and line information so that
- errors discovered during expansion are emitted with the right
- information. It would be better of the diagnostic routines
- used the file/line information embedded in the tree nodes rather
- than globals. */
gcc_assert (cfun);
+ /* We need to save and restore the current source location so that errors
+ discovered during expansion are emitted with the right location. But
+ it would be better if the diagnostic routines used the source location
+ embedded in the tree nodes rather than globals. */
if (gimple_has_location (stmt))
- {
- input_location = gimple_location (stmt);
- set_curr_insn_source_location (input_location);
-
- /* Record where the insns produced belong. */
- set_curr_insn_block (gimple_block (stmt));
- }
+ input_location = gimple_location (stmt);
expand_gimple_stmt_1 (stmt);
+
/* Free any temporaries used to evaluate this statement. */
free_temp_slots ();
any rtl. */
static rtx
-convert_debug_memory_address (enum machine_mode mode, rtx x)
+convert_debug_memory_address (enum machine_mode mode, rtx x,
+ addr_space_t as)
{
enum machine_mode xmode = GET_MODE (x);
#ifndef POINTERS_EXTEND_UNSIGNED
- gcc_assert (mode == Pmode);
+ gcc_assert (mode == Pmode
+ || mode == targetm.addr_space.address_mode (as));
gcc_assert (xmode == mode || xmode == VOIDmode);
#else
- gcc_assert (mode == Pmode || mode == ptr_mode);
+ rtx temp;
+ enum machine_mode address_mode = targetm.addr_space.address_mode (as);
+ enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
+
+ gcc_assert (mode == address_mode || mode == pointer_mode);
if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
return x;
- if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
+ if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
x = simplify_gen_subreg (mode, x, xmode,
subreg_lowpart_offset
(mode, xmode));
else if (!POINTERS_EXTEND_UNSIGNED)
x = gen_rtx_SIGN_EXTEND (mode, x);
else
- gcc_unreachable ();
+ {
+ switch (GET_CODE (x))
+ {
+ case SUBREG:
+ if ((SUBREG_PROMOTED_VAR_P (x)
+ || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
+ || (GET_CODE (SUBREG_REG (x)) == PLUS
+ && REG_P (XEXP (SUBREG_REG (x), 0))
+ && REG_POINTER (XEXP (SUBREG_REG (x), 0))
+ && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
+ && GET_MODE (SUBREG_REG (x)) == mode)
+ return SUBREG_REG (x);
+ break;
+ case LABEL_REF:
+ temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
+ LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
+ return temp;
+ case SYMBOL_REF:
+ temp = shallow_copy_rtx (x);
+ PUT_MODE (temp, mode);
+ return temp;
+ case CONST:
+ temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
+ if (temp)
+ temp = gen_rtx_CONST (mode, temp);
+ return temp;
+ case PLUS:
+ case MINUS:
+ if (CONST_INT_P (XEXP (x, 1)))
+ {
+ temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
+ if (temp)
+ return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
+ }
+ break;
+ default:
+ break;
+ }
+ /* Don't know how to express ptr_extend as an operation in debug info. */
+ return NULL;
+ }
#endif /* POINTERS_EXTEND_UNSIGNED */
return x;
}
-/* Return an RTX equivalent to the value of the tree expression
- EXP. */
+/* Return an RTX equivalent to the value of the parameter DECL. */
+
+static rtx
+expand_debug_parm_decl (tree decl)
+{
+ rtx incoming = DECL_INCOMING_RTL (decl);
+
+ if (incoming
+ && GET_MODE (incoming) != BLKmode
+ && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
+ || (MEM_P (incoming)
+ && REG_P (XEXP (incoming, 0))
+ && HARD_REGISTER_P (XEXP (incoming, 0)))))
+ {
+ rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
+
+#ifdef HAVE_window_save
+ /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
+ If the target machine has an explicit window save instruction, the
+ actual entry value is the corresponding OUTGOING_REGNO instead. */
+ if (REG_P (incoming)
+ && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
+ incoming
+ = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
+ OUTGOING_REGNO (REGNO (incoming)), 0);
+ else if (MEM_P (incoming))
+ {
+ rtx reg = XEXP (incoming, 0);
+ if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
+ {
+ reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
+ incoming = replace_equiv_address_nv (incoming, reg);
+ }
+ }
+#endif
+
+ ENTRY_VALUE_EXP (rtl) = incoming;
+ return rtl;
+ }
+
+ if (incoming
+ && GET_MODE (incoming) != BLKmode
+ && !TREE_ADDRESSABLE (decl)
+ && MEM_P (incoming)
+ && (XEXP (incoming, 0) == virtual_incoming_args_rtx
+ || (GET_CODE (XEXP (incoming, 0)) == PLUS
+ && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
+ && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
+ return incoming;
+
+ return NULL_RTX;
+}
+
+/* Return an RTX equivalent to the value of the tree expression EXP. */
static rtx
expand_debug_expr (tree exp)
{
rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ enum machine_mode inner_mode = VOIDmode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
addr_space_t as;
unary:
case tcc_unary:
+ inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
if (!op0)
return NULL_RTX;
|| !TREE_STATIC (exp)
|| !DECL_NAME (exp)
|| DECL_HARD_REGISTER (exp)
+ || DECL_IN_CONSTANT_POOL (exp)
|| mode == VOIDmode)
return NULL;
case NOP_EXPR:
case CONVERT_EXPR:
{
- enum machine_mode inner_mode = GET_MODE (op0);
+ inner_mode = GET_MODE (op0);
if (mode == inner_mode)
return op0;
op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
}
else if (CONSTANT_P (op0)
- || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
+ || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
op0 = simplify_gen_subreg (mode, op0, inner_mode,
subreg_lowpart_offset (mode,
inner_mode));
else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
: unsignedp)
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
return op0;
}
case MEM_REF:
+ if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
+ {
+ tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1));
+ if (newexp)
+ return expand_debug_expr (newexp);
+ }
+ /* FALLTHROUGH */
case INDIRECT_REF:
op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
if (!op0)
if (TREE_CODE (exp) == MEM_REF)
{
+ if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
+ || (GET_CODE (op0) == PLUS
+ && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
+ /* (mem (debug_implicit_ptr)) might confuse aliasing.
+ Instead just use get_inner_reference. */
+ goto component_ref;
+
op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
if (!op1 || !CONST_INT_P (op1))
return NULL;
else
as = ADDR_SPACE_GENERIC;
- op0 = gen_rtx_MEM (mode, op0);
+ op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
+ op0, as);
+ if (op0 == NULL_RTX)
+ return NULL;
+ op0 = gen_rtx_MEM (mode, op0);
set_mem_attributes (op0, exp, 0);
+ if (TREE_CODE (exp) == MEM_REF
+ && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
+ set_mem_expr (op0, NULL_TREE);
set_mem_addr_space (op0, as);
return op0;
if (!op0)
return NULL;
- as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
+ if (POINTER_TYPE_P (TREE_TYPE (exp)))
+ as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
+ else
+ as = ADDR_SPACE_GENERIC;
+
+ op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
+ op0, as);
+ if (op0 == NULL_RTX)
+ return NULL;
op0 = gen_rtx_MEM (mode, op0);
return op0;
+ component_ref:
case ARRAY_REF:
case ARRAY_RANGE_REF:
case COMPONENT_REF:
/* Don't use offset_address here, we don't need a
recognizable address, and we don't want to generate
code. */
- op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
+ op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
+ op0, op1));
}
if (MEM_P (op0))
enum machine_mode opmode = GET_MODE (op0);
if (opmode == VOIDmode)
- opmode = mode1;
+ opmode = TYPE_MODE (TREE_TYPE (tem));
/* This condition may hold if we're expanding the address
right past the end of an array that turned out not to
? SIGN_EXTRACT
: ZERO_EXTRACT, mode,
GET_MODE (op0) != VOIDmode
- ? GET_MODE (op0) : mode1,
+ ? GET_MODE (op0)
+ : TYPE_MODE (TREE_TYPE (tem)),
op0, GEN_INT (bitsize), GEN_INT (bitpos));
}
case ABS_EXPR:
- return gen_rtx_ABS (mode, op0);
+ return simplify_gen_unary (ABS, mode, op0, mode);
case NEGATE_EXPR:
- return gen_rtx_NEG (mode, op0);
+ return simplify_gen_unary (NEG, mode, op0, mode);
case BIT_NOT_EXPR:
- return gen_rtx_NOT (mode, op0);
+ return simplify_gen_unary (NOT, mode, op0, mode);
case FLOAT_EXPR:
- if (unsignedp)
- return gen_rtx_UNSIGNED_FLOAT (mode, op0);
- else
- return gen_rtx_FLOAT (mode, op0);
+ return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? UNSIGNED_FLOAT : FLOAT, mode, op0,
+ inner_mode);
case FIX_TRUNC_EXPR:
- if (unsignedp)
- return gen_rtx_UNSIGNED_FIX (mode, op0);
- else
- return gen_rtx_FIX (mode, op0);
+ return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
+ inner_mode);
case POINTER_PLUS_EXPR:
/* For the rare target where pointers are not the same size as
&& GET_MODE (op0) != GET_MODE (op1))
{
if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
- op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
+ op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
+ GET_MODE (op1));
else
/* We always sign-extend, regardless of the signedness of
the operand, because the operand is always unsigned
here even if the original C expression is signed. */
- op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
+ op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
+ GET_MODE (op1));
}
/* Fall through. */
case PLUS_EXPR:
- return gen_rtx_PLUS (mode, op0, op1);
+ return simplify_gen_binary (PLUS, mode, op0, op1);
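
A standalone check of why the unconditional sign-extension of the sizetype offset a few lines up is correct (illustrative only): a "negative" offset stored in an unsigned 32-bit sizetype still subtracts after sign-extension to a wider address mode.

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t off = (uint32_t) -4;           /* sizetype offset of "p - 1" on int */
  int64_t wide = (int64_t) (int32_t) off; /* SIGN_EXTEND to the address mode */
  assert (wide == -4);                    /* adding it subtracts 4 bytes */
  return 0;
}
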
case MINUS_EXPR:
- return gen_rtx_MINUS (mode, op0, op1);
+ return simplify_gen_binary (MINUS, mode, op0, op1);
case MULT_EXPR:
- return gen_rtx_MULT (mode, op0, op1);
+ return simplify_gen_binary (MULT, mode, op0, op1);
case RDIV_EXPR:
case TRUNC_DIV_EXPR:
case EXACT_DIV_EXPR:
if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return simplify_gen_binary (UDIV, mode, op0, op1);
else
- return gen_rtx_DIV (mode, op0, op1);
+ return simplify_gen_binary (DIV, mode, op0, op1);
case TRUNC_MOD_EXPR:
- if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
- else
- return gen_rtx_MOD (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
case FLOOR_DIV_EXPR:
if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return simplify_gen_binary (UDIV, mode, op0, op1);
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = floor_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case FLOOR_MOD_EXPR:
if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
+ return simplify_gen_binary (UMOD, mode, op0, op1);
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = floor_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case CEIL_DIV_EXPR:
if (unsignedp)
{
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = ceil_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case CEIL_MOD_EXPR:
if (unsignedp)
{
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = ceil_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case ROUND_DIV_EXPR:
if (unsignedp)
{
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = round_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = round_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case ROUND_MOD_EXPR:
if (unsignedp)
{
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = round_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = round_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
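
All of the DIV/MOD cases above express a rounding division as a truncating division plus an adjustment computed from the remainder. For the floor variant, the underlying identity can be checked in isolation (a sketch, not GCC's floor_sdiv_adjust):

#include <assert.h>

/* Floor division from C99's truncating '/': adjust by -1 when the
   remainder is nonzero and the operands have opposite signs.  */
static int
floor_div (int a, int b)
{
  int q = a / b, r = a % b;
  return q - ((r != 0 && (r ^ b) < 0) ? 1 : 0);
}

int
main (void)
{
  assert (floor_div (7, 2) == 3);
  assert (floor_div (-7, 2) == -4);
  assert (floor_div (7, -2) == -4);
  assert (floor_div (-7, -2) == 3);
  return 0;
}
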
case LSHIFT_EXPR:
- return gen_rtx_ASHIFT (mode, op0, op1);
+ return simplify_gen_binary (ASHIFT, mode, op0, op1);
case RSHIFT_EXPR:
if (unsignedp)
- return gen_rtx_LSHIFTRT (mode, op0, op1);
+ return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
else
- return gen_rtx_ASHIFTRT (mode, op0, op1);
+ return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
case LROTATE_EXPR:
- return gen_rtx_ROTATE (mode, op0, op1);
+ return simplify_gen_binary (ROTATE, mode, op0, op1);
case RROTATE_EXPR:
- return gen_rtx_ROTATERT (mode, op0, op1);
+ return simplify_gen_binary (ROTATERT, mode, op0, op1);
case MIN_EXPR:
- if (unsignedp)
- return gen_rtx_UMIN (mode, op0, op1);
- else
- return gen_rtx_SMIN (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
case MAX_EXPR:
- if (unsignedp)
- return gen_rtx_UMAX (mode, op0, op1);
- else
- return gen_rtx_SMAX (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
case BIT_AND_EXPR:
case TRUTH_AND_EXPR:
- return gen_rtx_AND (mode, op0, op1);
+ return simplify_gen_binary (AND, mode, op0, op1);
case BIT_IOR_EXPR:
case TRUTH_OR_EXPR:
- return gen_rtx_IOR (mode, op0, op1);
+ return simplify_gen_binary (IOR, mode, op0, op1);
case BIT_XOR_EXPR:
case TRUTH_XOR_EXPR:
- return gen_rtx_XOR (mode, op0, op1);
+ return simplify_gen_binary (XOR, mode, op0, op1);
case TRUTH_ANDIF_EXPR:
return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
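
The two IF_THEN_ELSE forms above encode the usual value identities of the short-circuit operators, given 0/1 truth-value operands; in C source terms:

/* Illustrative only: the value identities behind the two cases above.  */
static int andif (int a, int b) { return a ? b : 0; }  /* a && b */
static int orif  (int a, int b) { return a ? 1 : b; }  /* a || b */
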
case TRUTH_NOT_EXPR:
- return gen_rtx_EQ (mode, op0, const0_rtx);
+ return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
case LT_EXPR:
- if (unsignedp)
- return gen_rtx_LTU (mode, op0, op1);
- else
- return gen_rtx_LT (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
+ op0, op1);
case LE_EXPR:
- if (unsignedp)
- return gen_rtx_LEU (mode, op0, op1);
- else
- return gen_rtx_LE (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
+ op0, op1);
case GT_EXPR:
- if (unsignedp)
- return gen_rtx_GTU (mode, op0, op1);
- else
- return gen_rtx_GT (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
+ op0, op1);
case GE_EXPR:
- if (unsignedp)
- return gen_rtx_GEU (mode, op0, op1);
- else
- return gen_rtx_GE (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
+ op0, op1);
case EQ_EXPR:
- return gen_rtx_EQ (mode, op0, op1);
+ return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
case NE_EXPR:
- return gen_rtx_NE (mode, op0, op1);
+ return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
case UNORDERED_EXPR:
- return gen_rtx_UNORDERED (mode, op0, op1);
+ return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
case ORDERED_EXPR:
- return gen_rtx_ORDERED (mode, op0, op1);
+ return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
case UNLT_EXPR:
- return gen_rtx_UNLT (mode, op0, op1);
+ return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
case UNLE_EXPR:
- return gen_rtx_UNLE (mode, op0, op1);
+ return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
case UNGT_EXPR:
- return gen_rtx_UNGT (mode, op0, op1);
+ return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
case UNGE_EXPR:
- return gen_rtx_UNGE (mode, op0, op1);
+ return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
case UNEQ_EXPR:
- return gen_rtx_UNEQ (mode, op0, op1);
+ return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
case LTGT_EXPR:
- return gen_rtx_LTGT (mode, op0, op1);
+ return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
case COND_EXPR:
return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
case CONJ_EXPR:
if (GET_CODE (op0) == CONCAT)
return gen_rtx_CONCAT (mode, XEXP (op0, 0),
- gen_rtx_NEG (GET_MODE_INNER (mode),
- XEXP (op0, 1)));
+ simplify_gen_unary (NEG, GET_MODE_INNER (mode),
+ XEXP (op0, 1),
+ GET_MODE_INNER (mode)));
else
{
enum machine_mode imode = GET_MODE_INNER (mode);
return NULL;
}
- op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
+ as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
+ op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
return op0;
int part = var_to_partition (SA.map, exp);
if (part == NO_PARTITION)
- return NULL;
+ {
+ /* If this is a reference to the incoming value of a
+ parameter that is never used in the code, or whose
+ incoming value is never used in the code, use the
+ PARM_DECL's DECL_RTL if set. */
+ if (SSA_NAME_IS_DEFAULT_DEF (exp)
+ && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
+ {
+ op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
+ if (op0)
+ goto adjust_mode;
+ op0 = expand_debug_expr (SSA_NAME_VAR (exp));
+ if (op0)
+ goto adjust_mode;
+ }
+ return NULL;
+ }
gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
- op0 = SA.partition_to_pseudo[part];
+ op0 = copy_rtx (SA.partition_to_pseudo[part]);
}
goto adjust_mode;
}
case VEC_UNPACK_LO_EXPR:
case VEC_WIDEN_MULT_HI_EXPR:
case VEC_WIDEN_MULT_LO_EXPR:
+ case VEC_WIDEN_LSHIFT_HI_EXPR:
+ case VEC_WIDEN_LSHIFT_LO_EXPR:
+ case VEC_PERM_EXPR:
return NULL;
/* Misc codes. */
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
- else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
- op1 = gen_rtx_ZERO_EXTEND (mode, op1);
- else
- op1 = gen_rtx_SIGN_EXTEND (mode, op1);
- op0 = gen_rtx_MULT (mode, op0, op1);
- return gen_rtx_PLUS (mode, op0, op2);
+ op0
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
+ inner_mode);
+ op1
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 1)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
+ inner_mode);
+ op0 = simplify_gen_binary (MULT, mode, op0, op1);
+ return simplify_gen_binary (PLUS, mode, op0, op2);
}
return NULL;
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE (op0);
+ inner_mode = GET_MODE (op0);
if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
else
op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
else
op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
- op0 = gen_rtx_MULT (mode, op0, op1);
+ op0 = simplify_gen_binary (MULT, mode, op0, op1);
if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
return op0;
else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
- return gen_rtx_PLUS (mode, op0, op2);
+ return simplify_gen_binary (PLUS, mode, op0, op2);
else
- return gen_rtx_MINUS (mode, op2, op0);
+ return simplify_gen_binary (MINUS, mode, op2, op0);
}
return NULL;
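
The RTL built here follows the classic widening multiply(-accumulate) shape; in C source terms it corresponds to something like this sketch (assuming an LP64 target where long is twice as wide as int):

/* Illustrative only: extend both operands, multiply in the wide mode,
   then optionally accumulate.  */
long
widen_madd (int a, int b, long acc)
{
  return (long) a * (long) b + acc;   /* the WIDEN_MULT_PLUS_EXPR shape */
}
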
case WIDEN_SUM_EXPR:
+ case WIDEN_LSHIFT_EXPR:
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
- else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
- return gen_rtx_PLUS (mode, op0, op1);
+ op0
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
+ inner_mode);
+ return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
+ ? ASHIFT : PLUS, mode, op0, op1);
}
return NULL;
case FMA_EXPR:
- return gen_rtx_FMA (mode, op0, op1, op2);
+ return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
default:
flag_unsupported:
}
}
+/* Return an RTX equivalent to the source bind value of the tree expression
+ EXP. */
+
+static rtx
+expand_debug_source_expr (tree exp)
+{
+ rtx op0 = NULL_RTX;
+ enum machine_mode mode = VOIDmode, inner_mode;
+
+ switch (TREE_CODE (exp))
+ {
+ case PARM_DECL:
+ {
+ mode = DECL_MODE (exp);
+ op0 = expand_debug_parm_decl (exp);
+ if (op0)
+ break;
+ /* See if this is an argument that has been completely
+ optimized out. */
+ if (!DECL_RTL_SET_P (exp)
+ && !DECL_INCOMING_RTL (exp)
+ && DECL_ABSTRACT_ORIGIN (current_function_decl))
+ {
+ tree aexp = exp;
+ if (DECL_ABSTRACT_ORIGIN (exp))
+ aexp = DECL_ABSTRACT_ORIGIN (exp);
+ if (DECL_CONTEXT (aexp)
+ == DECL_ABSTRACT_ORIGIN (current_function_decl))
+ {
+ VEC(tree, gc) **debug_args;
+ unsigned int ix;
+ tree ddecl;
+#ifdef ENABLE_CHECKING
+ tree parm;
+ for (parm = DECL_ARGUMENTS (current_function_decl);
+ parm; parm = DECL_CHAIN (parm))
+ gcc_assert (parm != exp
+ && DECL_ABSTRACT_ORIGIN (parm) != aexp);
+#endif
+ debug_args = decl_debug_args_lookup (current_function_decl);
+ if (debug_args != NULL)
+ {
+ for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
+ ix += 2)
+ if (ddecl == aexp)
+ return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
+ }
+ }
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (op0 == NULL_RTX)
+ return NULL_RTX;
+
+ inner_mode = GET_MODE (op0);
+ if (mode == inner_mode)
+ return op0;
+
+ if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
+ {
+ if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
+ else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
+ }
+ else if (FLOAT_MODE_P (mode))
+ gcc_unreachable ();
+ else if (FLOAT_MODE_P (inner_mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (exp)))
+ op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
+ }
+ else if (CONSTANT_P (op0)
+ || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode, inner_mode));
+ else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
+ op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
+
+ return op0;
+}
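+
The mode-adjustment cascade at the end of expand_debug_source_expr selects one of a handful of conversions. Reduced to a standalone sketch (hypothetical helper, with constants and corner cases omitted; the int-to-float direction is unreachable in the original):

enum conv { LOWPART_SUBREG, FLOAT_TRUNC, FLOAT_EXT, INT_FIX,
            ZERO_EXT, SIGN_EXT };

static enum conv
choose_conversion (int dst_float, int src_float,
                   int dst_bits, int src_bits, int unsignedp)
{
  if (dst_float && src_float)               /* float -> float */
    return dst_bits == src_bits ? LOWPART_SUBREG
           : dst_bits < src_bits ? FLOAT_TRUNC : FLOAT_EXT;
  if (src_float)                            /* float -> int */
    return INT_FIX;                         /* UNSIGNED_FIX if unsignedp */
  if (dst_bits <= src_bits)                 /* int -> int, narrowing/same */
    return LOWPART_SUBREG;
  return unsignedp ? ZERO_EXT : SIGN_EXT;   /* int -> int, widening */
}
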
+
/* Expand the _LOCs in debug insns. We run this after expanding all
regular insns, so that any variables referenced in the function
will have their DECL_RTLs set. */
val = NULL_RTX;
else
{
- val = expand_debug_expr (value);
+ if (INSN_VAR_LOCATION_STATUS (insn)
+ == VAR_INIT_STATUS_UNINITIALIZED)
+ val = expand_debug_source_expr (value);
+ else
+ val = expand_debug_expr (value);
gcc_assert (last == get_last_insn ());
}
val = gen_rtx_VAR_LOCATION
(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
- val = emit_debug_insn (val);
+ emit_debug_insn (val);
FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
{
val = gen_rtx_VAR_LOCATION
(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
- val = emit_debug_insn (val);
+ emit_debug_insn (val);
if (dump_file && (dump_flags & TDF_DETAILS))
{
/* We can't dump the insn with a TREE where an RTX
is expected. */
- INSN_VAR_LOCATION_LOC (val) = const0_rtx;
+ PAT_VAR_LOCATION_LOC (val) = const0_rtx;
maybe_dump_rtl_for_gimple_stmt (stmt, last);
- INSN_VAR_LOCATION_LOC (val) = (rtx)value;
+ PAT_VAR_LOCATION_LOC (val) = (rtx)value;
}
/* In order not to generate too many debug temporaries,
set_curr_insn_source_location (sloc);
set_curr_insn_block (sblock);
}
+ else if (gimple_debug_source_bind_p (stmt))
+ {
+ location_t sloc = get_curr_insn_source_location ();
+ tree sblock = get_curr_insn_block ();
+ tree var = gimple_debug_source_bind_get_var (stmt);
+ tree value = gimple_debug_source_bind_get_value (stmt);
+ rtx val;
+ enum machine_mode mode;
+
+ last = get_last_insn ();
+
+ set_curr_insn_source_location (gimple_location (stmt));
+ set_curr_insn_block (gimple_block (stmt));
+
+ mode = DECL_MODE (var);
+
+ val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
+ VAR_INIT_STATUS_UNINITIALIZED);
+
+ emit_debug_insn (val);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ /* We can't dump the insn with a TREE where an RTX
+ is expected. */
+ PAT_VAR_LOCATION_LOC (val) = const0_rtx;
+ maybe_dump_rtl_for_gimple_stmt (stmt, last);
+ PAT_VAR_LOCATION_LOC (val) = (rtx)value;
+ }
+
+ set_curr_insn_source_location (sloc);
+ set_curr_insn_block (sblock);
+ }
else
{
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
else
set_curr_insn_source_location (cfun->function_start_locus);
}
+ else
+ set_curr_insn_source_location (UNKNOWN_LOCATION);
set_curr_insn_block (DECL_INITIAL (current_function_decl));
prologue_locator = curr_insn_locator ();
crtl->preferred_stack_boundary = STACK_BOUNDARY;
cfun->cfg->max_jumptable_ents = 0;
+ /* Resolve the function section. Some targets, like ARM EABI, rely on
+ knowledge of the function section at expansion time to predict distance
+ of calls. */
+ resolve_unique_section (current_function_decl, 0, flag_function_sections);
+
/* Expand the variables recorded during gimple lowering. */
timevar_push (TV_VAR_EXPAND);
start_sequence ();
}
}
+ /* If we have a class containing differently aligned pointers
+ we need to merge those into the corresponding RTL pointer
+ alignment. */
+ for (i = 1; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+ int part;
+ rtx r;
+
+ if (!name
+ || !POINTER_TYPE_P (TREE_TYPE (name))
+ /* We might have generated new SSA names in
+ update_alias_info_with_stack_vars. They will have a NULL
+ defining statement, and won't be part of the partitioning,
+ so ignore those. */
+ || !SSA_NAME_DEF_STMT (name))
+ continue;
+ part = var_to_partition (SA.map, name);
+ if (part == NO_PARTITION)
+ continue;
+ r = SA.partition_to_pseudo[part];
+ if (REG_P (r))
+ mark_reg_pointer (r, get_pointer_alignment (name));
+ }
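+
A minimal model of the merge just performed, assuming mark_reg_pointer only ever weakens an already-recorded alignment when called again with a smaller one (illustrative; not the GCC function):

/* Merging differently aligned pointers into one partition must keep
   the weakest guarantee, e.g. merge_ptr_align (64, 32) == 32.  */
static unsigned
merge_ptr_align (unsigned recorded, unsigned incoming)
{
  if (recorded == 0 || incoming < recorded)   /* 0: nothing recorded yet */
    return incoming;
  return recorded;
}
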
+
/* If this function is `main', emit a call to `__main'
to run global initializers, etc. */
if (DECL_NAME (current_function_decl)
/* Zap the tree EH table. */
set_eh_throw_stmt_table (cfun, NULL);
+ /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
+ to split edges, which edge insertions might do. */
rebuild_jump_labels (get_insns ());
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
{
if (e->insns.r)
- commit_one_edge_insertion (e);
+ {
+ rebuild_jump_labels_chain (e->insns.r);
+ /* Avoid putting insns before parm_birth_insn. */
+ if (e->src == ENTRY_BLOCK_PTR
+ && single_succ_p (ENTRY_BLOCK_PTR)
+ && parm_birth_insn)
+ {
+ rtx insns = e->insns.r;
+ e->insns.r = NULL_RTX;
+ emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
+ }
+ else
+ commit_one_edge_insertion (e);
+ }
else
ei_next (&ei);
}
PROP_ssa | PROP_trees, /* properties_destroyed */
TODO_verify_ssa | TODO_verify_flow
| TODO_verify_stmts, /* todo_flags_start */
- TODO_dump_func
- | TODO_ggc_collect /* todo_flags_finish */
+ TODO_ggc_collect /* todo_flags_finish */
}
};