/* If registers go on the stack anyway, any argument is sure to clobber
an outgoing argument. */
-#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
- {
- tree fn = emit_block_move_libcall_fn (false);
- (void) fn;
- if (REG_PARM_STACK_SPACE (fn) != 0)
- return false;
- }
+#if defined (REG_PARM_STACK_SPACE)
+ if (OUTGOING_REG_PARM_STACK_SPACE)
+ {
+ tree fn;
+ fn = emit_block_move_libcall_fn (false);
+ if (REG_PARM_STACK_SPACE (fn) != 0)
+ return false;
+ }
#endif
/* If any argument goes in memory, then it might clobber an outgoing
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
rtx dst_addr, src_addr;
- tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
+ tree call_expr, fn, src_tree, dst_tree, size_tree;
enum machine_mode size_mode;
rtx retval;
size_tree = make_tree (sizetype, size);
fn = emit_block_move_libcall_fn (true);
- arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
- arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
- arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
-
- /* Now we have to build up the CALL_EXPR itself. */
- call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- call_expr, arg_list, NULL_TREE);
+ call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
CALL_EXPR_TAILCALL (call_expr) = tailcall;
retval = expand_normal (call_expr);
rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
- tree call_expr, arg_list, fn, object_tree, size_tree, val_tree;
+ tree call_expr, fn, object_tree, size_tree, val_tree;
enum machine_mode size_mode;
rtx retval;
val_tree = make_tree (integer_type_node, val);
fn = clear_storage_libcall_fn (true);
- arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
- arg_list = tree_cons (NULL_TREE, val_tree, arg_list);
- arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
-
- /* Now we have to build up the CALL_EXPR itself. */
- call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- call_expr, arg_list, NULL_TREE);
+ call_expr = build_call_expr (fn, 3,
+ object_tree, integer_zero_node, size_tree);
CALL_EXPR_TAILCALL (call_expr) = tailcall;
retval = expand_normal (call_expr);
X is known to satisfy push_operand, and MODE is known to be complex.
Returns the last instruction emitted. */
-static rtx
+rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
enum machine_mode submode = GET_MODE_INNER (mode);
read_complex_part (y, !imag_first));
}
+/* A subroutine of emit_move_complex. Perform the move from Y to X
+ via two moves of the parts. Returns the last instruction emitted.
+ NOTE(review): assumes X and Y are accessible via read_complex_part/
+ write_complex_part, i.e. have complex (or equivalently paired)
+ mode -- confirm at the callers hoisted out of emit_move_complex. */
+
+rtx
+emit_move_complex_parts (rtx x, rtx y)
+{
+ /* Show the output dies here. This is necessary for SUBREGs
+ of pseudos since we cannot track their lifetimes correctly;
+ hard regs shouldn't appear here except as return values. */
+ if (!reload_completed && !reload_in_progress
+ && REG_P (x) && !reg_overlap_mentioned_p (x, y))
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
+
+ /* Copy the real part first, then the imaginary part. */
+ write_complex_part (x, read_complex_part (y, false), false);
+ write_complex_part (x, read_complex_part (y, true), true);
+
+ return get_last_insn ();
+}
+
/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
MODE is known to be complex. Returns the last instruction emitted. */
return ret;
}
- /* Show the output dies here. This is necessary for SUBREGs
- of pseudos since we cannot track their lifetimes correctly;
- hard regs shouldn't appear here except as return values. */
- if (!reload_completed && !reload_in_progress
- && REG_P (x) && !reg_overlap_mentioned_p (x, y))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
-
- write_complex_part (x, read_complex_part (y, false), false);
- write_complex_part (x, read_complex_part (y, true), true);
- return get_last_insn ();
+ return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
{
if (TYPE_UNSIGNED (TREE_TYPE (exp))
!= SUBREG_PROMOTED_UNSIGNED_P (target))
- exp = fold_convert
- (lang_hooks.types.signed_or_unsigned_type
- (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
+ {
+ /* Some types, e.g. Fortran's logical*4, won't have a signed
+ version, so use the mode instead. */
+ tree ntype
+ = (get_signed_or_unsigned_type
+ (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
+ if (ntype == NULL)
+ ntype = lang_hooks.types.type_for_mode
+ (TYPE_MODE (TREE_TYPE (exp)),
+ SUBREG_PROMOTED_UNSIGNED_P (target));
+
+ exp = fold_convert (ntype, exp);
+ }
exp = fold_convert (lang_hooks.types.type_for_mode
(GET_MODE (SUBREG_REG (target)),
case tcc_expression:
case tcc_reference:
+ case tcc_vl_exp:
/* Now do code-specific tests. EXP_RTL is set to any rtx we find in
the expression. If it is set, we conflict iff we are that rtx or
both are in memory. Otherwise, we check all operands of the
if (exp_rtl)
break;
- nops = TREE_CODE_LENGTH (TREE_CODE (exp));
+ nops = TREE_OPERAND_LENGTH (exp);
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
return 0;
- /* If this is a language-specific tree code, it may require
- special handling. */
- if ((unsigned int) TREE_CODE (exp)
- >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
- && !lang_hooks.safe_from_p (x, exp))
- return 0;
break;
case tcc_type:
return MAX (factor, target_align);
}
\f
+/* Return &VAR expression for emulated thread local VAR.
+
+ VAR is a __thread variable on a target without native TLS support.
+ Build the call __emutls_get_address (&EMUVAR), where EMUVAR is the
+ emulated-TLS control variable created for VAR by emutls_decl, and
+ fold-convert the returned address to pointer-to-type-of-VAR so the
+ caller can dereference it in place of VAR. */
+
+static tree
+emutls_var_address (tree var)
+{
+ tree emuvar = emutls_decl (var);
+ tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
+ /* The builtin takes a generic pointer to the control variable. */
+ tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
+ tree arglist = build_tree_list (NULL_TREE, arg);
+ tree call = build_function_call_expr (fn, arglist);
+ return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
+}
+\f
/* Expands variable VAR. */
void
inner = TREE_OPERAND (exp, 0);
break;
+ case VAR_DECL:
+ /* TLS emulation hook - replace __thread VAR's &VAR with
+ __emutls_get_address (&_emutls.VAR). */
+ if (! targetm.have_tls
+ && TREE_CODE (exp) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (exp))
+ {
+ exp = emutls_var_address (exp);
+ return expand_expr (exp, target, tmode, modifier);
+ }
+ /* Fall through. */
+
default:
/* If the object is a DECL, then expand it for its rtl. Don't bypass
expand_expr, as that can have various side effects; LABEL_DECLs for
information. It would be better of the diagnostic routines
used the file/line information embedded in the tree nodes rather
than globals. */
- if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
+ if (cfun && EXPR_HAS_LOCATION (exp))
{
location_t saved_location = input_location;
input_location = EXPR_LOCATION (exp);
- emit_line_note (input_location);
+ set_curr_insn_source_location (input_location);
/* Record where the insns produced belong. */
- record_block_change (TREE_BLOCK (exp));
+ set_curr_insn_block (TREE_BLOCK (exp));
ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
enum expand_modifier modifier, rtx *alt_rtl)
{
- rtx op0, op1, temp, decl_rtl;
+ rtx op0, op1, op2, temp, decl_rtl;
tree type;
int unsignedp;
enum machine_mode mode;
&& (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
layout_decl (exp, 0);
+ /* TLS emulation hook - replace __thread vars with
+ *__emutls_get_address (&_emutls.var). */
+ if (! targetm.have_tls
+ && TREE_CODE (exp) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (exp))
+ {
+ exp = build_fold_indirect_ref (emutls_var_address (exp));
+ return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
+ }
+
/* ... fall through ... */
case FUNCTION_DECL:
case RESULT_DECL:
decl_rtl = DECL_RTL (exp);
gcc_assert (decl_rtl);
+ decl_rtl = copy_rtx (decl_rtl);
/* Ensure variable marked as used even if it doesn't go through
a parser. If it hasn't be used yet, write out an external
necessarily be constant. */
if (mode == BLKmode)
{
- rtx new
- = assign_stack_temp_for_type
- (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
+ HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
+ rtx new;
+
+ /* If the reference doesn't use the alias set of its type,
+ we cannot create the temporary using that type. */
+ if (component_uses_parent_alias_set (exp))
+ {
+ new = assign_stack_local (ext_mode, size, 0);
+ set_mem_alias_set (new, get_alias_set (exp));
+ }
+ else
+ new = assign_stack_temp_for_type (ext_mode, size, 0, type);
emit_move_insn (new, op0);
op0 = copy_rtx (new);
case CALL_EXPR:
/* Check for a built-in function. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
+ && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
== FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
+ && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
{
- if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
== BUILT_IN_FRONTEND)
return lang_hooks.expand_expr (exp, original_target,
tmode, modifier,
return op0;
case PLUS_EXPR:
+ /* Check if this is a case for multiplication and addition. */
+ if (TREE_CODE (type) == INTEGER_TYPE
+ && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
+ {
+ tree subsubexp0, subsubexp1;
+ enum tree_code code0, code1;
+
+ subexp0 = TREE_OPERAND (exp, 0);
+ subsubexp0 = TREE_OPERAND (subexp0, 0);
+ subsubexp1 = TREE_OPERAND (subexp0, 1);
+ code0 = TREE_CODE (subsubexp0);
+ code1 = TREE_CODE (subsubexp1);
+ if (code0 == NOP_EXPR && code1 == NOP_EXPR
+ && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
+ < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+ && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
+ == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
+ && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
+ == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
+ {
+ tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
+ enum machine_mode innermode = TYPE_MODE (op0type);
+ bool zextend_p = TYPE_UNSIGNED (op0type);
+ this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
+ if (mode == GET_MODE_2XWIDER_MODE (innermode)
+ && (this_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ {
+ expand_operands (TREE_OPERAND (subsubexp0, 0),
+ TREE_OPERAND (subsubexp1, 0),
+ NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+ op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
+ VOIDmode, 0);
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ target, unsignedp);
+ gcc_assert (temp);
+ return REDUCE_BIT_FIELD (temp);
+ }
+ }
+ }
+
/* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
something else, make sure we add the register to the constant and
then to the other thing. This case can occur during strength
return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
case MINUS_EXPR:
+ /* Check if this is a case for multiplication and subtraction. */
+ if (TREE_CODE (type) == INTEGER_TYPE
+ && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
+ {
+ tree subsubexp0, subsubexp1;
+ enum tree_code code0, code1;
+
+ subexp1 = TREE_OPERAND (exp, 1);
+ subsubexp0 = TREE_OPERAND (subexp1, 0);
+ subsubexp1 = TREE_OPERAND (subexp1, 1);
+ code0 = TREE_CODE (subsubexp0);
+ code1 = TREE_CODE (subsubexp1);
+ if (code0 == NOP_EXPR && code1 == NOP_EXPR
+ && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
+ < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+ && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
+ == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
+ && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
+ == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
+ {
+ tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
+ enum machine_mode innermode = TYPE_MODE (op0type);
+ bool zextend_p = TYPE_UNSIGNED (op0type);
+ this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
+ if (mode == GET_MODE_2XWIDER_MODE (innermode)
+ && (this_optab->handlers[(int) mode].insn_code
+ != CODE_FOR_nothing))
+ {
+ expand_operands (TREE_OPERAND (subsubexp0, 0),
+ TREE_OPERAND (subsubexp1, 0),
+ NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+ op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
+ VOIDmode, 0);
+ temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+ target, unsignedp);
+ gcc_assert (temp);
+ return REDUCE_BIT_FIELD (temp);
+ }
+ }
+ }
+
/* For initializers, we are allowed to return a MINUS of two
symbolic constants. Here we handle all cases when both operands
are constant. */
target = expand_vec_cond_expr (exp, target);
return target;
+ case MODIFY_EXPR:
+ {
+ tree lhs = TREE_OPERAND (exp, 0);
+ tree rhs = TREE_OPERAND (exp, 1);
+ gcc_assert (ignore);
+ expand_assignment (lhs, rhs);
+ return const0_rtx;
+ }
+
case GIMPLE_MODIFY_STMT:
{
tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
}
expand_assignment (lhs, rhs);
-
return const0_rtx;
}
return target;
}
- case VEC_PACK_MOD_EXPR:
+ case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR:
{
mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
HOST_WIDE_INT prec = TYPE_PRECISION (type);
if (target && GET_MODE (target) != GET_MODE (exp))
target = 0;
- if (TYPE_UNSIGNED (type))
+ /* For constant values, reduce using build_int_cst_type. */
+ if (GET_CODE (exp) == CONST_INT)
+ {
+ HOST_WIDE_INT value = INTVAL (exp);
+ tree t = build_int_cst_type (type, value);
+ return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
+ }
+ else if (TYPE_UNSIGNED (type))
{
rtx mask;
if (prec < HOST_BITS_PER_WIDE_INT)