/* If registers go on the stack anyway, any argument is sure to clobber
an outgoing argument. */
-#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
- {
- tree fn = emit_block_move_libcall_fn (false);
- (void) fn;
- if (REG_PARM_STACK_SPACE (fn) != 0)
- return false;
- }
+#if defined (REG_PARM_STACK_SPACE)
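+ /* OUTGOING_REG_PARM_STACK_SPACE is assumed here to expand to a
+ constant expression on every target, so it can be tested at
+ run time instead of with #ifdef.  */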
+ if (OUTGOING_REG_PARM_STACK_SPACE)
+ {
+ tree fn;
+ fn = emit_block_move_libcall_fn (false);
+ if (REG_PARM_STACK_SPACE (fn) != 0)
+ return false;
+ }
#endif
/* If any argument goes in memory, then it might clobber an outgoing
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
rtx dst_addr, src_addr;
- tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
+ tree call_expr, fn, src_tree, dst_tree, size_tree;
enum machine_mode size_mode;
rtx retval;
size_tree = make_tree (sizetype, size);
fn = emit_block_move_libcall_fn (true);
- arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
- arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
- arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
-
- /* Now we have to build up the CALL_EXPR itself. */
- call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- call_expr, arg_list, NULL_TREE);
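+ /* build_call_expr wraps FN in an ADDR_EXPR and collects the
+ arguments itself, replacing the manual tree_cons/build3
+ construction above.  */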
+ call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
CALL_EXPR_TAILCALL (call_expr) = tailcall;
retval = expand_normal (call_expr);
rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
- tree call_expr, arg_list, fn, object_tree, size_tree, val_tree;
+ tree call_expr, fn, object_tree, size_tree, val_tree;
enum machine_mode size_mode;
rtx retval;
val_tree = make_tree (integer_type_node, val);
fn = clear_storage_libcall_fn (true);
- arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
- arg_list = tree_cons (NULL_TREE, val_tree, arg_list);
- arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
-
- /* Now we have to build up the CALL_EXPR itself. */
- call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- call_expr, arg_list, NULL_TREE);
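+ /* Build "memset (object, val, size)" as a single CALL_EXPR,
+ passing the caller-supplied fill value.  */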
+ call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
CALL_EXPR_TAILCALL (call_expr) = tailcall;
retval = expand_normal (call_expr);
X is known to satisfy push_operand, and MODE is known to be complex.
Returns the last instruction emitted. */
-static rtx
+rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
enum machine_mode submode = GET_MODE_INNER (mode);
read_complex_part (y, !imag_first));
}
+/* A subroutine of emit_move_complex. Perform the move from Y to X
+ via two moves of the parts. Returns the last instruction emitted. */
+
+rtx
+emit_move_complex_parts (rtx x, rtx y)
+{
+ /* Show the output dies here. This is necessary for SUBREGs
+ of pseudos since we cannot track their lifetimes correctly;
+ hard regs shouldn't appear here except as return values. */
+ if (!reload_completed && !reload_in_progress
+ && REG_P (x) && !reg_overlap_mentioned_p (x, y))
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
+
+ write_complex_part (x, read_complex_part (y, false), false);
+ write_complex_part (x, read_complex_part (y, true), true);
+
+ return get_last_insn ();
+}
+
/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
MODE is known to be complex. Returns the last instruction emitted. */
return ret;
}
- /* Show the output dies here. This is necessary for SUBREGs
- of pseudos since we cannot track their lifetimes correctly;
- hard regs shouldn't appear here except as return values. */
- if (!reload_completed && !reload_in_progress
- && REG_P (x) && !reg_overlap_mentioned_p (x, y))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
-
- write_complex_part (x, read_complex_part (y, false), false);
- write_complex_part (x, read_complex_part (y, true), true);
- return get_last_insn ();
+ return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1. Generate a move from Y into X.
xinner = x;
- if (mode == BLKmode)
+ if (mode == BLKmode
+ || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
{
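+ /* On strict-alignment targets, a scalar headed for an
+ under-aligned slot is pushed via the block path as well.  */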
/* Copy a block into the stack, entirely or partially. */
offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
used = partial - offset;
+ if (mode != BLKmode)
+ {
+ /* A value is to be stored in an insufficiently aligned
+ stack slot; copy via a suitably aligned slot if
+ necessary. */
+ size = GEN_INT (GET_MODE_SIZE (mode));
+ if (!MEM_P (xinner))
+ {
+ temp = assign_temp (type, 0, 1, 1);
+ emit_move_insn (temp, xinner);
+ xinner = temp;
+ }
+ }
+
gcc_assert (size);
/* USED is now the # of bytes we need not copy to the stack
{
if (TYPE_UNSIGNED (TREE_TYPE (exp))
!= SUBREG_PROMOTED_UNSIGNED_P (target))
- exp = fold_convert
- (lang_hooks.types.signed_or_unsigned_type
- (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
+ {
+ /* Some types, e.g. Fortran's logical*4, won't have a signed
+ version, so use the mode instead. */
+ tree ntype
+ = (get_signed_or_unsigned_type
+ (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
+ if (ntype == NULL)
+ ntype = lang_hooks.types.type_for_mode
+ (TYPE_MODE (TREE_TYPE (exp)),
+ SUBREG_PROMOTED_UNSIGNED_P (target));
+
+ exp = fold_convert (ntype, exp);
+ }
exp = fold_convert (lang_hooks.types.type_for_mode
(GET_MODE (SUBREG_REG (target)),
case tcc_expression:
case tcc_reference:
+ case tcc_vl_exp:
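+ /* CALL_EXPRs now carry a variable number of operands (class
+ tcc_vl_exp), so they are scanned here like other expressions.  */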
/* Now do code-specific tests. EXP_RTL is set to any rtx we find in
the expression. If it is set, we conflict iff we are that rtx or
both are in memory. Otherwise, we check all operands of the
if (exp_rtl)
break;
- nops = TREE_CODE_LENGTH (TREE_CODE (exp));
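+ /* TREE_OPERAND_LENGTH also copes with tcc_vl_exp nodes, whose
+ operand count is stored in the node rather than being fixed
+ by TREE_CODE_LENGTH.  */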
+ nops = TREE_OPERAND_LENGTH (exp);
for (i = 0; i < nops; i++)
if (TREE_OPERAND (exp, i) != 0
&& ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
return 0;
- /* If this is a language-specific tree code, it may require
- special handling. */
- if ((unsigned int) TREE_CODE (exp)
- >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
- && !lang_hooks.safe_from_p (x, exp))
- return 0;
break;
case tcc_type:
return MAX (factor, target_align);
}
\f
+/* Return &VAR expression for emulated thread local VAR. */
+
+static tree
+emutls_var_address (tree var)
+{
+ tree emuvar = emutls_decl (var);
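+ /* emutls_decl returns the control variable through which the
+ emutls runtime locates (and lazily creates) this thread's
+ copy of VAR.  */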
+ tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
+ tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
+ tree arglist = build_tree_list (NULL_TREE, arg);
+ tree call = build_function_call_expr (fn, arglist);
+ return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
+}
+\f
/* Expands variable VAR. */
void
inner = TREE_OPERAND (exp, 0);
break;
+ case VAR_DECL:
+ /* TLS emulation hook - replace __thread VAR's &VAR with
+ __emutls_get_address (&_emutls.VAR). */
+ if (! targetm.have_tls
+ && TREE_CODE (exp) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (exp))
+ {
+ exp = emutls_var_address (exp);
+ return expand_expr (exp, target, tmode, modifier);
+ }
+ /* Fall through. */
+
default:
/* If the object is a DECL, then expand it for its rtl. Don't bypass
expand_expr, as that can have various side effects; LABEL_DECLs for
&& (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
layout_decl (exp, 0);
+ /* TLS emulation hook - replace __thread vars with
+ *__emutls_get_address (&_emutls.var). */
+ if (! targetm.have_tls
+ && TREE_CODE (exp) == VAR_DECL
+ && DECL_THREAD_LOCAL_P (exp))
+ {
+ exp = build_fold_indirect_ref (emutls_var_address (exp));
+ return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
+ }
+
/* ... fall through ... */
case FUNCTION_DECL:
necessarily be constant. */
if (mode == BLKmode)
{
- rtx new
- = assign_stack_temp_for_type
- (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
+ HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
+ rtx new;
+
+ /* If the reference doesn't use the alias set of its type,
+ we cannot create the temporary using that type. */
+ if (component_uses_parent_alias_set (exp))
+ {
+ new = assign_stack_local (ext_mode, size, 0);
+ set_mem_alias_set (new, get_alias_set (exp));
+ }
+ else
+ new = assign_stack_temp_for_type (ext_mode, size, 0, type);
emit_move_insn (new, op0);
op0 = copy_rtx (new);
case CALL_EXPR:
/* Check for a built-in function. */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
- && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
+ && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
== FUNCTION_DECL)
- && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
+ && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
{
- if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+ if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
== BUILT_IN_FRONTEND)
return lang_hooks.expand_expr (exp, original_target,
tmode, modifier,
target = expand_vec_cond_expr (exp, target);
return target;
+ case MODIFY_EXPR:
+ {
+ tree lhs = TREE_OPERAND (exp, 0);
+ tree rhs = TREE_OPERAND (exp, 1);
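+ /* An assignment expanded only for its side effects; the assert
+ checks that the result really is ignored.  */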
+ gcc_assert (ignore);
+ expand_assignment (lhs, rhs);
+ return const0_rtx;
+ }
+
case GIMPLE_MODIFY_STMT:
{
tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
}
expand_assignment (lhs, rhs);
-
return const0_rtx;
}
&& TREE_CODE (array) != VAR_DECL)
return 0;
- /* Check if the array has a non-zero lower bound. */
+ /* Check if the array has a nonzero lower bound. */
lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
if (!integer_zerop (lower_bound))
{