X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fexpr.c;h=32cb9bb755eb3d57217c1c4035e0b3de4ecab065;hb=c119b8ad7b84d02c43821a2a79330d79395cc462;hp=2357170dd1619f780e1be658faddf562d5815b7b;hpb=3bfa8adadb3a43aa297bebe84370f9ae3922ad34;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/expr.c b/gcc/expr.c index 2357170dd16..32cb9bb755e 100644 --- a/gcc/expr.c +++ b/gcc/expr.c @@ -1,6 +1,6 @@ /* Convert tree expression to rtl instructions, for GNU compiler. Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, - 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 + 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc. This file is part of GCC. @@ -54,6 +54,7 @@ along with GCC; see the file COPYING3. If not see #include "timevar.h" #include "df.h" #include "diagnostic.h" +#include "ssaexpand.h" /* Decide whether a function's arguments should be processed from first to last or from last to first. @@ -90,7 +91,7 @@ int cse_not_expected; /* This structure is used by move_by_pieces to describe the move to be performed. */ -struct move_by_pieces +struct move_by_pieces_d { rtx to; rtx to_addr; @@ -108,7 +109,7 @@ struct move_by_pieces /* This structure is used by store_by_pieces to describe the clear to be performed. */ -struct store_by_pieces +struct store_by_pieces_d { rtx to; rtx to_addr; @@ -125,16 +126,16 @@ static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT, unsigned int, unsigned int); static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode, - struct move_by_pieces *); + struct move_by_pieces_d *); static bool block_move_libcall_safe_for_call_parm (void); static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT); static tree emit_block_move_libcall_fn (int); static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned); static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode); static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int); -static void store_by_pieces_1 (struct store_by_pieces *, unsigned int); +static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int); static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode, - struct store_by_pieces *); + struct store_by_pieces_d *); static tree clear_storage_libcall_fn (int); static rtx compress_float_constant (rtx, rtx); static rtx get_subtarget (rtx); @@ -151,7 +152,7 @@ static int is_aligning_offset (const_tree, const_tree); static void expand_operands (tree, tree, rtx, rtx*, rtx*, enum expand_modifier); static rtx reduce_to_bit_field_precision (rtx, rtx, tree); -static rtx do_store_flag (tree, rtx, enum machine_mode, int); +static rtx do_store_flag (tree, rtx, enum machine_mode); #ifdef PUSH_ROUNDING static void emit_single_push_insn (enum machine_mode, rtx, tree); #endif @@ -175,7 +176,7 @@ static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES]; #ifndef MOVE_BY_PIECES_P #define MOVE_BY_PIECES_P(SIZE, ALIGN) \ (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \ - < (unsigned int) MOVE_RATIO) + < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ())) #endif /* This macro is used to determine whether clear_by_pieces should be @@ -183,7 +184,7 @@ static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES]; #ifndef CLEAR_BY_PIECES_P #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \ (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \ - < (unsigned int) CLEAR_RATIO) + < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ())) 
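
For illustration (editor's sketch, not part of the patch): after this change the by-pieces predicates consult optimize_insn_for_speed_p (), so MOVE_RATIO, CLEAR_RATIO and SET_RATIO may return different cutoffs for speed- and size-optimized code. A minimal standalone model of the decision, with hypothetical tuning constants:

#include <stdbool.h>

static unsigned int
example_move_ratio (bool speed)
{
  return speed ? 8 : 3;         // hypothetical per-target tuning values
}

static bool
example_use_by_pieces (unsigned int estimated_insns, bool speed)
{
  // Mirrors MOVE_BY_PIECES_P: expand the copy inline only while the
  // estimated instruction count stays below the active ratio.
  return estimated_insns < example_move_ratio (speed);
}
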
#endif /* This macro is used to determine whether store_by_pieces should be @@ -191,7 +192,7 @@ static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES]; #ifndef SET_BY_PIECES_P #define SET_BY_PIECES_P(SIZE, ALIGN) \ (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \ - < (unsigned int) SET_RATIO) + < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ())) #endif /* This macro is used to determine whether store_by_pieces should be @@ -199,7 +200,7 @@ static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES]; #ifndef STORE_BY_PIECES_P #define STORE_BY_PIECES_P(SIZE, ALIGN) \ (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \ - < (unsigned int) MOVE_RATIO) + < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ())) #endif /* This array records the insn_code of insns to perform block moves. */ @@ -234,7 +235,6 @@ enum insn_code sync_new_and_optab[NUM_MACHINE_MODES]; enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES]; enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES]; enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES]; -enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES]; enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES]; enum insn_code sync_lock_release[NUM_MACHINE_MODES]; @@ -268,7 +268,7 @@ init_expr_target (void) reg = gen_rtx_REG (VOIDmode, -1); insn = rtx_alloc (INSN); - pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX); + pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX); PATTERN (insn) = pat; for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES; @@ -588,27 +588,9 @@ convert_move (rtx to, rtx from, int unsignedp) if (unsignedp) fill_value = const0_rtx; else - { -#ifdef HAVE_slt - if (HAVE_slt - && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode - && STORE_FLAG_VALUE == -1) - { - emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX, - lowpart_mode, 0); - fill_value = gen_reg_rtx (word_mode); - emit_insn (gen_slt (fill_value)); - } - else -#endif - { - fill_value - = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom, - size_int (GET_MODE_BITSIZE (lowpart_mode) - 1), - NULL_RTX, 0); - fill_value = convert_to_mode (word_mode, fill_value, 1); - } - } + fill_value = emit_store_flag (gen_reg_rtx (word_mode), + LT, lowfrom, const0_rtx, + VOIDmode, 0, -1); /* Fill the remaining words. 
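
For illustration (editor's sketch, not part of the patch): the emit_store_flag call above computes the sign-fill word directly, replacing both the old HAVE_slt special case and the explicit arithmetic-shift fallback. In plain C terms, assuming a 32-bit word:

#include <stdint.h>

static int32_t
example_fill_word (int32_t low)
{
  // What the LT comparison with STORE_FLAG_VALUE of -1 produces:
  // all-ones when the topmost available word is negative, else zero.
  int32_t via_flag = (low < 0) ? -1 : 0;

  // The removed fallback derived the same mask with an arithmetic
  // right shift by (word bits - 1) on sign-propagating targets.
  int32_t via_shift = low >> 31;

  return (via_flag == via_shift) ? via_flag : 0;  // agree for all inputs
}
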
*/ for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++) @@ -894,7 +876,7 @@ rtx move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len, unsigned int align, int endp) { - struct move_by_pieces data; + struct move_by_pieces_d data; rtx to_addr, from_addr = XEXP (from, 0); unsigned int max_size = MOVE_MAX_PIECES + 1; enum machine_mode mode = VOIDmode, tmode; @@ -1106,7 +1088,7 @@ move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align, static void move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode, - struct move_by_pieces *data) + struct move_by_pieces_d *data) { unsigned int size = GET_MODE_SIZE (mode); rtx to1 = NULL_RTX, from1; @@ -1360,7 +1342,8 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align, pat = GEN_FCN ((int) code) (x, y, op2, opalign); else pat = GEN_FCN ((int) code) (x, y, op2, opalign, - GEN_INT (expected_align), + GEN_INT (expected_align + / BITS_PER_UNIT), GEN_INT (expected_size)); if (pat) { @@ -1441,7 +1424,7 @@ init_block_move_fn (const char *asmspec) const_ptr_type_node, sizetype, NULL_TREE); - fn = build_decl (FUNCTION_DECL, fn, args); + fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args); DECL_EXTERNAL (fn) = 1; TREE_PUBLIC (fn) = 1; DECL_ARTIFICIAL (fn) = 1; @@ -1614,7 +1597,7 @@ gen_group_rtx (rtx orig) gcc_assert (GET_CODE (orig) == PARALLEL); length = XVECLEN (orig, 0); - tmps = alloca (sizeof (rtx) * length); + tmps = XALLOCAVEC (rtx, length); /* Skip a NULL entry in first slot. */ i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1; @@ -1819,7 +1802,7 @@ emit_group_load (rtx dst, rtx src, tree type, int ssize) rtx *tmps; int i; - tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0)); + tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0)); emit_group_load_1 (tmps, dst, src, type, ssize); /* Copy the extracted pieces into the proper (probable) hard regs. */ @@ -1939,7 +1922,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) start = 1; finish = XVECLEN (src, 0); - tmps = alloca (sizeof (rtx) * finish); + tmps = XALLOCAVEC (rtx, finish); /* Copy the (probable) hard regs into pseudos. */ for (i = start; i < finish; i++) @@ -2038,10 +2021,55 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize) HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1)); enum machine_mode mode = GET_MODE (tmps[i]); unsigned int bytelen = GET_MODE_SIZE (mode); + unsigned int adj_bytelen = bytelen; rtx dest = dst; /* Handle trailing fragments that run over the size of the struct. 
*/
 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
+ adj_bytelen = ssize - bytepos;
+
+ if (GET_CODE (dst) == CONCAT)
+ {
+ if (bytepos + adj_bytelen
+ <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
+ dest = XEXP (dst, 0);
+ else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
+ {
+ bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
+ dest = XEXP (dst, 1);
+ }
+ else
+ {
+ enum machine_mode dest_mode = GET_MODE (dest);
+ enum machine_mode tmp_mode = GET_MODE (tmps[i]);
+
+ gcc_assert (bytepos == 0 && XVECLEN (src, 0));
+
+ if (GET_MODE_ALIGNMENT (dest_mode)
+ >= GET_MODE_ALIGNMENT (tmp_mode))
+ {
+ dest = assign_stack_temp (dest_mode,
+ GET_MODE_SIZE (dest_mode),
+ 0);
+ emit_move_insn (adjust_address (dest,
+ tmp_mode,
+ bytepos),
+ tmps[i]);
+ dst = dest;
+ }
+ else
+ {
+ dest = assign_stack_temp (tmp_mode,
+ GET_MODE_SIZE (tmp_mode),
+ 0);
+ emit_move_insn (dest, tmps[i]);
+ dst = adjust_address (dest, dest_mode, bytepos);
+ }
+ break;
+ }
+ }
+
+ if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
 {
 /* store_bit_field always takes its value from the lsb.
 Move the fragment to the lsb if it's not already there. */
@@ -2059,28 +2087,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
 build_int_cst (NULL_TREE, shift),
 tmps[i], 0);
 }
- bytelen = ssize - bytepos;
- }
-
- if (GET_CODE (dst) == CONCAT)
- {
- if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
- dest = XEXP (dst, 0);
- else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
- {
- bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
- dest = XEXP (dst, 1);
- }
- else
- {
- gcc_assert (bytepos == 0 && XVECLEN (src, 0));
- dest = assign_stack_temp (GET_MODE (dest),
- GET_MODE_SIZE (GET_MODE (dest)), 0);
- emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
- tmps[i]);
- dst = dest;
- break;
- }
+ bytelen = adj_bytelen;
 }
 
 /* Optimize the access just a bit. */
@@ -2242,6 +2249,26 @@ use_group_regs (rtx *call_fusage, rtx regs)
 use_reg (call_fusage, reg);
 }
 }
+
+/* Return the defining gimple statement for SSA_NAME NAME if it is an
+ assignment and the code of the expression on the RHS is CODE. Return
+ NULL otherwise. */
+
+static gimple
+get_def_for_expr (tree name, enum tree_code code)
+{
+ gimple def_stmt;
+
+ if (TREE_CODE (name) != SSA_NAME)
+ return NULL;
+
+ def_stmt = get_gimple_for_ssa_name (name);
+ if (!def_stmt
+ || gimple_assign_rhs_code (def_stmt) != code)
+ return NULL;
+
+ return def_stmt;
+}
 
 /* Determine whether the LEN bytes generated by CONSTFUN can be
@@ -2355,7 +2382,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
 void *constfundata, unsigned int align, bool memsetp, int endp)
 {
- struct store_by_pieces data;
+ struct store_by_pieces_d data;
 
 if (len == 0)
 {
@@ -2407,7 +2434,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
 static void
 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
 {
- struct store_by_pieces data;
+ struct store_by_pieces_d data;
 
 if (len == 0)
 return;
@@ -2435,7 +2462,7 @@ clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
 rtx with BLKmode). ALIGN is maximum alignment we can assume.
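
For illustration (editor's toy model, hypothetical types rather than GCC's API): get_def_for_expr above lets the expander look through an SSA name to its single defining assignment, so a multiply feeding an addition can be recognized even though the operand of the PLUS_EXPR is just a name:

#include <stddef.h>

enum toy_code { TOY_MULT, TOY_PLUS, TOY_OTHER };

struct toy_def
{
  enum toy_code rhs_code;       // code of the right-hand side
};

// NULL-or-definition contract mirroring get_def_for_expr.
static struct toy_def *
toy_get_def_for_expr (struct toy_def *def, enum toy_code code)
{
  if (def == NULL || def->rhs_code != code)
    return NULL;
  return def;
}

// "t = b * c;  d = t + e;" is spotted as a multiply-add candidate by
// chasing t's definition instead of matching nested tree operands.
static int
toy_is_mult_add (struct toy_def *plus_op0_def)
{
  return toy_get_def_for_expr (plus_op0_def, TOY_MULT) != NULL;
}
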
*/ static void -store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED, +store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED, unsigned int align ATTRIBUTE_UNUSED) { rtx to_addr = XEXP (data->to, 0); @@ -2533,7 +2560,7 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED, static void store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode, - struct store_by_pieces *data) + struct store_by_pieces_d *data) { unsigned int size = GET_MODE_SIZE (mode); rtx to1, cst; @@ -2673,7 +2700,7 @@ set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall) for the function we use for block clears. The first time FOR_CALL is true, we call assemble_external. */ -static GTY(()) tree block_clear_fn; +tree block_clear_fn; void init_block_clear_fn (const char *asmspec) @@ -2687,7 +2714,7 @@ init_block_clear_fn (const char *asmspec) integer_type_node, sizetype, NULL_TREE); - fn = build_decl (FUNCTION_DECL, fn, args); + fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args); DECL_EXTERNAL (fn) = 1; TREE_PUBLIC (fn) = 1; DECL_ARTIFICIAL (fn) = 1; @@ -2780,7 +2807,8 @@ set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align, pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign); else pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign, - GEN_INT (expected_align), + GEN_INT (expected_align + / BITS_PER_UNIT), GEN_INT (expected_size)); if (pat) { @@ -3437,13 +3465,14 @@ compress_float_constant (rtx x, rtx y) enum machine_mode srcmode; REAL_VALUE_TYPE r; int oldcost, newcost; + bool speed = optimize_insn_for_speed_p (); REAL_VALUE_FROM_CONST_DOUBLE (r, y); if (LEGITIMATE_CONSTANT_P (y)) - oldcost = rtx_cost (y, SET); + oldcost = rtx_cost (y, SET, speed); else - oldcost = rtx_cost (force_const_mem (dstmode, y), SET); + oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed); for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode)); srcmode != orig_srcmode; @@ -3470,7 +3499,7 @@ compress_float_constant (rtx x, rtx y) if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode)) continue; /* This is valid, but may not be cheaper than the original. */ - newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET); + newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed); if (oldcost < newcost) continue; } @@ -3478,7 +3507,7 @@ compress_float_constant (rtx x, rtx y) { trunc_y = force_const_mem (srcmode, trunc_y); /* This is valid, but may not be cheaper than the original. */ - newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET); + newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed); if (oldcost < newcost) continue; trunc_y = validize_mem (trunc_y); @@ -4221,7 +4250,7 @@ expand_assignment (tree to, tree from, bool nontemporal) /* Handle expand_expr of a complex value returning a CONCAT. 
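
For illustration (editor's sketch, not part of the patch): compress_float_constant now compares rtx_cost under the speed/size setting of the insn being expanded. The transformation itself only applies when the wide constant survives a round trip through the narrower mode, as in:

static int
example_extendable_from_float (double d)
{
  float f = (float) d;          // truncate to the narrower mode
  return (double) f == d;       // lossless, so load-float-then-extend
                                // is a legal, possibly cheaper, form
}
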
*/ if (GET_CODE (to_rtx) == CONCAT) { - if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE) + if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))) { gcc_assert (bitpos == 0); result = store_expr (from, to_rtx, false, nontemporal); @@ -4267,6 +4296,36 @@ expand_assignment (tree to, tree from, bool nontemporal) return; } + else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF) + { + enum machine_mode mode, op_mode1; + enum insn_code icode; + rtx reg, addr, mem, insn; + + reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL); + reg = force_not_mem (reg); + + mode = TYPE_MODE (TREE_TYPE (to)); + addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode, + EXPAND_SUM); + addr = memory_address (mode, addr); + mem = gen_rtx_MEM (mode, addr); + + set_mem_attributes (mem, to, 0); + + icode = movmisalign_optab->handlers[mode].insn_code; + gcc_assert (icode != CODE_FOR_nothing); + + op_mode1 = insn_data[icode].operand[1].mode; + if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1) + && op_mode1 != VOIDmode) + reg = copy_to_mode_reg (op_mode1, reg); + + insn = GEN_FCN (icode) (mem, reg); + emit_insn (insn); + return; + } + /* If the rhs is a function call and its value is not an aggregate, call the function before we start to compute the lhs. This is needed for correct code for cases such as @@ -4276,11 +4335,13 @@ expand_assignment (tree to, tree from, bool nontemporal) Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG since it might be a promoted variable where the zero- or sign- extension needs to be done. Handling this in the normal way is safe because no - computation is done before the call. */ + computation is done before the call. The same is true for SSA names. */ if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from) + && COMPLETE_TYPE_P (TREE_TYPE (from)) && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST - && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) - && REG_P (DECL_RTL (to)))) + && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL) + && REG_P (DECL_RTL (to))) + || TREE_CODE (to) == SSA_NAME)) { rtx value; @@ -4555,7 +4616,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal) } str_copy_len = MIN (str_copy_len, exp_len); if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str, - (void *) TREE_STRING_POINTER (exp), + CONST_CAST(char *, TREE_STRING_POINTER (exp)), MEM_ALIGN (target), false)) goto normal_expr; @@ -4563,7 +4624,7 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal) dest_mem = store_by_pieces (dest_mem, str_copy_len, builtin_strncpy_read_str, - (void *) TREE_STRING_POINTER (exp), + CONST_CAST(char *, TREE_STRING_POINTER (exp)), MEM_ALIGN (target), false, exp_len > str_copy_len ? 
1 : 0); if (exp_len > str_copy_len) @@ -4993,6 +5054,9 @@ count_type_elements (const_tree type, bool allow_flexarr) case REFERENCE_TYPE: return 1; + case ERROR_MARK: + return 0; + case VOID_TYPE: case METHOD_TYPE: case FUNCTION_TYPE: @@ -5434,7 +5498,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) expand_normal (hi_index); unsignedp = TYPE_UNSIGNED (domain); - index = build_decl (VAR_DECL, NULL_TREE, domain); + index = build_decl (EXPR_LOCATION (exp), + VAR_DECL, NULL_TREE, domain); index_r = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), @@ -5548,6 +5613,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) HOST_WIDE_INT bitpos; rtvec vector = NULL; unsigned n_elts; + alias_set_type alias; gcc_assert (eltmode != BLKmode); @@ -5599,7 +5665,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) if (need_to_clear && size > 0 && !vector) { if (REG_P (target)) - emit_move_insn (target, CONST0_RTX (GET_MODE (target))); + emit_move_insn (target, CONST0_RTX (GET_MODE (target))); else clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL); cleared = 1; @@ -5609,6 +5675,11 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) if (!cleared && !vector && REG_P (target)) emit_move_insn (target, CONST0_RTX (GET_MODE (target))); + if (MEM_P (target)) + alias = MEM_ALIAS_SET (target); + else + alias = get_alias_set (elttype); + /* Store each element of the constructor into the corresponding element of TARGET, determined by counting the elements. */ for (idx = 0, i = 0; @@ -5644,7 +5715,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size) bitpos = eltpos * elt_size; store_constructor_field (target, bitsize, bitpos, value_mode, value, type, - cleared, get_alias_set (elttype)); + cleared, alias); } } @@ -5756,22 +5827,25 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)) { rtx temp; + gimple nop_def; /* If EXP is a NOP_EXPR of precision less than its mode, then that implies a mask operation. If the precision is the same size as the field we're storing into, that mask is redundant. This is particularly common with bit field assignments generated by the C front end. */ - if (TREE_CODE (exp) == NOP_EXPR) + nop_def = get_def_for_expr (exp, NOP_EXPR); + if (nop_def) { tree type = TREE_TYPE (exp); if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type)) && bitsize == TYPE_PRECISION (type)) { - type = TREE_TYPE (TREE_OPERAND (exp, 0)); + tree op = gimple_assign_rhs1 (nop_def); + type = TREE_TYPE (op); if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize) - exp = TREE_OPERAND (exp, 0); + exp = op; } } @@ -6048,9 +6122,9 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize, return exp; } -/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF, - look for whether EXP or any nested component-refs within EXP is marked - as PACKED. */ +/* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an + ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within + EXP is marked as PACKED. */ bool contains_packed_reference (const_tree exp) @@ -6090,7 +6164,7 @@ contains_packed_reference (const_tree exp) } /* Return a tree of sizetype representing the size, in bytes, of the element - of EXP, an ARRAY_REF. */ + of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. 
*/ tree array_ref_element_size (tree exp) @@ -6117,7 +6191,7 @@ array_ref_element_size (tree exp) } /* Return a tree representing the lower bound of the array mentioned in - EXP, an ARRAY_REF. */ + EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ tree array_ref_low_bound (tree exp) @@ -6138,7 +6212,7 @@ array_ref_low_bound (tree exp) } /* Return a tree representing the upper bound of the array mentioned in - EXP, an ARRAY_REF. */ + EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */ tree array_ref_up_bound (tree exp) @@ -6182,26 +6256,44 @@ component_ref_field_offset (tree exp) return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp); } -/* Return 1 if T is an expression that get_inner_reference handles. */ +/* Alignment in bits the TARGET of an assignment may be assumed to have. */ -int -handled_component_p (const_tree t) +static unsigned HOST_WIDE_INT +target_align (const_tree target) { - switch (TREE_CODE (t)) + /* We might have a chain of nested references with intermediate misaligning + bitfields components, so need to recurse to find out. */ + + unsigned HOST_WIDE_INT this_align, outer_align; + + switch (TREE_CODE (target)) { case BIT_FIELD_REF: + return 1; + case COMPONENT_REF: + this_align = DECL_ALIGN (TREE_OPERAND (target, 1)); + outer_align = target_align (TREE_OPERAND (target, 0)); + return MIN (this_align, outer_align); + case ARRAY_REF: case ARRAY_RANGE_REF: + this_align = TYPE_ALIGN (TREE_TYPE (target)); + outer_align = target_align (TREE_OPERAND (target, 0)); + return MIN (this_align, outer_align); + + CASE_CONVERT: + case NON_LVALUE_EXPR: case VIEW_CONVERT_EXPR: - case REALPART_EXPR: - case IMAGPART_EXPR: - return 1; + this_align = TYPE_ALIGN (TREE_TYPE (target)); + outer_align = target_align (TREE_OPERAND (target, 0)); + return MAX (this_align, outer_align); default: - return 0; + return TYPE_ALIGN (TREE_TYPE (target)); } } + /* Given an rtx VALUE that may contain additions and multiplications, return an equivalent value that just refers to a register, memory, or constant. @@ -6537,9 +6629,6 @@ safe_from_p (const_rtx x, tree exp, int top_p) case tcc_type: /* Should never get a type here. */ gcc_unreachable (); - - case tcc_gimple_stmt: - gcc_unreachable (); } /* If we have an rtl, find any enclosed object. Then see if we conflict @@ -6651,14 +6740,10 @@ highest_pow2_factor (const_tree exp) static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree target, const_tree exp) { - unsigned HOST_WIDE_INT target_align, factor; - - factor = highest_pow2_factor (exp); - if (TREE_CODE (target) == COMPONENT_REF) - target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1)); - else - target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target)); - return MAX (factor, target_align); + unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT; + unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp); + + return MAX (factor, talign); } /* Return &VAR expression for emulated thread local VAR. */ @@ -6787,9 +6872,10 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode, CONSTRUCTORs too, which should yield a memory reference for the constructor's contents. Assume language specific tree nodes can be expanded in some interesting way. 
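
For illustration (editor's toy model with hypothetical types): the new target_align above walks nested references, where component and array steps can only lower the known alignment (MIN) while conversions such as VIEW_CONVERT_EXPR may raise it (MAX):

struct toy_ref
{
  enum { TOY_BASE, TOY_BITFIELD, TOY_COMPONENT, TOY_VIEW_CONVERT } kind;
  unsigned align;               // alignment contributed here, in bits
  struct toy_ref *inner;        // enclosing reference, NULL for TOY_BASE
};

static unsigned
toy_target_align (const struct toy_ref *r)
{
  unsigned outer;
  switch (r->kind)
    {
    case TOY_BITFIELD:
      return 1;                                    // promises nothing
    case TOY_COMPONENT:                            // ARRAY_REF-like too
      outer = toy_target_align (r->inner);
      return r->align < outer ? r->align : outer;  // MIN
    case TOY_VIEW_CONVERT:
      outer = toy_target_align (r->inner);
      return r->align > outer ? r->align : outer;  // MAX
    default:
      return r->align;                             // the base object
    }
}
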
*/ + gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE); if (DECL_P (exp) || TREE_CODE (exp) == CONSTRUCTOR - || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE) + || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR) { result = expand_expr (exp, target, tmode, modifier == EXPAND_INITIALIZER @@ -6828,6 +6914,16 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode, gcc_assert (inner != exp); subtarget = offset || bitpos ? NULL_RTX : target; + /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than + inner alignment, force the inner to be sufficiently aligned. */ + if (CONSTANT_CLASS_P (inner) + && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp))) + { + inner = copy_node (inner); + TREE_TYPE (inner) = copy_node (TREE_TYPE (inner)); + TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp)); + TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1; + } result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier); if (offset) @@ -7046,8 +7142,7 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode, /* Handle ERROR_MARK before anybody tries to access its type. */ if (TREE_CODE (exp) == ERROR_MARK - || TREE_CODE (exp) == PREDICT_EXPR - || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)) + || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)) { ret = CONST0_RTX (tmode); return ret ? ret : const0_rtx; @@ -7055,7 +7150,8 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode, if (flag_non_call_exceptions) { - rn = lookup_stmt_eh_region (exp); + rn = lookup_expr_eh_region (exp); + /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */ if (rn >= 0) last = get_last_insn (); @@ -7102,10 +7198,7 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode, && GET_CODE (PATTERN (insn)) != CLOBBER && GET_CODE (PATTERN (insn)) != USE && (CALL_P (insn) || may_trap_p (PATTERN (insn)))) - { - REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn), - REG_NOTES (insn)); - } + add_reg_note (insn, REG_EH_REGION, GEN_INT (rn)); } } @@ -7126,27 +7219,20 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, int ignore; tree context, subexp0, subexp1; bool reduce_bit_field; + gimple subexp0_def, subexp1_def; + tree top0, top1; #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \ ? reduce_to_bit_field_precision ((expr), \ target, \ type) \ : (expr)) - if (GIMPLE_STMT_P (exp)) - { - type = void_type_node; - mode = VOIDmode; - unsignedp = 0; - } - else - { - type = TREE_TYPE (exp); - mode = TYPE_MODE (type); - unsignedp = TYPE_UNSIGNED (type); - } + type = TREE_TYPE (exp); + mode = TYPE_MODE (type); + unsignedp = TYPE_UNSIGNED (type); ignore = (target == const0_rtx - || ((code == NOP_EXPR || code == CONVERT_EXPR + || ((CONVERT_EXPR_CODE_P (code) || code == COND_EXPR || code == VIEW_CONVERT_EXPR) && TREE_CODE (type) == VOID_TYPE)); @@ -7230,8 +7316,21 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, } case SSA_NAME: - return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, - NULL); + /* ??? ivopts calls expander, without any preparation from + out-of-ssa. So fake instructions as if this was an access to the + base variable. This unnecessarily allocates a pseudo, see how we can + reuse it, if partition base vars have it set already. 
*/ + if (!currently_expanding_to_rtl) + return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL); + { + gimple g = get_gimple_for_ssa_name (exp); + if (g) + return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target, + tmode, modifier, NULL); + } + decl_rtl = get_rtx_for_ssa_name (exp); + exp = SSA_NAME_VAR (exp); + goto expand_decl_rtl; case PARM_DECL: case VAR_DECL: @@ -7257,6 +7356,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case FUNCTION_DECL: case RESULT_DECL: decl_rtl = DECL_RTL (exp); + expand_decl_rtl: gcc_assert (decl_rtl); decl_rtl = copy_rtx (decl_rtl); @@ -7438,7 +7538,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, with non-BLKmode values. */ gcc_assert (GET_MODE (ret) != BLKmode); - val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp)); + val = build_decl (EXPR_LOCATION (exp), + VAR_DECL, NULL, TREE_TYPE (exp)); DECL_ARTIFICIAL (val) = 1; DECL_IGNORED_P (val) = 1; TREE_OPERAND (exp, 0) = val; @@ -7506,9 +7607,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* Resolve the misalignment now, so that we don't have to remember to resolve it later. Of course, this only works for reads. */ - /* ??? When we get around to supporting writes, we'll have to handle - this in store_expr directly. The vectorizer isn't generating - those yet, however. */ if (code == MISALIGNED_INDIRECT_REF) { int icode; @@ -7731,13 +7829,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case ARRAY_RANGE_REF: normal_inner_ref: { - enum machine_mode mode1; + enum machine_mode mode1, mode2; HOST_WIDE_INT bitsize, bitpos; tree offset; - int volatilep = 0; + int volatilep = 0, must_force_mem; tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1, &unsignedp, &volatilep, true); - rtx orig_op0; + rtx orig_op0, memloc; /* If we got back the original object, something is wrong. Perhaps we are evaluating an expression too early. In any event, don't @@ -7747,7 +7845,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* If TEM's type is a union of variable size, pass TARGET to the inner computation, since it will need a temporary and TARGET is known to have to do. This occurs in unchecked conversion in Ada. */ - orig_op0 = op0 = expand_expr (tem, (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE @@ -7761,45 +7858,47 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, || modifier == EXPAND_STACK_PARM) ? modifier : EXPAND_NORMAL); - /* If this is a constant, put it into a register if it is a legitimate - constant, OFFSET is 0, and we won't try to extract outside the - register (in case we were passed a partially uninitialized object - or a view_conversion to a larger size) or a BLKmode piece of it - (e.g. if it is unchecked-converted to a record type in Ada). Force - the constant to memory otherwise. */ - if (CONSTANT_P (op0)) - { - enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem)); - if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0) - && offset == 0 - && mode1 != BLKmode - && bitpos + bitsize <= GET_MODE_BITSIZE (mode)) - op0 = force_reg (mode, op0); - else - op0 = validize_mem (force_const_mem (mode, op0)); - } - - /* Otherwise, if this object not in memory and we either have an - offset, a BLKmode result, or a reference outside the object, put it - there. Such cases can occur in Ada if we have unchecked conversion - of an expression from a scalar type to an array or record type or - for an ARRAY_RANGE_REF whose type is BLKmode. 
*/ - else if (!MEM_P (op0) - && (offset != 0 - || mode1 == BLKmode - || (bitpos + bitsize - > GET_MODE_BITSIZE (GET_MODE (op0))))) + mode2 + = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0); + + /* If we have either an offset, a BLKmode result, or a reference + outside the underlying object, we must force it to memory. + Such a case can occur in Ada if we have unchecked conversion + of an expression from a scalar type to an aggregate type or + for an ARRAY_RANGE_REF whose type is BLKmode, or if we were + passed a partially uninitialized object or a view-conversion + to a larger size. */ + must_force_mem = (offset + || mode1 == BLKmode + || bitpos + bitsize > GET_MODE_BITSIZE (mode2)); + + /* If this is a constant, put it in a register if it is a legitimate + constant and we don't need a memory reference. */ + if (CONSTANT_P (op0) + && mode2 != BLKmode + && LEGITIMATE_CONSTANT_P (op0) + && !must_force_mem) + op0 = force_reg (mode2, op0); + + /* Otherwise, if this is a constant, try to force it to the constant + pool. Note that back-ends, e.g. MIPS, may refuse to do so if it + is a legitimate constant. */ + else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0))) + op0 = validize_mem (memloc); + + /* Otherwise, if this is a constant or the object is not in memory + and need be, put it there. */ + else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem)) { tree nt = build_qualified_type (TREE_TYPE (tem), (TYPE_QUALS (TREE_TYPE (tem)) | TYPE_QUAL_CONST)); - rtx memloc = assign_temp (nt, 1, 1, 1); - + memloc = assign_temp (nt, 1, 1, 1); emit_move_insn (memloc, op0); op0 = memloc; } - if (offset != 0) + if (offset) { rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); @@ -7956,20 +8055,20 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, if (mode == BLKmode) { HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode); - rtx new; + rtx new_rtx; /* If the reference doesn't use the alias set of its type, we cannot create the temporary using that type. */ if (component_uses_parent_alias_set (exp)) { - new = assign_stack_local (ext_mode, size, 0); - set_mem_alias_set (new, get_alias_set (exp)); + new_rtx = assign_stack_local (ext_mode, size, 0); + set_mem_alias_set (new_rtx, get_alias_set (exp)); } else - new = assign_stack_temp_for_type (ext_mode, size, 0, type); + new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type); - emit_move_insn (new, op0); - op0 = copy_rtx (new); + emit_move_insn (new_rtx, op0); + op0 = copy_rtx (new_rtx); PUT_MODE (op0, BLKmode); set_mem_attributes (op0, exp, 1); } @@ -8023,23 +8122,21 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, && (attr = lookup_attribute ("error", DECL_ATTRIBUTES (fndecl))) != NULL) error ("%Kcall to %qs declared with attribute error: %s", - exp, lang_hooks.decl_printable_name (fndecl, 1), + exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); if (fndecl && (attr = lookup_attribute ("warning", DECL_ATTRIBUTES (fndecl))) != NULL) - warning (0, "%Kcall to %qs declared with attribute warning: %s", - exp, lang_hooks.decl_printable_name (fndecl, 1), - TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); + warning_at (tree_nonartificial_location (exp), + 0, "%Kcall to %qs declared with attribute warning: %s", + exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)), + TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)))); /* Check for a built-in function. 
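
For illustration (editor's condensation of the rewritten test above, a plain C stand-in for the RTL-level check): the reference must be carved out of memory whenever there is a variable offset, the extracted piece is BLKmode, or the bit range runs past the underlying object's mode:

#include <stdbool.h>

static bool
example_must_force_mem (bool have_offset, bool piece_is_blkmode,
                        long bitpos, long bitsize, long mode_bitsize)
{
  return have_offset
         || piece_is_blkmode
         || bitpos + bitsize > mode_bitsize;
}
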
*/ if (fndecl && DECL_BUILT_IN (fndecl)) { - if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND) - return lang_hooks.expand_expr (exp, original_target, - tmode, modifier, alt_rtl); - else - return expand_builtin (exp, target, subtarget, tmode, ignore); + gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND); + return expand_builtin (exp, target, subtarget, tmode, ignore); } } return expand_call (exp, target, ignore); @@ -8150,26 +8247,91 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, return REDUCE_BIT_FIELD (op0); case VIEW_CONVERT_EXPR: - op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier); + op0 = NULL_RTX; + + /* If we are converting to BLKmode, try to avoid an intermediate + temporary by fetching an inner memory reference. */ + if (mode == BLKmode + && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST + && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode + && handled_component_p (TREE_OPERAND (exp, 0))) + { + enum machine_mode mode1; + HOST_WIDE_INT bitsize, bitpos; + tree offset; + int unsignedp; + int volatilep = 0; + tree tem + = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos, + &offset, &mode1, &unsignedp, &volatilep, + true); + rtx orig_op0; + + /* ??? We should work harder and deal with non-zero offsets. */ + if (!offset + && (bitpos % BITS_PER_UNIT) == 0 + && bitsize >= 0 + && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0) + { + /* See the normal_inner_ref case for the rationale. */ + orig_op0 + = expand_expr (tem, + (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE + && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) + != INTEGER_CST) + && modifier != EXPAND_STACK_PARM + ? target : NULL_RTX), + VOIDmode, + (modifier == EXPAND_INITIALIZER + || modifier == EXPAND_CONST_ADDRESS + || modifier == EXPAND_STACK_PARM) + ? modifier : EXPAND_NORMAL); + + if (MEM_P (orig_op0)) + { + op0 = orig_op0; + + /* Get a reference to just this component. */ + if (modifier == EXPAND_CONST_ADDRESS + || modifier == EXPAND_SUM + || modifier == EXPAND_INITIALIZER) + op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT); + else + op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT); + + if (op0 == orig_op0) + op0 = copy_rtx (op0); + + set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0); + if (REG_P (XEXP (op0, 0))) + mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0)); + + MEM_VOLATILE_P (op0) |= volatilep; + } + } + } + + if (!op0) + op0 = expand_expr (TREE_OPERAND (exp, 0), + NULL_RTX, VOIDmode, modifier); /* If the input and output modes are both the same, we are done. */ - if (TYPE_MODE (type) == GET_MODE (op0)) + if (mode == GET_MODE (op0)) ; /* If neither mode is BLKmode, and both modes are the same size then we can use gen_lowpart. */ - else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode - && GET_MODE_SIZE (TYPE_MODE (type)) - == GET_MODE_SIZE (GET_MODE (op0))) + else if (mode != BLKmode && GET_MODE (op0) != BLKmode + && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)) + && !COMPLEX_MODE_P (GET_MODE (op0))) { if (GET_CODE (op0) == SUBREG) op0 = force_reg (GET_MODE (op0), op0); - op0 = gen_lowpart (TYPE_MODE (type), op0); + op0 = gen_lowpart (mode, op0); } /* If both modes are integral, then we can convert from one to the other. 
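
For illustration (editor's sketch, assumes a 32-bit float): the gen_lowpart path above implements what VIEW_CONVERT_EXPR means between same-size non-BLKmode types, a pure reinterpretation of the bits, whose portable C analogue is a memcpy-based pun:

#include <stdint.h>
#include <string.h>

static uint32_t
example_view_convert_f32_to_u32 (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);    // same size, bits unchanged
  return u;
}
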
*/ - else if (SCALAR_INT_MODE_P (GET_MODE (op0)) - && SCALAR_INT_MODE_P (TYPE_MODE (type))) - op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0, + else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode)) + op0 = convert_modes (mode, GET_MODE (op0), op0, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))); /* As a last resort, spill op0 to memory, and reload it in a different mode. */ @@ -8193,8 +8355,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, op0 = target; } - /* At this point, OP0 is in the correct mode. If the output type is such - that the operand is known to be aligned, indicate that it is. + /* At this point, OP0 is in the correct mode. If the output type is + such that the operand is known to be aligned, indicate that it is. Otherwise, we need only be concerned about alignment for non-BLKmode results. */ if (MEM_P (op0)) @@ -8203,31 +8365,33 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, if (TYPE_ALIGN_OK (type)) set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type))); - else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT - && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type))) + else if (STRICT_ALIGNMENT + && mode != BLKmode + && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)) { tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); HOST_WIDE_INT temp_size = MAX (int_size_in_bytes (inner_type), - (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type))); - rtx new = assign_stack_temp_for_type (TYPE_MODE (type), - temp_size, 0, type); - rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); + (HOST_WIDE_INT) GET_MODE_SIZE (mode)); + rtx new_rtx + = assign_stack_temp_for_type (mode, temp_size, 0, type); + rtx new_with_op0_mode + = adjust_address (new_rtx, GET_MODE (op0), 0); gcc_assert (!TREE_ADDRESSABLE (exp)); if (GET_MODE (op0) == BLKmode) emit_block_move (new_with_op0_mode, op0, - GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))), + GEN_INT (GET_MODE_SIZE (mode)), (modifier == EXPAND_STACK_PARM ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL)); else emit_move_insn (new_with_op0_mode, op0); - op0 = new; + op0 = new_rtx; } - op0 = adjust_address (op0, TYPE_MODE (type), 0); + op0 = adjust_address (op0, mode, 0); } return op0; @@ -8236,32 +8400,43 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* Even though the sizetype mode and the pointer's mode can be different expand is able to handle this correctly and get the correct result out of the PLUS_EXPR code. */ + /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR + if sizetype precision is smaller than pointer precision. */ + if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type)) + exp = build2 (PLUS_EXPR, type, + TREE_OPERAND (exp, 0), + fold_convert (type, + fold_convert (ssizetype, + TREE_OPERAND (exp, 1)))); case PLUS_EXPR: /* Check if this is a case for multiplication and addition. */ if ((TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == FIXED_POINT_TYPE) - && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR) + && (subexp0_def = get_def_for_expr (TREE_OPERAND (exp, 0), + MULT_EXPR))) { tree subsubexp0, subsubexp1; - enum tree_code code0, code1, this_code; + gimple subsubexp0_def, subsubexp1_def; + enum tree_code this_code; - subexp0 = TREE_OPERAND (exp, 0); - subsubexp0 = TREE_OPERAND (subexp0, 0); - subsubexp1 = TREE_OPERAND (subexp0, 1); - code0 = TREE_CODE (subsubexp0); - code1 = TREE_CODE (subsubexp1); this_code = TREE_CODE (type) == INTEGER_TYPE ? 
NOP_EXPR : FIXED_CONVERT_EXPR; - if (code0 == this_code && code1 == this_code - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) + subsubexp0 = gimple_assign_rhs1 (subexp0_def); + subsubexp0_def = get_def_for_expr (subsubexp0, this_code); + subsubexp1 = gimple_assign_rhs2 (subexp0_def); + subsubexp1_def = get_def_for_expr (subsubexp1, this_code); + if (subsubexp0_def && subsubexp1_def + && (top0 = gimple_assign_rhs1 (subsubexp0_def)) + && (top1 = gimple_assign_rhs1 (subsubexp1_def)) + && (TYPE_PRECISION (TREE_TYPE (top0)) < TYPE_PRECISION (TREE_TYPE (subsubexp0))) - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) - == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))) - && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) - == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))) + && (TYPE_PRECISION (TREE_TYPE (top0)) + == TYPE_PRECISION (TREE_TYPE (top1))) + && (TYPE_UNSIGNED (TREE_TYPE (top0)) + == TYPE_UNSIGNED (TREE_TYPE (top1)))) { - tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0)); + tree op0type = TREE_TYPE (top0); enum machine_mode innermode = TYPE_MODE (op0type); bool zextend_p = TYPE_UNSIGNED (op0type); bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0)); @@ -8274,9 +8449,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, && (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)) { - expand_operands (TREE_OPERAND (subsubexp0, 0), - TREE_OPERAND (subsubexp1, 0), - NULL_RTX, &op0, &op1, EXPAND_NORMAL); + expand_operands (top0, top1, NULL_RTX, &op0, &op1, + EXPAND_NORMAL); op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode, EXPAND_NORMAL); temp = expand_ternary_op (mode, this_optab, op0, op1, op2, @@ -8388,7 +8562,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, || mode != ptr_mode) { expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), - subtarget, &op0, &op1, 0); + subtarget, &op0, &op1, EXPAND_NORMAL); if (op0 == const0_rtx) return op1; if (op1 == const0_rtx) @@ -8404,27 +8578,30 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* Check if this is a case for multiplication and subtraction. */ if ((TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == FIXED_POINT_TYPE) - && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR) + && (subexp1_def = get_def_for_expr (TREE_OPERAND (exp, 1), + MULT_EXPR))) { tree subsubexp0, subsubexp1; - enum tree_code code0, code1, this_code; + gimple subsubexp0_def, subsubexp1_def; + enum tree_code this_code; - subexp1 = TREE_OPERAND (exp, 1); - subsubexp0 = TREE_OPERAND (subexp1, 0); - subsubexp1 = TREE_OPERAND (subexp1, 1); - code0 = TREE_CODE (subsubexp0); - code1 = TREE_CODE (subsubexp1); this_code = TREE_CODE (type) == INTEGER_TYPE ? 
NOP_EXPR : FIXED_CONVERT_EXPR; - if (code0 == this_code && code1 == this_code - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) + subsubexp0 = gimple_assign_rhs1 (subexp1_def); + subsubexp0_def = get_def_for_expr (subsubexp0, this_code); + subsubexp1 = gimple_assign_rhs2 (subexp1_def); + subsubexp1_def = get_def_for_expr (subsubexp1, this_code); + if (subsubexp0_def && subsubexp1_def + && (top0 = gimple_assign_rhs1 (subsubexp0_def)) + && (top1 = gimple_assign_rhs1 (subsubexp1_def)) + && (TYPE_PRECISION (TREE_TYPE (top0)) < TYPE_PRECISION (TREE_TYPE (subsubexp0))) - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) - == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))) - && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0))) - == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))) + && (TYPE_PRECISION (TREE_TYPE (top0)) + == TYPE_PRECISION (TREE_TYPE (top1))) + && (TYPE_UNSIGNED (TREE_TYPE (top0)) + == TYPE_UNSIGNED (TREE_TYPE (top1)))) { - tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0)); + tree op0type = TREE_TYPE (top0); enum machine_mode innermode = TYPE_MODE (op0type); bool zextend_p = TYPE_UNSIGNED (op0type); bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0)); @@ -8437,9 +8614,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, && (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)) { - expand_operands (TREE_OPERAND (subsubexp0, 0), - TREE_OPERAND (subsubexp1, 0), - NULL_RTX, &op0, &op1, EXPAND_NORMAL); + expand_operands (top0, top1, NULL_RTX, &op0, &op1, + EXPAND_NORMAL); op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_NORMAL); temp = expand_ternary_op (mode, this_optab, op0, op1, op2, @@ -8538,66 +8714,65 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, subexp0 = TREE_OPERAND (exp, 0); subexp1 = TREE_OPERAND (exp, 1); + subexp0_def = get_def_for_expr (subexp0, NOP_EXPR); + subexp1_def = get_def_for_expr (subexp1, NOP_EXPR); + top0 = top1 = NULL_TREE; + /* First, check if we have a multiplication of one signed and one unsigned operand. 
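
For illustration (editor's example of the source shape involved): the PLUS_EXPR and MINUS_EXPR cases now chase SSA definitions to find both multiply operands widened from the same narrower precision, which permits a widening multiply-accumulate instead of a full multiply in the wide mode:

#include <stdint.h>

static int64_t
example_widening_mult_add (int32_t a, int32_t b, int64_t acc)
{
  // Both operands widen from 32 bits with matching signedness, so a
  // 32x32->64 multiply-add (madd-style) instruction applies.
  return acc + (int64_t) a * (int64_t) b;
}
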
*/ - if (TREE_CODE (subexp0) == NOP_EXPR - && TREE_CODE (subexp1) == NOP_EXPR + if (subexp0_def + && (top0 = gimple_assign_rhs1 (subexp0_def)) + && subexp1_def + && (top1 = gimple_assign_rhs1 (subexp1_def)) && TREE_CODE (type) == INTEGER_TYPE - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) - < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0))) - == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0)))) - && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))) - != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0))))) + && (TYPE_PRECISION (TREE_TYPE (top0)) + < TYPE_PRECISION (TREE_TYPE (subexp0))) + && (TYPE_PRECISION (TREE_TYPE (top0)) + == TYPE_PRECISION (TREE_TYPE (top1))) + && (TYPE_UNSIGNED (TREE_TYPE (top0)) + != TYPE_UNSIGNED (TREE_TYPE (top1)))) { enum machine_mode innermode - = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0))); + = TYPE_MODE (TREE_TYPE (top0)); this_optab = usmul_widen_optab; if (mode == GET_MODE_WIDER_MODE (innermode)) { if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) { - if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))) - expand_operands (TREE_OPERAND (subexp0, 0), - TREE_OPERAND (subexp1, 0), - NULL_RTX, &op0, &op1, 0); + if (TYPE_UNSIGNED (TREE_TYPE (top0))) + expand_operands (top0, top1, NULL_RTX, &op0, &op1, + EXPAND_NORMAL); else - expand_operands (TREE_OPERAND (subexp0, 0), - TREE_OPERAND (subexp1, 0), - NULL_RTX, &op1, &op0, 0); + expand_operands (top0, top1, NULL_RTX, &op1, &op0, + EXPAND_NORMAL); goto binop3; } } } - /* Check for a multiplication with matching signedness. */ - else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR + /* Check for a multiplication with matching signedness. If + valid, TOP0 and TOP1 were set in the previous if + condition. */ + else if (top0 && TREE_CODE (type) == INTEGER_TYPE - && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) - < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))) - && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST - && int_fits_type_p (TREE_OPERAND (exp, 1), - TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) + && (TYPE_PRECISION (TREE_TYPE (top0)) + < TYPE_PRECISION (TREE_TYPE (subexp0))) + && ((TREE_CODE (subexp1) == INTEGER_CST + && int_fits_type_p (subexp1, TREE_TYPE (top0)) /* Don't use a widening multiply if a shift will do. */ - && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)))) + && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1))) > HOST_BITS_PER_WIDE_INT) - || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0)) + || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0)) || - (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR - && (TYPE_PRECISION (TREE_TYPE - (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) - == TYPE_PRECISION (TREE_TYPE - (TREE_OPERAND - (TREE_OPERAND (exp, 0), 0)))) + (top1 + && (TYPE_PRECISION (TREE_TYPE (top1)) + == TYPE_PRECISION (TREE_TYPE (top0)) /* If both operands are extended, they must either both be zero-extended or both be sign-extended. */ - && (TYPE_UNSIGNED (TREE_TYPE - (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))) - == TYPE_UNSIGNED (TREE_TYPE - (TREE_OPERAND - (TREE_OPERAND (exp, 0), 0))))))) + && (TYPE_UNSIGNED (TREE_TYPE (top1)) + == TYPE_UNSIGNED (TREE_TYPE (top0))))))) { - tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); + tree op0type = TREE_TYPE (top0); enum machine_mode innermode = TYPE_MODE (op0type); bool zextend_p = TYPE_UNSIGNED (op0type); optab other_optab = zextend_p ? 
smul_widen_optab : umul_widen_optab; @@ -8607,27 +8782,24 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, { if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing) { - if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) - expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), - TREE_OPERAND (exp, 1), - NULL_RTX, &op0, &op1, EXPAND_NORMAL); + if (TREE_CODE (subexp1) == INTEGER_CST) + expand_operands (top0, subexp1, NULL_RTX, &op0, &op1, + EXPAND_NORMAL); else - expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0), - TREE_OPERAND (TREE_OPERAND (exp, 1), 0), - NULL_RTX, &op0, &op1, EXPAND_NORMAL); + expand_operands (top0, top1, NULL_RTX, &op0, &op1, + EXPAND_NORMAL); goto binop3; } else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing && innermode == word_mode) { rtx htem, hipart; - op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)); - if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST) + op0 = expand_normal (top0); + if (TREE_CODE (subexp1) == INTEGER_CST) op1 = convert_modes (innermode, mode, - expand_normal (TREE_OPERAND (exp, 1)), - unsignedp); + expand_normal (subexp1), unsignedp); else - op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)); + op1 = expand_normal (top1); temp = expand_binop (mode, other_optab, op0, op1, target, unsignedp, OPTAB_LIB_WIDEN); hipart = gen_highpart (innermode, temp); @@ -8640,8 +8812,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, } } } - expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), - subtarget, &op0, &op1, 0); + expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL); return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp)); case TRUNC_DIV_EXPR: @@ -8661,7 +8832,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, then if the divisor is constant can optimize the case where some terms of the dividend have coeffs divisible by it. */ expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), - subtarget, &op0, &op1, 0); + subtarget, &op0, &op1, EXPAND_NORMAL); return expand_divmod (0, code, mode, op0, op1, target, unsignedp); case RDIV_EXPR: @@ -8674,7 +8845,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, if (modifier == EXPAND_STACK_PARM) target = 0; expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), - subtarget, &op0, &op1, 0); + subtarget, &op0, &op1, EXPAND_NORMAL); return expand_divmod (1, code, mode, op0, op1, target, unsignedp); case FIXED_CONVERT_EXPR: @@ -8751,7 +8922,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, && REGNO (target) < FIRST_PSEUDO_REGISTER)) target = gen_reg_rtx (mode); expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), - target, &op0, &op1, 0); + target, &op0, &op1, EXPAND_NORMAL); /* First try to do it with a special MIN or MAX instruction. If that does not win, use a conditional jump to select the proper @@ -8937,7 +9108,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case LTGT_EXPR: temp = do_store_flag (exp, modifier != EXPAND_STACK_PARM ? target : NULL_RTX, - tmode != VOIDmode ? tmode : mode, 0); + tmode != VOIDmode ? tmode : mode); if (temp != 0) return temp; @@ -8982,7 +9153,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* If no set-flag instruction, must generate a conditional store into a temporary variable. Drop through and handle this like && and ||. 
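
For illustration (editor's sketch of the fallback described above): without a usable set-flag instruction, the result of a comparison, and likewise of && and ||, is materialized with ordinary stores around a conditional jump, the set/jump/set sequence:

static int
example_store_flag_fallback (int a, int b)
{
  int target = 1;               // optimistically store the "true" value
  if (a < b)
    goto done;                  // comparison holds, keep the 1
  target = 0;                   // otherwise overwrite with 0
done:
  return target;
}
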
*/
-
+ /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
+ are occasionally created by folding during expansion. */
+ case TRUTH_ANDIF_EXPR:
+ case TRUTH_ORIF_EXPR:
 if (! ignore
 && (target == 0
 || modifier == EXPAND_STACK_PARM
@@ -9091,16 +9265,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 tree lhs = TREE_OPERAND (exp, 0);
 tree rhs = TREE_OPERAND (exp, 1);
 gcc_assert (ignore);
- expand_assignment (lhs, rhs, false);
- return const0_rtx;
- }
-
- case GIMPLE_MODIFY_STMT:
- {
- tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
- tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
-
- gcc_assert (ignore);
 /* Check for |= or &= of a bitfield of size one into another bitfield
 of size 1. In this case, (unless we need the result of the
@@ -9192,18 +9356,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 case POSTDECREMENT_EXPR:
 case LOOP_EXPR:
 case EXIT_EXPR:
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
 /* Lowered by gimplify.c. */
 gcc_unreachable ();
 
- case CHANGE_DYNAMIC_TYPE_EXPR:
- /* This is ignored at the RTL level. The tree level set
- DECL_POINTER_ALIAS_SET of any variable to be 0, which is
- overkill for the RTL layer but is all that we can
- represent. */
- return const0_rtx;
-
 case EXC_PTR_EXPR:
 return get_exception_pointer ();
 
@@ -9268,7 +9423,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 tree oprnd0 = TREE_OPERAND (exp, 0);
 tree oprnd1 = TREE_OPERAND (exp, 1);
 
- expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
 target, unsignedp);
 return target;
@@ -9289,7 +9444,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 case VEC_EXTRACT_ODD_EXPR:
 {
 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
- NULL_RTX, &op0, &op1, 0);
+ NULL_RTX, &op0, &op1, EXPAND_NORMAL);
 this_optab = optab_for_tree_code (code, type, optab_default);
 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
 OPTAB_WIDEN);
@@ -9301,7 +9456,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 case VEC_INTERLEAVE_LOW_EXPR:
 {
 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
- NULL_RTX, &op0, &op1, 0);
+ NULL_RTX, &op0, &op1, EXPAND_NORMAL);
 this_optab = optab_for_tree_code (code, type, optab_default);
 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
 OPTAB_WIDEN);
@@ -9349,7 +9504,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 tree oprnd0 = TREE_OPERAND (exp, 0);
 tree oprnd1 = TREE_OPERAND (exp, 1);
 
- expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
+ expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
 target, unsignedp);
 gcc_assert (target);
 return target;
 
@@ -9359,27 +9514,38 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 case VEC_PACK_TRUNC_EXPR:
 case VEC_PACK_SAT_EXPR:
 case VEC_PACK_FIX_TRUNC_EXPR:
+ mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ goto binop;
+
+ case COMPOUND_LITERAL_EXPR:
 {
- mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
- goto binop;
- }
+ /* Initialize the anonymous variable declared in the compound
+ literal, then return the variable. */
+ tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
 
- case OMP_ATOMIC_LOAD:
- case OMP_ATOMIC_STORE:
- /* OMP expansion is not run when there were errors, so these codes
- can get here.
*/ - gcc_assert (errorcount != 0); - return NULL_RTX; + /* Create RTL for this variable. */ + if (!DECL_RTL_SET_P (decl)) + { + if (DECL_HARD_REGISTER (decl)) + /* The user specified an assembler name for this variable. + Set that up now. */ + rest_of_decl_compilation (decl, 0, 0); + else + expand_decl (decl); + } + + return expand_expr_real (decl, original_target, tmode, + modifier, alt_rtl); + } default: - return lang_hooks.expand_expr (exp, original_target, tmode, - modifier, alt_rtl); + gcc_unreachable (); } /* Here to do an ordinary binary operator. */ binop: expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), - subtarget, &op0, &op1, 0); + subtarget, &op0, &op1, EXPAND_NORMAL); binop2: this_optab = optab_for_tree_code (code, type, optab_default); binop3: @@ -9590,9 +9756,6 @@ string_constant (tree arg, tree *ptr_offset) If TARGET is nonzero, store the result there if convenient. - If ONLY_CHEAP is nonzero, only do this if it is likely to be very - cheap. - Return zero if there is no suitable set-flag instruction available on this machine. @@ -9605,7 +9768,7 @@ string_constant (tree arg, tree *ptr_offset) set/jump/set sequence. */ static rtx -do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) +do_store_flag (tree exp, rtx target, enum machine_mode mode) { enum rtx_code code; tree arg0, arg1, type; @@ -9614,7 +9777,6 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) int invert = 0; int unsignedp; rtx op0, op1; - enum insn_code icode; rtx subtarget = target; rtx result, label; @@ -9758,42 +9920,11 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) if (! can_compare_p (code, operand_mode, ccp_store_flag)) return 0; - icode = setcc_gen_code[(int) code]; - - if (icode == CODE_FOR_nothing) - { - enum machine_mode wmode; - - for (wmode = operand_mode; - icode == CODE_FOR_nothing && wmode != VOIDmode; - wmode = GET_MODE_WIDER_MODE (wmode)) - icode = optab_handler (cstore_optab, wmode)->insn_code; - } - - if (icode == CODE_FOR_nothing - || (only_cheap && insn_data[(int) icode].operand[0].mode != mode)) - { - /* We can only do this if it is one of the special cases that - can be handled without an scc insn. */ - if ((code == LT && integer_zerop (arg1)) - || (! only_cheap && code == GE && integer_zerop (arg1))) - ; - else if (! only_cheap && (code == NE || code == EQ) - && TREE_CODE (type) != REAL_TYPE - && ((optab_handler (abs_optab, operand_mode)->insn_code - != CODE_FOR_nothing) - || (optab_handler (ffs_optab, operand_mode)->insn_code - != CODE_FOR_nothing))) - ; - else - return 0; - } - if (! get_subtarget (target) || GET_MODE (subtarget) != operand_mode) subtarget = 0; - expand_operands (arg0, arg1, subtarget, &op0, &op1, 0); + expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL); if (target == 0) target = gen_reg_rtx (mode); @@ -9833,19 +9964,6 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) # define CODE_FOR_casesi CODE_FOR_nothing #endif -/* If the machine does not have a case insn that compares the bounds, - this means extra overhead for dispatch tables, which raises the - threshold for using them. */ -#ifndef CASE_VALUES_THRESHOLD -#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) -#endif /* CASE_VALUES_THRESHOLD */ - -unsigned int -case_values_threshold (void) -{ - return CASE_VALUES_THRESHOLD; -} - /* Attempt to generate a casesi instruction. Returns 1 if successful, 0 otherwise (i.e. if there is no casesi instruction). 
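
For illustration (editor's example): the new COMPOUND_LITERAL_EXPR case above gives the anonymous object a DECL, expands its initializer, and then expands the variable itself, which is exactly how a C99 compound literal behaves at the source level:

struct toy_point { int x, y; };

static int
example_compound_literal (void)
{
  struct toy_point *p = &(struct toy_point) { .x = 1, .y = 2 };
  return p->x + p->y;           // the literal acts as a named temporary
}
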
*/ int @@ -10028,16 +10146,16 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range, int vector_mode_valid_p (enum machine_mode mode) { - enum mode_class class = GET_MODE_CLASS (mode); + enum mode_class mclass = GET_MODE_CLASS (mode); enum machine_mode innermode; /* Doh! What's going on? */ - if (class != MODE_VECTOR_INT - && class != MODE_VECTOR_FLOAT - && class != MODE_VECTOR_FRACT - && class != MODE_VECTOR_UFRACT - && class != MODE_VECTOR_ACCUM - && class != MODE_VECTOR_UACCUM) + if (mclass != MODE_VECTOR_INT + && mclass != MODE_VECTOR_FLOAT + && mclass != MODE_VECTOR_FRACT + && mclass != MODE_VECTOR_UFRACT + && mclass != MODE_VECTOR_ACCUM + && mclass != MODE_VECTOR_UACCUM) return 0; /* Hardware support. Woo hoo! */