X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fexpr.c;h=df86a57ee1813780c655a4115706d133e665423b;hb=f5734d36e9d0cf7d7f0186e40ad9915da244bc84;hp=b334453efd597677284aedc53389a2ace9483b88;hpb=b6a5fc458e6f8824f5c43180b872141416496e00;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/expr.c b/gcc/expr.c
index b334453efd5..df86a57ee18 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -16,8 +16,8 @@ for more details.
 
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING.  If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA.  */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA.  */
 
 #include "config.h"
 #include "system.h"
@@ -126,7 +126,7 @@ static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                               struct move_by_pieces *);
 static bool block_move_libcall_safe_for_call_parm (void);
 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
-static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
+static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
 static tree emit_block_move_libcall_fn (int);
 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
@@ -134,8 +134,7 @@ static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                                struct store_by_pieces *);
-static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
-static rtx clear_storage_via_libcall (rtx, rtx);
+static rtx clear_storage_via_libcall (rtx, rtx, bool);
 static tree clear_storage_libcall_fn (int);
 static rtx compress_float_constant (rtx, rtx);
 static rtx get_subtarget (rtx);
@@ -146,7 +145,6 @@ static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                         tree, tree, int);
 
-static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
 
 static int is_aligning_offset (tree, tree);
@@ -200,12 +198,13 @@ static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
 /* This array records the insn_code of insns to perform block moves.  */
 enum insn_code movmem_optab[NUM_MACHINE_MODES];
 
-/* This array records the insn_code of insns to perform block clears.  */
-enum insn_code clrmem_optab[NUM_MACHINE_MODES];
+/* This array records the insn_code of insns to perform block sets.  */
+enum insn_code setmem_optab[NUM_MACHINE_MODES];
 
-/* These arrays record the insn_code of two different kinds of insns
+/* These arrays record the insn_code of three different kinds of insns
    to perform block compares.  */
 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
+enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
 
 /* Synchronization primitives.  */
@@ -350,8 +349,8 @@ convert_move (rtx to, rtx from, int unsignedp)
 {
   enum machine_mode to_mode = GET_MODE (to);
   enum machine_mode from_mode = GET_MODE (from);
-  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
-  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
+  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
+  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
   enum insn_code code;
   rtx libcall;
 
@@ -466,19 +465,27 @@ convert_move (rtx to, rtx from, int unsignedp)
     }
   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
     {
+      rtx new_from;
       enum machine_mode full_mode
         = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
 
       gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                   != CODE_FOR_nothing);
 
-      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
-                      to, from, UNKNOWN);
       if (to_mode == full_mode)
-        return;
+        {
+          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
+                          to, from, UNKNOWN);
+          return;
+        }
+
+      new_from = gen_reg_rtx (full_mode);
+      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
+                      new_from, from, UNKNOWN);
 
       /* else proceed to integer conversions below.  */
       from_mode = full_mode;
+      from = new_from;
     }
 
   /* Now both modes are integers.  */
@@ -635,9 +642,6 @@ convert_move (rtx to, rtx from, int unsignedp)
       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
           != CODE_FOR_nothing)
         {
-          if (flag_force_mem)
-            from = force_not_mem (from);
-
           emit_unop_insn (code, to, from, equiv_code);
           return;
         }
@@ -1148,6 +1152,7 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
   switch (method)
     {
     case BLOCK_OP_NORMAL:
+    case BLOCK_OP_TAILCALL:
       may_use_call = true;
       break;
 
@@ -1196,7 +1201,8 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
   else if (emit_block_move_via_movmem (x, y, size, align))
     ;
   else if (may_use_call)
-    retval = emit_block_move_via_libcall (x, y, size);
+    retval = emit_block_move_via_libcall (x, y, size,
+                                          method == BLOCK_OP_TAILCALL);
   else
     emit_block_move_via_loop (x, y, size, align);
 
@@ -1325,7 +1331,7 @@ emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
    Return the return value from memcpy, 0 otherwise.  */
 
 static rtx
-emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
+emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
 {
   rtx dst_addr, src_addr;
   tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
@@ -1367,6 +1373,7 @@ emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
   call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
   call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                       call_expr, arg_list, NULL_TREE);
+  CALL_EXPR_TAILCALL (call_expr) = tailcall;
 
   retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
 
@@ -2427,11 +2434,13 @@ store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
    its length in bytes.  */
 
 rtx
-clear_storage (rtx object, rtx size)
+clear_storage (rtx object, rtx size, enum block_op_methods method)
 {
   enum machine_mode mode = GET_MODE (object);
   unsigned int align;
 
+  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
+
   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
      just move a zero.  Otherwise, do this a piece at a time.  */
   if (mode != BLKmode
@@ -2465,75 +2474,20 @@ clear_storage (rtx object, rtx size)
   if (GET_CODE (size) == CONST_INT
       && CLEAR_BY_PIECES_P (INTVAL (size), align))
     clear_by_pieces (object, INTVAL (size), align);
-  else if (clear_storage_via_clrmem (object, size, align))
+  else if (set_storage_via_setmem (object, size, const0_rtx, align))
     ;
   else
-    return clear_storage_via_libcall (object, size);
+    return clear_storage_via_libcall (object, size,
+                                      method == BLOCK_OP_TAILCALL);
 
   return NULL;
 }
 
-/* A subroutine of clear_storage.  Expand a clrmem pattern;
-   return true if successful.  */
-
-static bool
-clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
-{
-  /* Try the most limited insn first, because there's no point
-     including more than one in the machine description unless
-     the more limited one has some advantage.  */
-
-  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
-  enum machine_mode mode;
-
-  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
-       mode = GET_MODE_WIDER_MODE (mode))
-    {
-      enum insn_code code = clrmem_optab[(int) mode];
-      insn_operand_predicate_fn pred;
-
-      if (code != CODE_FOR_nothing
-          /* We don't need MODE to be narrower than
-             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
-             the mode mask, as it is returned by the macro, it will
-             definitely be less than the actual mode mask.  */
-          && ((GET_CODE (size) == CONST_INT
-               && ((unsigned HOST_WIDE_INT) INTVAL (size)
-                   <= (GET_MODE_MASK (mode) >> 1)))
-              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
-          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
-              || (*pred) (object, BLKmode))
-          && ((pred = insn_data[(int) code].operand[2].predicate) == 0
-              || (*pred) (opalign, VOIDmode)))
-        {
-          rtx op1;
-          rtx last = get_last_insn ();
-          rtx pat;
-
-          op1 = convert_to_mode (mode, size, 1);
-          pred = insn_data[(int) code].operand[1].predicate;
-          if (pred != 0 && ! (*pred) (op1, mode))
-            op1 = copy_to_mode_reg (mode, op1);
-
-          pat = GEN_FCN ((int) code) (object, op1, opalign);
-          if (pat)
-            {
-              emit_insn (pat);
-              return true;
-            }
-          else
-            delete_insns_since (last);
-        }
-    }
-
-  return false;
-}
-
 /* A subroutine of clear_storage.  Expand a call to memset.
    Return the return value of memset, 0 otherwise.  */
 
 static rtx
-clear_storage_via_libcall (rtx object, rtx size)
+clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
 {
   tree call_expr, arg_list, fn, object_tree, size_tree;
   enum machine_mode size_mode;
@@ -2566,6 +2520,7 @@ clear_storage_via_libcall (rtx object, rtx size)
   call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
   call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                       call_expr, arg_list, NULL_TREE);
+  CALL_EXPR_TAILCALL (call_expr) = tailcall;
 
   retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
 
@@ -2621,6 +2576,73 @@ clear_storage_libcall_fn (int for_call)
 
   return block_clear_fn;
 }
 
+/* Expand a setmem pattern; return true if successful.  */
+
+bool
+set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
+{
+  /* Try the most limited insn first, because there's no point
+     including more than one in the machine description unless
+     the more limited one has some advantage.  */
+
+  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
+  enum machine_mode mode;
+
+  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
+       mode = GET_MODE_WIDER_MODE (mode))
+    {
+      enum insn_code code = setmem_optab[(int) mode];
+      insn_operand_predicate_fn pred;
+
+      if (code != CODE_FOR_nothing
+          /* We don't need MODE to be narrower than
+             BITS_PER_HOST_WIDE_INT here because if SIZE is less than
+             the mode mask, as it is returned by the macro, it will
+             definitely be less than the actual mode mask.  */
+          && ((GET_CODE (size) == CONST_INT
+               && ((unsigned HOST_WIDE_INT) INTVAL (size)
+                   <= (GET_MODE_MASK (mode) >> 1)))
+              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
+          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
+              || (*pred) (object, BLKmode))
+          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
+              || (*pred) (opalign, VOIDmode)))
+        {
+          rtx opsize, opchar;
+          enum machine_mode char_mode;
+          rtx last = get_last_insn ();
+          rtx pat;
+
+          opsize = convert_to_mode (mode, size, 1);
+          pred = insn_data[(int) code].operand[1].predicate;
+          if (pred != 0 && ! (*pred) (opsize, mode))
+            opsize = copy_to_mode_reg (mode, opsize);
+
+          opchar = val;
+          char_mode = insn_data[(int) code].operand[2].mode;
+          if (char_mode != VOIDmode)
+            {
+              opchar = convert_to_mode (char_mode, opchar, 1);
+              pred = insn_data[(int) code].operand[2].predicate;
+              if (pred != 0 && ! (*pred) (opchar, char_mode))
+                opchar = copy_to_mode_reg (char_mode, opchar);
+            }
+
+          pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
+          if (pat)
+            {
+              emit_insn (pat);
+              return true;
+            }
+          else
+            delete_insns_since (last);
+        }
+    }
+
+  return false;
+}
+
+
 /* Write to one of the components of the complex value CPLX.  Write VAL to
    the real part if IMAG_P is false, and the imaginary part if its true.  */
 
@@ -2641,6 +2663,19 @@ write_complex_part (rtx cplx, rtx val, bool imag_p)
   imode = GET_MODE_INNER (cmode);
   ibitsize = GET_MODE_BITSIZE (imode);
 
+  /* For MEMs simplify_gen_subreg may generate an invalid new address
+     because, e.g., the original address is considered mode-dependent
+     by the target, which restricts simplify_subreg from invoking
+     adjust_address_nv.  Instead of preparing fallback support for an
+     invalid address, we call adjust_address_nv directly.  */
+  if (MEM_P (cplx))
+    {
+      emit_move_insn (adjust_address_nv (cplx, imode,
+                                         imag_p ? GET_MODE_SIZE (imode) : 0),
+                      val);
+      return;
+    }
+
   /* If the sub-object is at least word sized, then we know that subregging
      will work.  This special case is important, since store_bit_field
      wants to operate on integer modes, and there's rarely an OImode to
@@ -2650,13 +2685,9 @@ write_complex_part (rtx cplx, rtx val, bool imag_p)
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
-      || (GET_CODE (cplx) == REG
+      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
-         && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
-      /* For MEMs we always try to make a "subreg", that is to adjust
-        the MEM, because store_bit_field may generate overly
-        convoluted RTL for sub-word fields.  */
-      || MEM_P (cplx))
+         && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
     {
       rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                       imag_p ? GET_MODE_SIZE (imode) : 0);
@@ -2701,6 +2732,15 @@ read_complex_part (rtx cplx, bool imag_p)
         }
     }
 
+  /* For MEMs simplify_gen_subreg may generate an invalid new address
+     because, e.g., the original address is considered mode-dependent
+     by the target, which restricts simplify_subreg from invoking
+     adjust_address_nv.  Instead of preparing fallback support for an
+     invalid address, we call adjust_address_nv directly.  */
+  if (MEM_P (cplx))
+    return adjust_address_nv (cplx, imode,
+                              imag_p ? GET_MODE_SIZE (imode) : 0);
+
   /* If the sub-object is at least word sized, then we know that subregging
      will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
@@ -2710,13 +2750,9 @@ read_complex_part (rtx cplx, bool imag_p)
        the original object if it spans an even number of hard regs.
        This special case is important for SCmode on 64-bit platforms
        where the natural size of floating-point regs is 32-bit.  */
-      || (GET_CODE (cplx) == REG
+      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
-         && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
-      /* For MEMs we always try to make a "subreg", that is to adjust
-        the MEM, because extract_bit_field may generate overly
-        convoluted RTL for sub-word fields.  */
-      || MEM_P (cplx))
+         && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
     {
       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
@@ -2776,7 +2812,7 @@ emit_move_change_mode (enum machine_mode new_mode,
    emitted, or NULL if such a move could not be generated.  */
 
 static rtx
-emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
+emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
 {
   enum machine_mode imode;
   enum insn_code code;
@@ -2791,10 +2827,10 @@ emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
   if (code == CODE_FOR_nothing)
     return NULL_RTX;
 
-  x = emit_move_change_mode (imode, mode, x, false);
+  x = emit_move_change_mode (imode, mode, x, force);
   if (x == NULL_RTX)
     return NULL_RTX;
-  y = emit_move_change_mode (imode, mode, y, false);
+  y = emit_move_change_mode (imode, mode, y, force);
   if (y == NULL_RTX)
     return NULL_RTX;
   return emit_insn (GEN_FCN (code) (x, y));
@@ -2816,6 +2852,19 @@ emit_move_resolve_push (enum machine_mode mode, rtx x)
 #endif
   if (code == PRE_DEC || code == POST_DEC)
     adjust = -adjust;
+  else if (code == PRE_MODIFY || code == POST_MODIFY)
+    {
+      rtx expr = XEXP (XEXP (x, 0), 1);
+      HOST_WIDE_INT val;
+
+      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
+      gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
+      val = INTVAL (XEXP (expr, 1));
+      if (GET_CODE (expr) == MINUS)
+        val = -val;
+      gcc_assert (adjust == val || adjust == -val);
+      adjust = val;
+    }
 
   /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
@@ -2829,13 +2878,13 @@ emit_move_resolve_push (enum machine_mode mode, rtx x)
     {
     case PRE_INC:
     case PRE_DEC:
+    case PRE_MODIFY:
       temp = stack_pointer_rtx;
       break;
     case POST_INC:
-      temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
-      break;
     case POST_DEC:
-      temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
+    case POST_MODIFY:
+      temp = plus_constant (stack_pointer_rtx, -adjust);
       break;
     default:
       gcc_unreachable ();
@@ -2937,7 +2986,7 @@ emit_move_complex (enum machine_mode mode, rtx x, rtx y)
          return get_last_insn ();
        }
 
-      ret = emit_move_via_integer (mode, x, y);
+      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
     }
@@ -2975,7 +3024,7 @@ emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
     }
 
   /* Otherwise, find the MODE_INT mode of the same width.  */
-  ret = emit_move_via_integer (mode, x, y);
+  ret = emit_move_via_integer (mode, x, y, false);
   gcc_assert (ret != NULL);
   return ret;
 }
@@ -3020,8 +3069,8 @@ emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
       rtx ypart = operand_subword (y, i, 1, mode);
 
       /* If we can't get a part of Y, put Y into memory if it is a
-        constant.  Otherwise, force it into a register.  If we still
-        can't get a part of Y, abort.  */
+        constant.  Otherwise, force it into a register.  Then we must
+        be able to get a part of Y.  */
       if (ypart == 0 && CONSTANT_P (y))
        {
          y = force_const_mem (mode, y);
@@ -3083,7 +3132,7 @@ emit_move_insn_1 (rtx x, rtx y)
      fits within a HOST_WIDE_INT.  */
   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
     {
-      rtx ret = emit_move_via_integer (mode, x, y);
+      rtx ret = emit_move_via_integer (mode, x, y, false);
       if (ret)
        return ret;
     }
@@ -3168,9 +3217,15 @@ compress_float_constant (rtx x, rtx y)
   enum machine_mode orig_srcmode = GET_MODE (y);
   enum machine_mode srcmode;
   REAL_VALUE_TYPE r;
+  int oldcost, newcost;
 
   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
 
+  if (LEGITIMATE_CONSTANT_P (y))
+    oldcost = rtx_cost (y, SET);
+  else
+    oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
+
   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
        srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
@@ -3195,12 +3250,23 @@ compress_float_constant (rtx x, rtx y)
            the extension.  */
         if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
           continue;
+         /* This is valid, but may not be cheaper than the original.  */
+         newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
+         if (oldcost < newcost)
+           continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
-       trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
+       {
+         trunc_y = force_const_mem (srcmode, trunc_y);
+         /* This is valid, but may not be cheaper than the original.  */
+         newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
+         if (oldcost < newcost)
+           continue;
+         trunc_y = validize_mem (trunc_y);
+       }
      else
        continue;
-
+
      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();
@@ -3544,7 +3610,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
      int not_stack;
      /* # bytes of start of argument
        that we must make space for but need not store.  */
-      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
+      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;
 
@@ -3562,8 +3628,9 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
        offset = 0;
 
      /* Now NOT_STACK gets the number of words that we don't need to
-        allocate on the stack.  */
+        allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
+      offset /= UNITS_PER_WORD;
 
      /* If the partial register-part of the arg counts in its stack size,
        skip the part of stack space corresponding to the registers.
@@ -3870,10 +3937,18 @@ expand_assignment (tree to, tree from)
 
   if (offset != 0)
     {
-      rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
+      rtx offset_rtx;
 
-      gcc_assert (MEM_P (to_rtx));
+      if (!MEM_P (to_rtx))
+        {
+          /* We can get constant negative offsets into arrays with broken
+             user code.  Translate this to a trap instead of ICEing.  */
+          gcc_assert (TREE_CODE (offset) == INTEGER_CST);
+          expand_builtin_trap ();
+          to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
+        }
 
+      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
 #ifdef POINTERS_EXTEND_UNSIGNED
       if (GET_MODE (offset_rtx) != Pmode)
        offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
@@ -4221,10 +4296,10 @@ store_expr (tree exp, rtx target, int call_param_p)
            but TARGET is not valid memory reference, TEMP will differ
            from TARGET although it is really the same location.  */
         && !(alt_rtl && rtx_equal_p (alt_rtl, target))
-         /* If there's nothing to copy, don't bother.  Don't call expr_size
-            unless necessary, because some front-ends (C++) expr_size-hook
-            aborts on objects that are not supposed to be bit-copied or
-            bit-initialized.  */
+         /* If there's nothing to copy, don't bother.  Don't call
+            expr_size unless necessary, because some front-ends (C++)
+            expr_size-hook must not be given objects that are not
+            supposed to be bit-copied or bit-initialized.  */
         && expr_size (exp) != const0_rtx)
        {
         if (GET_MODE (temp) != GET_MODE (target)
@@ -4304,7 +4379,7 @@ store_expr (tree exp, rtx target, int call_param_p)
           }
 
         if (size != const0_rtx)
-           clear_storage (target, size);
+           clear_storage (target, size, BLOCK_OP_NORMAL);
 
         if (label)
           emit_label (label);
@@ -4346,17 +4421,16 @@ categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
                            HOST_WIDE_INT *p_elt_count,
                            bool *p_must_clear)
 {
+  unsigned HOST_WIDE_INT idx;
   HOST_WIDE_INT nz_elts, nc_elts, elt_count;
-  tree list;
+  tree value, purpose;
 
   nz_elts = 0;
   nc_elts = 0;
   elt_count = 0;
 
-  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
+  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
     {
-      tree value = TREE_VALUE (list);
-      tree purpose = TREE_PURPOSE (list);
      HOST_WIDE_INT mult;
 
      mult = 1;
@@ -4430,14 +4504,16 @@ categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
         tree init_sub_type;
         bool clear_this = true;
 
-         list = CONSTRUCTOR_ELTS (ctor);
-         if (list)
+         if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
           {
             /* We don't expect more than one element of the union to be
                initialized.  Not sure what we should do otherwise... */
-             gcc_assert (TREE_CHAIN (list) == NULL);
+             gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
+                         == 1);
 
-             init_sub_type = TREE_TYPE (TREE_VALUE (list));
+             init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
+                                                   CONSTRUCTOR_ELTS (ctor),
+                                                   0)->value);
 
             /* ??? We could look at each element of the union, and find the
                largest element.  Which would avoid comparing the size of the
@@ -4449,7 +4525,7 @@ categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
                /* And now we have to find out if the element itself is fully
                   constructed.  E.g. for union { struct { int a, b; } s; } u
                   = { .s = { .a = 1 } }.  */
-                if (elt_count == count_type_elements (init_sub_type))
+                if (elt_count == count_type_elements (init_sub_type, false))
                  clear_this = false;
              }
           }
@@ -4477,10 +4553,11 @@ categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
 }
 
 /* Count the number of scalars in TYPE.  Return -1 on overflow or
-   variable-sized.  */
+   variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
+   array member at the end of the structure.  */
 
 HOST_WIDE_INT
-count_type_elements (tree type)
+count_type_elements (tree type, bool allow_flexarr)
 {
   const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
   switch (TREE_CODE (type))
@@ -4491,7 +4568,7 @@ count_type_elements (tree type)
        if (telts && host_integerp (telts, 1))
         {
           HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
-           HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
+           HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
           if (n == 0)
             return 0;
           else if (max / n > m)
@@ -4508,9 +4585,23 @@ count_type_elements (tree type)
        for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
         if (TREE_CODE (f) == FIELD_DECL)
           {
-             t = count_type_elements (TREE_TYPE (f));
+             t = count_type_elements (TREE_TYPE (f), false);
             if (t < 0)
-               return -1;
+               {
+                 /* Check for structures with flexible array member.  */
+                 tree tf = TREE_TYPE (f);
+                 if (allow_flexarr
+                     && TREE_CHAIN (f) == NULL
+                     && TREE_CODE (tf) == ARRAY_TYPE
+                     && TYPE_DOMAIN (tf)
+                     && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
+                     && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
+                     && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
+                     && int_size_in_bytes (type) >= 0)
+                   break;
+
+                 return -1;
+               }
             n += t;
           }
 
@@ -4568,13 +4659,31 @@ mostly_zeros_p (tree exp)
      if (must_clear)
        return 1;
 
-      elts = count_type_elements (TREE_TYPE (exp));
+      elts = count_type_elements (TREE_TYPE (exp), false);
 
      return nz_elts < elts / 4;
     }
 
   return initializer_zerop (exp);
 }
+
+/* Return 1 if EXP contains all zeros.  */
+
+static int
+all_zeros_p (tree exp)
+{
+  if (TREE_CODE (exp) == CONSTRUCTOR)
+
+    {
+      HOST_WIDE_INT nz_elts, nc_elts, count;
+      bool must_clear;
+
+      categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
+      return nz_elts == 0;
+    }
+
+  return initializer_zerop (exp);
+}
 
 /* Helper function for store_constructor.
    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
@@ -4647,7 +4756,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
     case UNION_TYPE:
     case QUAL_UNION_TYPE:
       {
-       tree elt;
+       unsigned HOST_WIDE_INT idx;
+       tree field, value;
 
       /* If size is zero or the target is already cleared, do nothing.  */
       if (size == 0 || cleared)
@@ -4658,7 +4768,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
                && ! CONSTRUCTOR_ELTS (exp))
         /* If the constructor is empty, clear the union.  */
         {
-           clear_storage (target, expr_size (exp));
+           clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
           cleared = 1;
         }
 
@@ -4679,14 +4789,14 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
          register whose mode size isn't equal to SIZE since
          clear_storage can't handle this case.  */
       else if (size > 0
-                && ((list_length (CONSTRUCTOR_ELTS (exp))
+                && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
                     != fields_length (type))
                    || mostly_zeros_p (exp))
                && (!REG_P (target)
                    || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
                        == size)))
         {
-           clear_storage (target, GEN_INT (size));
+           clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
           cleared = 1;
         }
 
@@ -4695,11 +4805,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
 
       /* Store each element of the constructor into the
          corresponding field of TARGET.  */
-
-       for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
         {
-           tree field = TREE_PURPOSE (elt);
-           tree value = TREE_VALUE (elt);
           enum machine_mode mode;
           HOST_WIDE_INT bitsize;
           HOST_WIDE_INT bitpos = 0;
@@ -4782,9 +4889,9 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
 
              if (BYTES_BIG_ENDIAN)
               value
-                  = fold (build2 (LSHIFT_EXPR, type, value,
-                                  build_int_cst (NULL_TREE,
-                                                 BITS_PER_WORD - bitsize)));
+                  = fold_build2 (LSHIFT_EXPR, type, value,
+                                 build_int_cst (NULL_TREE,
+                                                BITS_PER_WORD - bitsize));
             bitsize = BITS_PER_WORD;
             mode = word_mode;
           }
@@ -4805,8 +4912,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
       }
     case ARRAY_TYPE:
       {
-       tree elt;
-       int i;
+       tree value, index;
+       unsigned HOST_WIDE_INT i;
       int need_to_clear;
       tree domain;
       tree elttype = TREE_TYPE (type);
@@ -4836,18 +4943,20 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
         need_to_clear = 1;
       else
         {
+           unsigned HOST_WIDE_INT idx;
+           tree index, value;
           HOST_WIDE_INT count = 0, zero_count = 0;
           need_to_clear = ! const_bounds_p;
 
           /* This loop is a more accurate version of the loop in
              mostly_zeros_p (it handles RANGE_EXPR in an index).  It
             is also needed to check for missing elements.  */
-           for (elt = CONSTRUCTOR_ELTS (exp);
-                elt != NULL_TREE && ! need_to_clear;
-                elt = TREE_CHAIN (elt))
+           FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
             {
-               tree index = TREE_PURPOSE (elt);
               HOST_WIDE_INT this_node_count;
+
+               if (need_to_clear)
+                 break;
 
               if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                 {
@@ -4868,7 +4977,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
                 this_node_count = 1;
 
               count += this_node_count;
-               if (mostly_zeros_p (TREE_VALUE (elt)))
+               if (mostly_zeros_p (value))
                 zero_count += this_node_count;
             }
 
@@ -4886,7 +4995,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
           if (REG_P (target))
             emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
           else
-             clear_storage (target, GEN_INT (size));
+             clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
           cleared = 1;
         }
 
@@ -4897,16 +5006,12 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
 
       /* Store each element of the constructor into the
          corresponding element of TARGET, determined by counting the
         elements.  */
-       for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
-            elt;
-            elt = TREE_CHAIN (elt), i++)
+       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
         {
           enum machine_mode mode;
           HOST_WIDE_INT bitsize;
           HOST_WIDE_INT bitpos;
           int unsignedp;
-           tree value = TREE_VALUE (elt);
-           tree index = TREE_PURPOSE (elt);
           rtx xtarget = target;
 
           if (cleared && initializer_zerop (value))
             continue;
@@ -4985,8 +5090,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
 
              /* Assign value to element index.  */
              position = convert (ssizetype,
-                                  fold (build2 (MINUS_EXPR, TREE_TYPE (index),
-                                                index, TYPE_MIN_VALUE (domain))));
+                                  fold_build2 (MINUS_EXPR, TREE_TYPE (index),
+                                               index, TYPE_MIN_VALUE (domain)));
              position = size_binop (MULT_EXPR, position,
                                     convert (ssizetype,
                                              TYPE_SIZE_UNIT (elttype)));
@@ -5028,10 +5133,10 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
 
              if (minelt)
               index = fold_convert (ssizetype,
-                                      fold (build2 (MINUS_EXPR,
-                                                    TREE_TYPE (index),
-                                                    index,
-                                                    TYPE_MIN_VALUE (domain))));
+                                      fold_build2 (MINUS_EXPR,
+                                                   TREE_TYPE (index),
+                                                   index,
+                                                   TYPE_MIN_VALUE (domain)));
 
              position = size_binop (MULT_EXPR, index,
                                     convert (ssizetype,
@@ -5066,7 +5171,8 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
 
     case VECTOR_TYPE:
       {
-       tree elt;
+       unsigned HOST_WIDE_INT idx;
+       constructor_elt *ce;
       int i;
       int need_to_clear;
       int icode = 0;
@@ -5106,18 +5212,17 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
       else
         {
           unsigned HOST_WIDE_INT count = 0, zero_count = 0;
+           tree value;
 
-           for (elt = CONSTRUCTOR_ELTS (exp);
-                elt != NULL_TREE;
-                elt = TREE_CHAIN (elt))
+           FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
             {
               int n_elts_here = tree_low_cst
                 (int_const_binop (TRUNC_DIV_EXPR,
-                                   TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
+                                   TYPE_SIZE (TREE_TYPE (value)),
                                   TYPE_SIZE (elttype), 0), 1);
 
               count += n_elts_here;
-               if (mostly_zeros_p (TREE_VALUE (elt)))
+               if (mostly_zeros_p (value))
                 zero_count += n_elts_here;
             }
 
@@ -5131,30 +5236,29 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
           if (REG_P (target))
             emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
           else
-             clear_storage (target, GEN_INT (size));
+             clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
           cleared = 1;
         }
 
+       /* Inform later passes that the old value is dead.  */
       if (!cleared && REG_P (target))
-         /* Inform later passes that the old value is dead.  */
-         emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
+         emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
 
       /* Store each element of the constructor into the corresponding
          element of TARGET, determined by counting the elements.  */
-       for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
-            elt;
-            elt = TREE_CHAIN (elt), i += bitsize / elt_size)
+       for (idx = 0, i = 0;
+            VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
+            idx++, i += bitsize / elt_size)
         {
-           tree value = TREE_VALUE (elt);
-           tree index = TREE_PURPOSE (elt);
           HOST_WIDE_INT eltpos;
+           tree value = ce->value;
           bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
 
           if (cleared && initializer_zerop (value))
             continue;
 
-           if (index != 0)
-             eltpos = tree_low_cst (index, 1);
+           if (ce->index)
+             eltpos = tree_low_cst (ce->index, 1);
           else
             eltpos = i;
@@ -5484,8 +5588,8 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
            index, then convert to sizetype and multiply by the size of
            the array element.  */
         if (! integer_zerop (low_bound))
-           index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
-                                 index, low_bound));
+           index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
+                                index, low_bound);
 
         offset = size_binop (PLUS_EXPR, offset,
                              size_binop (MULT_EXPR,
@@ -5776,8 +5880,20 @@ force_operand (rtx value, rtx target)
     }
   if (UNARY_P (value))
     {
+      int unsignedp = 0;
+
       op1 = force_operand (XEXP (value, 0), NULL_RTX);
-      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
+      switch (code)
+       {
+       case ZERO_EXTEND: case UNSIGNED_FIX: case UNSIGNED_FLOAT:
+         unsignedp = 1;
+         /* fall through.  */
+       case TRUNCATE:
+       case SIGN_EXTEND: case FIX: case FLOAT:
+         return convert_to_mode (GET_MODE (value), op1, unsignedp);
+       default:
+         return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
+       }
     }
 
 #ifdef INSN_SCHEDULING
@@ -5999,7 +6115,7 @@ safe_from_p (rtx x, tree exp, int top_p)
 /* Return the highest power of two that EXP is known to be a multiple of.
    This is used in updating alignment of MEMs in array references.  */
 
-static unsigned HOST_WIDE_INT
+unsigned HOST_WIDE_INT
 highest_pow2_factor (tree exp)
 {
   unsigned HOST_WIDE_INT c0, c1;
@@ -6100,7 +6216,7 @@ expand_var (tree var)
             ? !TREE_ASM_WRITTEN (var)
             : !DECL_RTL_SET_P (var))
     {
-      if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
+      if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
       /* Should be ignored.  */;
      else if (lang_hooks.expand_decl (var))
       /* OK.  */;
@@ -6212,7 +6328,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
      /* If the DECL isn't in memory, then the DECL wasn't properly
        marked TREE_ADDRESSABLE, which will be either a front-end
        or a tree optimizer bug.  */
-      gcc_assert (GET_CODE (result) == MEM);
+      gcc_assert (MEM_P (result));
      result = XEXP (result, 0);
 
      /* ??? Is this needed anymore?  */
@@ -6255,7 +6371,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
         result = convert_memory_address (tmode, result);
         tmp = convert_memory_address (tmode, tmp);
 
-      if (modifier == EXPAND_SUM)
+      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
       result = gen_rtx_PLUS (tmode, result, tmp);
      else
       {
@@ -6397,7 +6513,7 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
      information.  It would be better of the diagnostic routines
      used the file/line information embedded in the tree nodes rather
      than globals.  */
-  if (cfun && EXPR_HAS_LOCATION (exp))
+  if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
     {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
@@ -6452,7 +6568,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
   optab this_optab;
   rtx subtarget, original_target;
   int ignore;
-  tree context;
+  tree context, subexp0, subexp1;
   bool reduce_bit_field = false;
 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore              \
                                 ? reduce_to_bit_field_precision ((expr), \
@@ -6531,18 +6647,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
      target = 0;
     }
 
-  /* If will do cse, generate all results into pseudo registers
-     since 1) that allows cse to find more things
-     and 2) otherwise cse could produce an insn the machine
-     cannot support.  An exception is a CONSTRUCTOR into a multi-word
-     MEM: that's much more likely to be most efficient into the MEM.
-     Another is a CALL_EXPR which must return in memory.  */
-
-  if (! cse_not_expected && mode != BLKmode && target
-      && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
-      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
-      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
-    target = 0;
-
   switch (code)
     {
@@ -6681,8 +6785,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
         || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
       return const_vector_from_tree (exp);
      else
-       return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
-                                   TREE_VECTOR_CST_ELTS (exp)),
+       return expand_expr (build_constructor_from_list
+                           (TREE_TYPE (exp),
+                            TREE_VECTOR_CST_ELTS (exp)),
                           ignore ? const0_rtx : target, tmode, modifier);
 
     case CONST_DECL:
@@ -6780,14 +6885,28 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
        subexpressions.  */
      if (ignore)
       {
-         tree elt;
+         unsigned HOST_WIDE_INT idx;
+         tree value;
 
-         for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
-           expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
+         FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
+           expand_expr (value, const0_rtx, VOIDmode, 0);
 
        return const0_rtx;
       }
 
+      /* Try to avoid creating a temporary at all.  This is possible
+        if all of the initializer is zero.
+        FIXME: try to handle all [0..255] initializers we can handle
+        with memset.  */
+      else if (TREE_STATIC (exp)
+              && !TREE_ADDRESSABLE (exp)
+              && target != 0 && mode == BLKmode
+              && all_zeros_p (exp))
+       {
+         clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
+         return target;
+       }
+
      /* All elts simple constants => refer to a constant in memory.  But
        if this is a non-BLKmode mode, let it store a field at a time
        since that should make a CONST_INT or CONST_DOUBLE when we
@@ -6846,7 +6965,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
     case INDIRECT_REF:
       {
       tree exp1 = TREE_OPERAND (exp, 0);
-       tree orig;
 
       if (modifier != EXPAND_WRITE)
         {
@@ -6869,10 +6987,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 
       temp = gen_rtx_MEM (mode, op0);
 
-       orig = REF_ORIGINAL (exp);
-       if (!orig)
-         orig = exp;
-       set_mem_attributes (temp, orig, 0);
+       set_mem_attributes (temp, exp, 0);
 
       /* Resolve the misalignment now, so that we don't have to remember
          to resolve it later.  Of course, this only works for reads.  */
@@ -6884,7 +6999,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
           int icode;
           rtx reg, insn;
 
-           gcc_assert (modifier == EXPAND_NORMAL);
+           gcc_assert (modifier == EXPAND_NORMAL
+                       || modifier == EXPAND_STACK_PARM);
 
           /* The vectorizer should have already checked the mode.  */
           icode = movmisalign_optab->handlers[mode].insn_code;
@@ -6904,6 +7020,18 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
       return temp;
       }
 
+    case TARGET_MEM_REF:
+      {
+       struct mem_address addr;
+
+       get_address_description (exp, &addr);
+       op0 = addr_for_mem_ref (&addr, true);
+       op0 = memory_address (mode, op0);
+       temp = gen_rtx_MEM (mode, op0);
+       set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
+      }
+      return temp;
+
     case ARRAY_REF:
       {
@@ -6937,16 +7065,17 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
           && ! TREE_SIDE_EFFECTS (array)
           && TREE_CODE (index) == INTEGER_CST)
         {
-           tree elem;
-
-           for (elem = CONSTRUCTOR_ELTS (array);
-                (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
-                elem = TREE_CHAIN (elem))
-             ;
+           unsigned HOST_WIDE_INT ix;
+           tree field, value;
 
-           if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
-             return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
-                                 modifier);
+           FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
+                                     field, value)
+             if (tree_int_cst_equal (field, index))
+               {
+                 if (!TREE_SIDE_EFFECTS (value))
+                   return expand_expr (fold (value), target, tmode, modifier);
+                 break;
+               }
         }
 
       else if (optimize >= 1
@@ -6964,17 +7093,18 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 
           if (TREE_CODE (init) == CONSTRUCTOR)
             {
-               tree elem;
-
-               for (elem = CONSTRUCTOR_ELTS (init);
-                    (elem
-                     && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
-                    elem = TREE_CHAIN (elem))
-                 ;
+               unsigned HOST_WIDE_INT ix;
+               tree field, value;
 
-               if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
-                 return expand_expr (fold (TREE_VALUE (elem)), target,
-                                     tmode, modifier);
+               FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
+                                         field, value)
+                 if (tree_int_cst_equal (field, index))
+                   {
+                     if (!TREE_SIDE_EFFECTS (value))
+                       return expand_expr (fold (value), target, tmode,
+                                           modifier);
+                     break;
+                   }
             }
           else if (TREE_CODE (init) == STRING_CST
                    && 0 > compare_tree_int (index,
@@ -6998,11 +7128,12 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
        appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
       {
-         tree elt;
+         unsigned HOST_WIDE_INT idx;
+         tree field, value;
 
-         for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
-              elt = TREE_CHAIN (elt))
-           if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
+         FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
+                                   idx, field, value)
+           if (field == TREE_OPERAND (exp, 1)
              /* We can normally use the value of the field in the
                CONSTRUCTOR.  However, if this is a bitfield in
                an integral mode that we can fit in a HOST_WIDE_INT,
@@ -7010,24 +7141,21 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                since this is done implicitly by the constructor.  If
                the bitfield does not meet either of those conditions,
                we can't do this optimization.  */
-             && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
-                 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
-                      == MODE_INT)
-                     && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
+             && (! DECL_BIT_FIELD (field)
+                 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
+                     && (GET_MODE_BITSIZE (DECL_MODE (field))
                         <= HOST_BITS_PER_WIDE_INT))))
           {
-             if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
+             if (DECL_BIT_FIELD (field)
                 && modifier == EXPAND_STACK_PARM)
               target = 0;
-             op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
-             if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
+             op0 = expand_expr (value, target, tmode, modifier);
+             if (DECL_BIT_FIELD (field))
               {
-                 HOST_WIDE_INT bitsize
-                   = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
-                 enum machine_mode imode
-                   = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
+                 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
+                 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
 
-                 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
+                 if (TYPE_UNSIGNED (TREE_TYPE (field)))
                   {
                     op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
                     op0 = expand_and (imode, op0, op1, target);
@@ -7084,25 +7212,30 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                       || modifier == EXPAND_STACK_PARM)
                      ? modifier : EXPAND_NORMAL);
 
-      /* If this is a constant, put it into a register if it is a
-        legitimate constant and OFFSET is 0 and memory if it isn't.  */
+      /* If this is a constant, put it into a register if it is a legitimate
+        constant, OFFSET is 0, and we won't try to extract outside the
+        register (in case we were passed a partially uninitialized object
+        or a view_conversion to a larger size).  Force the constant to
+        memory otherwise.  */
       if (CONSTANT_P (op0))
       {
         enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
         if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
-             && offset == 0)
+             && offset == 0
+             && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
           op0 = force_reg (mode, op0);
         else
           op0 = validize_mem (force_const_mem (mode, op0));
       }
 
-      /* Otherwise, if this object not in memory and we either have an
-        offset or a BLKmode result, put it there.  This case can't occur in
-        C, but can in Ada if we have unchecked conversion of an expression
-        from a scalar type to an array or record type or for an
-        ARRAY_RANGE_REF whose type is BLKmode.  */
+      /* Otherwise, if this object not in memory and we either have an
+        offset, a BLKmode result, or a reference outside the object, put it
+        there.  Such cases can occur in Ada if we have unchecked conversion
+        of an expression from a scalar type to an array or record type or
+        for an ARRAY_RANGE_REF whose type is BLKmode.  */
       else if (!MEM_P (op0)
              && (offset != 0
+                  || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
                  || (code == ARRAY_RANGE_REF && mode == BLKmode)))
       {
         tree nt = build_qualified_type (TREE_TYPE (tem),
@@ -7442,18 +7575,27 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
     case VIEW_CONVERT_EXPR:
       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
 
-      /* If the input and output modes are both the same, we are done.
-        Otherwise, if neither mode is BLKmode and both are integral and within
-        a word, we can use gen_lowpart.  If neither is true, make sure the
-        operand is in memory and convert the MEM to the new mode.  */
+      /* If the input and output modes are both the same, we are done.  */
       if (TYPE_MODE (type) == GET_MODE (op0))
       ;
+      /* If neither mode is BLKmode, and both modes are the same size
+        then we can use gen_lowpart.  */
       else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
-              && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
-              && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
-              && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
-              && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
-       op0 = gen_lowpart (TYPE_MODE (type), op0);
+              && GET_MODE_SIZE (TYPE_MODE (type))
+                  == GET_MODE_SIZE (GET_MODE (op0)))
+       {
+         if (GET_CODE (op0) == SUBREG)
+           op0 = force_reg (GET_MODE (op0), op0);
+         op0 = gen_lowpart (TYPE_MODE (type), op0);
+       }
+      /* If both modes are integral, then we can convert from one to the
+        other.  */
+      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
+              && SCALAR_INT_MODE_P (TYPE_MODE (type)))
+       op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
+                            TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
+      /* As a last resort, spill op0 to memory, and reload it in a
+        different mode.  */
      else if (!MEM_P (op0))
       {
         /* If the operand is not a MEM, force it into memory.  Since we
@@ -7574,7 +7716,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
       }
 
      else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
-              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
+              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
       {
         rtx constant_part;
@@ -7707,7 +7849,43 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
        from a narrower type.  If this machine supports multiplying
        in that narrower type with a result in the desired type,
        do it that way, and avoid the explicit type-conversion.  */
-      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
+
+      subexp0 = TREE_OPERAND (exp, 0);
+      subexp1 = TREE_OPERAND (exp, 1);
+      /* First, check if we have a multiplication of one signed and one
+        unsigned operand.  */
+      if (TREE_CODE (subexp0) == NOP_EXPR
+         && TREE_CODE (subexp1) == NOP_EXPR
+         && TREE_CODE (type) == INTEGER_TYPE
+         && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
+             < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
+         && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
+             == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
+         && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
+             != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
+       {
+         enum machine_mode innermode
+           = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
+         this_optab = usmul_widen_optab;
+         if (mode == GET_MODE_WIDER_MODE (innermode))
+           {
+             if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
+               {
+                 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
+                   expand_operands (TREE_OPERAND (subexp0, 0),
+                                    TREE_OPERAND (subexp1, 0),
+                                    NULL_RTX, &op0, &op1, 0);
+                 else
+                   expand_operands (TREE_OPERAND (subexp0, 0),
+                                    TREE_OPERAND (subexp1, 0),
+                                    NULL_RTX, &op1, &op0, 0);
+
+                 goto binop3;
+               }
+           }
+       }
+      /* Check for a multiplication with matching signedness.  */
+      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
         && TREE_CODE (type) == INTEGER_TYPE
         && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
             < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
@@ -7739,7 +7917,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
         optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
         this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
 
-         if (mode == GET_MODE_WIDER_MODE (innermode))
+         if (mode == GET_MODE_2XWIDER_MODE (innermode))
           {
             if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
               {
@@ -7798,18 +7976,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
 
     case RDIV_EXPR:
-      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
-        expensive divide.  If not, combine will rebuild the original
-        computation.  */
-      if (flag_unsafe_math_optimizations && optimize && !optimize_size
-         && TREE_CODE (type) == REAL_TYPE
-         && !real_onep (TREE_OPERAND (exp, 0)))
-       return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
-                                   build2 (RDIV_EXPR, type,
-                                           build_real (type, dconst1),
-                                           TREE_OPERAND (exp, 1))),
-                           target, tmode, modifier);
-
       goto binop;
 
     case TRUNC_MOD_EXPR:
@@ -7915,69 +8081,92 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
       if (! CONSTANT_P (op1))
       op1 = force_reg (mode, op1);
 
-#ifdef HAVE_conditional_move
-      /* Use a conditional move if possible.  */
-      if (can_conditionally_move_p (mode))
-       {
-         enum rtx_code comparison_code;
-         rtx insn;
+      {
+       enum rtx_code comparison_code;
+       rtx cmpop1 = op1;
 
-         if (code == MAX_EXPR)
-           comparison_code = unsignedp ? GEU : GE;
-         else
-           comparison_code = unsignedp ? LEU : LE;
+       if (code == MAX_EXPR)
+         comparison_code = unsignedp ? GEU : GE;
+       else
+         comparison_code = unsignedp ? LEU : LE;
 
-         /* ??? Same problem as in expmed.c: emit_conditional_move
-            forces a stack adjustment via compare_from_rtx, and we
-            lose the stack adjustment if the sequence we are about
-            to create is discarded.  */
-         do_pending_stack_adjust ();
+       /* Canonicalize to comparisons against 0.  */
+       if (op1 == const1_rtx)
+         {
+           /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
+              or (a != 0 ? a : 1) for unsigned.
+              For MIN we are safe converting (a <= 1 ? a : 1)
+              into (a <= 0 ? a : 1) */
+           cmpop1 = const0_rtx;
+           if (code == MAX_EXPR)
+             comparison_code = unsignedp ? NE : GT;
+         }
+       if (op1 == constm1_rtx && !unsignedp)
+         {
+           /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
+              and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
+           cmpop1 = const0_rtx;
+           if (code == MIN_EXPR)
+             comparison_code = LT;
+         }
+#ifdef HAVE_conditional_move
+       /* Use a conditional move if possible.  */
+       if (can_conditionally_move_p (mode))
+         {
+           rtx insn;
 
-         start_sequence ();
+           /* ??? Same problem as in expmed.c: emit_conditional_move
+              forces a stack adjustment via compare_from_rtx, and we
+              lose the stack adjustment if the sequence we are about
+              to create is discarded.  */
+           do_pending_stack_adjust ();
 
-         /* Try to emit the conditional move.  */
-         insn = emit_conditional_move (target, comparison_code,
-                                       op0, op1, mode,
-                                       op0, op1, mode,
-                                       unsignedp);
+           start_sequence ();
 
-         /* If we could do the conditional move, emit the sequence,
-            and return.  */
-         if (insn)
-           {
-             rtx seq = get_insns ();
-             end_sequence ();
-             emit_insn (seq);
-             return target;
-           }
+           /* Try to emit the conditional move.  */
+           insn = emit_conditional_move (target, comparison_code,
+                                         op0, cmpop1, mode,
+                                         op0, op1, mode,
+                                         unsignedp);
 
-         /* Otherwise discard the sequence and fall back to code with
-            branches.  */
-         end_sequence ();
-       }
+           /* If we could do the conditional move, emit the sequence,
+              and return.  */
+           if (insn)
+             {
+               rtx seq = get_insns ();
+               end_sequence ();
+               emit_insn (seq);
+               return target;
+             }
+
+           /* Otherwise discard the sequence and fall back to code with
+              branches.  */
+           end_sequence ();
+         }
 #endif
-      if (target != op0)
-       emit_move_insn (target, op0);
+       if (target != op0)
+         emit_move_insn (target, op0);
 
-      temp = gen_label_rtx ();
+       temp = gen_label_rtx ();
 
-      /* If this mode is an integer too wide to compare properly,
-        compare word by word.  Rely on cse to optimize constant cases.  */
-      if (GET_MODE_CLASS (mode) == MODE_INT
-         && ! can_compare_p (GE, mode, ccp_jump))
-       {
-         if (code == MAX_EXPR)
-           do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
-                                         NULL_RTX, temp);
-         else
-           do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
-                                         NULL_RTX, temp);
-       }
-      else
-       {
-         do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
-                                  unsignedp, mode, NULL_RTX, NULL_RTX, temp);
-       }
+       /* If this mode is an integer too wide to compare properly,
+          compare word by word.  Rely on cse to optimize constant cases.  */
+       if (GET_MODE_CLASS (mode) == MODE_INT
+           && ! can_compare_p (GE, mode, ccp_jump))
+         {
+           if (code == MAX_EXPR)
+             do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
+                                           NULL_RTX, temp);
+           else
+             do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
+                                           NULL_RTX, temp);
+         }
+       else
+         {
+           do_compare_rtx_and_jump (target, cmpop1, comparison_code,
+                                    unsignedp, mode, NULL_RTX, NULL_RTX, temp);
+         }
+      }
       emit_move_insn (target, op1);
       emit_label (temp);
       return target;
@@ -8340,11 +8529,27 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
       op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
       temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
                                 target, unsignedp);
-       if (temp == 0)
-         abort ();
+       gcc_assert (temp);
+       return temp;
+      }
+
+    case REDUC_MAX_EXPR:
+    case REDUC_MIN_EXPR:
+    case REDUC_PLUS_EXPR:
+      {
+       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
+       this_optab = optab_for_tree_code (code, type);
+       temp = expand_unop (mode, this_optab, op0, target, unsignedp);
+       gcc_assert (temp);
       return temp;
       }
 
+    case VEC_LSHIFT_EXPR:
+    case VEC_RSHIFT_EXPR:
+      {
+       target = expand_vec_shift_expr (exp, target);
+       return target;
+      }
+
     default:
       return lang_hooks.expand_expr (exp, original_target, tmode,
@@ -8726,8 +8931,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
   if ((code == LT && integer_zerop (arg1))
       || (! only_cheap && code == GE && integer_zerop (arg1)))
     ;
-  else if (BRANCH_COST >= 0
-          && ! only_cheap && (code == NE || code == EQ)
+  else if (! only_cheap && (code == NE || code == EQ)
          && TREE_CODE (type) != REAL_TYPE
         && ((abs_optab->handlers[(int) operand_mode].insn_code
              != CODE_FOR_nothing)
@@ -8971,9 +9175,9 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
   if (! HAVE_tablejump)
     return 0;
 
-  index_expr = fold (build2 (MINUS_EXPR, index_type,
-                            convert (index_type, index_expr),
-                            convert (index_type, minval)));
+  index_expr = fold_build2 (MINUS_EXPR, index_type,
+                           convert (index_type, index_expr),
+                           convert (index_type, minval));
   index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
   do_pending_stack_adjust ();