diff --git a/gcc/expr.c b/gcc/expr.c
index e29f3f6f4f9..4c248e06155 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -1,7 +1,7 @@
 /* Convert tree expression to rtl instructions, for GNU compiler.
    Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
-   Free Software Foundation, Inc.
+   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+   2012 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -123,9 +123,6 @@ struct store_by_pieces_d
   int reverse;
 };
 
-static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
-						     unsigned int,
-						     unsigned int);
 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
 			      struct move_by_pieces_d *);
 static bool block_move_libcall_safe_for_call_parm (void);
@@ -1016,7 +1013,7 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
 /* Return number of insns required to move L bytes by pieces.
    ALIGN (in bits) is maximum alignment we can assume.  */
 
-static unsigned HOST_WIDE_INT
+unsigned HOST_WIDE_INT
 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
 		       unsigned int max_size)
 {
@@ -2180,17 +2177,122 @@ copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
   return tgtblk;
 }
 
+/* Copy BLKmode value SRC into a register of mode MODE.  Return the
+   register if it contains any data, otherwise return null.
+
+   This is used on targets that return BLKmode values in registers.  */
+
+rtx
+copy_blkmode_to_reg (enum machine_mode mode, tree src)
+{
+  int i, n_regs;
+  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
+  unsigned int bitsize;
+  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
+  enum machine_mode dst_mode;
+
+  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
+
+  x = expand_normal (src);
+
+  bytes = int_size_in_bytes (TREE_TYPE (src));
+  if (bytes == 0)
+    return NULL_RTX;
+
+  /* If the structure doesn't take up a whole number of words, see
+     whether the register value should be padded on the left or on
+     the right.  Set PADDING_CORRECTION to the number of padding
+     bits needed on the left side.
+
+     In most ABIs, the structure will be returned at the least
+     significant end of the register, which translates to right padding
+     on little-endian targets and left padding on big-endian targets.
+     The opposite holds if the structure is returned at the most
+     significant end of the register.  */
+  if (bytes % UNITS_PER_WORD != 0
+      && (targetm.calls.return_in_msb (TREE_TYPE (src))
+	  ? !BYTES_BIG_ENDIAN
+	  : BYTES_BIG_ENDIAN))
+    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
+					   * BITS_PER_UNIT));
+
+  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
+  dst_words = XALLOCAVEC (rtx, n_regs);
+  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
+
+  /* Copy the structure BITSIZE bits at a time.  */
+  for (bitpos = 0, xbitpos = padding_correction;
+       bitpos < bytes * BITS_PER_UNIT;
+       bitpos += bitsize, xbitpos += bitsize)
+    {
+      /* We need a new destination pseudo each time xbitpos is
+	 on a word boundary and when xbitpos == padding_correction
+	 (the first time through).  */
+      if (xbitpos % BITS_PER_WORD == 0
+	  || xbitpos == padding_correction)
+	{
+	  /* Generate an appropriate register.  */
+	  dst_word = gen_reg_rtx (word_mode);
+	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
+
+	  /* Clear the destination before we move anything into it.  */
+	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
+	}
+
+      /* We need a new source operand each time bitpos is on a word
+	 boundary.  */
+      if (bitpos % BITS_PER_WORD == 0)
+	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
+
+      /* Use bitpos for the source extraction (left justified) and
+	 xbitpos for the destination store (right justified).  */
+      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
+		       0, 0, word_mode,
+		       extract_bit_field (src_word, bitsize,
+					  bitpos % BITS_PER_WORD, 1, false,
+					  NULL_RTX, word_mode, word_mode));
+    }
+
+  if (mode == BLKmode)
+    {
+      /* Find the smallest integer mode large enough to hold the
+	 entire structure.  */
+      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
+	   mode != VOIDmode;
+	   mode = GET_MODE_WIDER_MODE (mode))
+	/* Have we found a large enough mode?  */
+	if (GET_MODE_SIZE (mode) >= bytes)
+	  break;
+
+      /* A suitable mode should have been found.  */
+      gcc_assert (mode != VOIDmode);
+    }
+
+  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
+    dst_mode = word_mode;
+  else
+    dst_mode = mode;
+  dst = gen_reg_rtx (dst_mode);
+
+  for (i = 0; i < n_regs; i++)
+    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
+
+  if (mode != dst_mode)
+    dst = gen_lowpart (mode, dst);
+
+  return dst;
+}
+
 /* Add a USE expression for REG to the (possibly empty) list pointed
    to by CALL_FUSAGE.  REG must denote a hard register.  */
 
 void
-use_reg (rtx *call_fusage, rtx reg)
+use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
 {
   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
 
   *call_fusage
-    = gen_rtx_EXPR_LIST (VOIDmode,
-			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
+    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
 }
 
 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
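The situation copy_blkmode_to_reg handles is easiest to picture from the source side: a small aggregate whose size is not a whole number of words, returned by value on a target whose ABI uses registers for such returns. A minimal sketch (the struct, names, and the little-endian result are illustrative assumptions, not part of the patch):

/* Illustration only: a 3-byte struct occupies part of one word when a
   target returns it in a register.  copy_blkmode_to_reg copies the
   bytes bit by bit into word-mode pseudos, adding PADDING_CORRECTION
   bits of left padding when the ABI returns the value at the most
   significant end.  */
#include <stdio.h>
#include <string.h>

struct rgb { unsigned char r, g, b; };   /* 3 bytes: not a whole word */

struct rgb make_rgb (void)               /* BLKmode value */
{
  struct rgb c = { 1, 2, 3 };
  return c;                              /* may come back in one register */
}

int main (void)
{
  struct rgb c = make_rgb ();
  unsigned long word = 0;
  memcpy (&word, &c, sizeof c);          /* view the bytes as one word */
  printf ("%#lx\n", word);               /* 0x30201 on little-endian */
  return 0;
}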
@@ -3540,9 +3642,11 @@ mem_autoinc_base (rtx mem)
    (1) One or more auto-inc style memory references (aka pushes),
    (2) One or more addition/subtraction with the SP as destination,
    (3) A single move insn with the SP as destination,
-   (4) A call_pop insn.
+   (4) A call_pop insn,
+   (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
 
-   Insns in the sequence that do not modify the SP are ignored.
+   Insns in the sequence that do not modify the SP are ignored,
+   except for noreturn calls.
 
    The return value is the amount of adjustment that can be trivially
    verified, via immediate operand or auto-inc.  If the adjustment
@@ -3687,7 +3791,12 @@ fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
 
       this_delta = find_args_size_adjust (insn);
       if (this_delta == 0)
-	continue;
+	{
+	  if (!CALL_P (insn)
+	      || ACCUMULATE_OUTGOING_ARGS
+	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
+	    continue;
+	}
 
       gcc_assert (!saw_unknown);
       if (this_delta == HOST_WIDE_INT_MIN)
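The new case (5) matters because a call that cannot return leaves any pushed arguments on the stack forever: the pop that would normally rebalance REG_ARGS_SIZE is never emitted, so the bookkeeping must notice the call instead of skipping it. A hedged source-level illustration (function names hypothetical):

/* Illustration only: after a noreturn call, the stack-pointer
   adjustment that would pop the pushed arguments is unreachable,
   which is why fixup_args_size_notes may not simply ignore the call.  */
#include <stdlib.h>

void fatal (const char *msg, int code) __attribute__ ((noreturn));

void fatal (const char *msg, int code)
{
  (void) msg;
  exit (code);                     /* never returns to the caller */
}

int check (int x)
{
  if (x < 0)
    fatal ("negative input", 2);   /* args may be pushed, never popped */
  return x * 2;
}

int main (void) { return check (21) == 42 ? 0 : 1; }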
@@ -4330,113 +4439,105 @@ optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
 /* In the C++ memory model, consecutive bit fields in a structure are
    considered one memory location.
 
-   Given a COMPONENT_REF, this function returns the bit range of
-   consecutive bits in which this COMPONENT_REF belongs in.  The
-   values are returned in *BITSTART and *BITEND.  If either the C++
-   memory model is not activated, or this memory access is not thread
-   visible, 0 is returned in *BITSTART and *BITEND.
-
-   EXP is the COMPONENT_REF.
-   INNERDECL is the actual object being referenced.
-   BITPOS is the position in bits where the bit starts within the structure.
-   BITSIZE is size in bits of the field being referenced in EXP.
+   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
+   returns the bit range of consecutive bits in which this COMPONENT_REF
+   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
+   and *OFFSET may be adjusted in the process.
 
-   For example, while storing into FOO.A here...
-
-      struct {
-        BIT 0:
-          unsigned int a : 4;
-	  unsigned int b : 1;
-	BIT 8:
-	  unsigned char c;
-	  unsigned int d : 6;
-      } foo;
-
-   ...we are not allowed to store past <b>, so for the layout above, a
-   range of 0..7 (because no one cares if we store into the
-   padding).  */
+   If the access does not need to be restricted, 0 is returned in both
+   *BITSTART and *BITEND.  */
 
 static void
 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
 	       unsigned HOST_WIDE_INT *bitend,
-	       tree exp, tree innerdecl,
-	       HOST_WIDE_INT bitpos, HOST_WIDE_INT bitsize)
+	       tree exp,
+	       HOST_WIDE_INT *bitpos,
+	       tree *offset)
 {
-  tree field, record_type, fld;
-  bool found_field = false;
-  bool prev_field_is_bitfield;
+  HOST_WIDE_INT bitoffset;
+  tree field, repr;
 
   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
 
-  /* If other threads can't see this value, no need to restrict stores.  */
-  if (ALLOW_STORE_DATA_RACES
-      || ((TREE_CODE (innerdecl) == MEM_REF
-	   || TREE_CODE (innerdecl) == TARGET_MEM_REF)
-	  && !ptr_deref_may_alias_global_p (TREE_OPERAND (innerdecl, 0)))
-      || (DECL_P (innerdecl)
-	  && ((TREE_CODE (innerdecl) == VAR_DECL
-	       && DECL_THREAD_LOCAL_P (innerdecl))
-	      || !TREE_STATIC (innerdecl))))
+  field = TREE_OPERAND (exp, 1);
+  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
+     need to limit the range we can access.  */
+  if (!repr)
     {
       *bitstart = *bitend = 0;
       return;
     }
 
-  /* Bit field we're storing into.  */
-  field = TREE_OPERAND (exp, 1);
-  record_type = DECL_FIELD_CONTEXT (field);
-
-  /* Count the contiguous bitfields for the memory location that
-     contains FIELD.  */
-  *bitstart = 0;
-  prev_field_is_bitfield = true;
-  for (fld = TYPE_FIELDS (record_type); fld; fld = DECL_CHAIN (fld))
+  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
+     part of a larger bit field, then the representative does not serve any
+     useful purpose.  This can occur in Ada.  */
+  if (handled_component_p (TREE_OPERAND (exp, 0)))
     {
-      tree t, offset;
-      enum machine_mode mode;
-      int unsignedp, volatilep;
-
-      if (TREE_CODE (fld) != FIELD_DECL)
-	continue;
-
-      t = build3 (COMPONENT_REF, TREE_TYPE (exp),
-		  unshare_expr (TREE_OPERAND (exp, 0)),
-		  fld, NULL_TREE);
-      get_inner_reference (t, &bitsize, &bitpos, &offset,
-			   &mode, &unsignedp, &volatilep, true);
-
-      if (field == fld)
-	found_field = true;
-
-      if (DECL_BIT_FIELD_TYPE (fld) && bitsize > 0)
-	{
-	  if (prev_field_is_bitfield == false)
-	    {
-	      *bitstart = bitpos;
-	      prev_field_is_bitfield = true;
-	    }
-	}
-      else
+      enum machine_mode rmode;
+      HOST_WIDE_INT rbitsize, rbitpos;
+      tree roffset;
+      int unsignedp;
+      int volatilep = 0;
+      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
+			   &roffset, &rmode, &unsignedp, &volatilep, false);
+      if ((rbitpos % BITS_PER_UNIT) != 0)
 	{
-	  prev_field_is_bitfield = false;
-	  if (found_field)
-	    break;
+	  *bitstart = *bitend = 0;
+	  return;
 	}
     }
-  gcc_assert (found_field);
 
-  if (fld)
-    {
-      /* We found the end of the bit field sequence.  Include the
-	 padding up to the next field and be done.  */
-      *bitend = bitpos - 1;
-    }
+  /* Compute the adjustment to bitpos from the offset of the field
+     relative to the representative.  DECL_FIELD_OFFSET of field and
+     repr are the same by construction if they are not constants,
+     see finish_bitfield_layout.  */
+  if (host_integerp (DECL_FIELD_OFFSET (field), 1)
+      && host_integerp (DECL_FIELD_OFFSET (repr), 1))
+    bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
+		 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
   else
+    bitoffset = 0;
+  bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+		- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+
+  /* If the adjustment is larger than bitpos, we would have a negative bit
+     position for the lower bound and this may wreak havoc later.  This can
+     occur only if we have a non-null offset, so adjust offset and bitpos
+     to make the lower bound non-negative.  */
+  if (bitoffset > *bitpos)
     {
-      /* If this is the last element in the structure, include the padding
-	 at the end of structure.  */
-      *bitend = TREE_INT_CST_LOW (TYPE_SIZE (record_type)) - 1;
+      HOST_WIDE_INT adjust = bitoffset - *bitpos;
+
+      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
+      gcc_assert (*offset != NULL_TREE);
+
+      *bitpos += adjust;
+      *offset
+	= size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
+      *bitstart = 0;
     }
+  else
+    *bitstart = *bitpos - bitoffset;
+
+  *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
+}
+
+/* Returns true if the MEM_REF REF refers to an object that does not
+   reside in memory and has non-BLKmode.  */
+
+static bool
+mem_ref_refers_to_non_mem_p (tree ref)
+{
+  tree base = TREE_OPERAND (ref, 0);
+  if (TREE_CODE (base) != ADDR_EXPR)
+    return false;
+  base = TREE_OPERAND (base, 0);
+  return (DECL_P (base)
+	  && !TREE_ADDRESSABLE (base)
+	  && DECL_MODE (base) != BLKmode
+	  && DECL_RTL_SET_P (base)
+	  && !MEM_P (DECL_RTL (base)));
}
 
 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
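What the rewritten get_bit_range computes can be seen at the source level: under the C++11 memory model, adjacent bit-fields form one memory location, and a store to one of them may read-modify-write only the DECL_BIT_FIELD_REPRESENTATIVE that covers that location, never the neighbouring members. A sketch (the exact layout is illustrative, not mandated by the standard):

/* Illustration only: a and b share one memory location (together with
   the padding after b); c and d are separate locations because a
   non-bit-field member intervenes.  A store to foo.a may use a
   read-modify-write of the a/b representative, but it must not touch
   the byte holding c.  */
struct S {
  unsigned int a : 4;   /* one location together with b */
  unsigned int b : 1;
  unsigned char c;      /* separate memory location */
  unsigned int d : 6;   /* separate again */
};

struct S foo;

void set_a (void)
{
  foo.a = 7;   /* must not data-race with a concurrent write to foo.c */
}

int main (void) { set_a (); return foo.a == 7 ? 0 : 1; }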
@@ -4448,7 +4549,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
   rtx to_rtx = 0;
   rtx result;
   enum machine_mode mode;
-  int align;
+  unsigned int align;
   enum insn_code icode;
 
   /* Don't crash if the lhs of the assignment was erroneous.  */
@@ -4462,15 +4563,19 @@ expand_assignment (tree to, tree from, bool nontemporal)
   if (operand_equal_p (to, from, 0))
     return;
 
+  /* Handle misaligned stores.  */
   mode = TYPE_MODE (TREE_TYPE (to));
   if ((TREE_CODE (to) == MEM_REF
        || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
-      && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)), get_object_alignment (to)))
-	  < (signed) GET_MODE_ALIGNMENT (mode))
+      && !mem_ref_refers_to_non_mem_p (to)
+      && ((align = get_object_or_type_alignment (to))
+	  < GET_MODE_ALIGNMENT (mode))
      && ((icode = optab_handler (movmisalign_optab, mode))
	  != CODE_FOR_nothing))
     {
+      addr_space_t as
+	= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
       struct expand_operand ops[2];
       enum machine_mode address_mode;
       rtx reg, op0, mem;
@@ -4480,8 +4585,6 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       if (TREE_CODE (to) == MEM_REF)
	{
-	  addr_space_t as
-	      = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
	  tree base = TREE_OPERAND (to, 0);
	  address_mode = targetm.addr_space.address_mode (as);
	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
@@ -4489,7 +4592,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
	  if (!integer_zerop (TREE_OPERAND (to, 1)))
	    {
	      rtx off
-		  = immed_double_int_const (mem_ref_offset (to), address_mode);
+		= immed_double_int_const (mem_ref_offset (to), address_mode);
	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    }
	  op0 = memory_address_addr_space (mode, op0, as);
@@ -4499,9 +4602,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
	}
       else if (TREE_CODE (to) == TARGET_MEM_REF)
	{
-	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
	  struct mem_address addr;
-
	  get_address_description (to, &addr);
	  op0 = addr_for_mem_ref (&addr, as, true);
	  op0 = memory_address_addr_space (mode, op0, as);
@@ -4517,7 +4618,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
       create_fixed_operand (&ops[0], mem);
       create_input_operand (&ops[1], reg, mode);
       /* The movmisalign pattern cannot fail, else the assignment would
-	 silently be omitted.  */
+	 silently be omitted.  */
       expand_insn (icode, 2, ops);
       return;
     }
@@ -4526,12 +4627,10 @@ expand_assignment (tree to, tree from, bool nontemporal)
      if the structure component's rtx is not simply a MEM.
      Assignment of an array element at a constant index, and assignment of
      an array element in an unaligned packed structure field, has the same
-     problem.  */
+     problem.  Same for (partially) storing into a non-memory object.  */
   if (handled_component_p (to)
-      /* ???  We only need to handle MEM_REF here if the access is not
-         a full access of the base object.  */
       || (TREE_CODE (to) == MEM_REF
-	  && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
+	  && mem_ref_refers_to_non_mem_p (to))
       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
     {
       enum machine_mode mode1;
@@ -4542,6 +4641,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
       int unsignedp;
       int volatilep = 0;
       tree tem;
+      bool misalignp;
+      rtx mem = NULL_RTX;
 
       push_temp_slots ();
       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
@@ -4549,13 +4650,62 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
-	get_bit_range (&bitregion_start, &bitregion_end,
-		       to, tem, bitpos, bitsize);
+	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
 
       /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */
+      mode = TYPE_MODE (TREE_TYPE (tem));
+      if (TREE_CODE (tem) == MEM_REF
+	  && mode != BLKmode
+	  && ((align = get_object_or_type_alignment (tem))
+	      < GET_MODE_ALIGNMENT (mode))
+	  && ((icode = optab_handler (movmisalign_optab, mode))
+	      != CODE_FOR_nothing))
+	{
+	  enum machine_mode address_mode;
+	  rtx op0;
+	  struct expand_operand ops[2];
+	  addr_space_t as = TYPE_ADDR_SPACE
+	    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
+	  tree base = TREE_OPERAND (tem, 0);
+
+	  misalignp = true;
+	  to_rtx = gen_reg_rtx (mode);
+
+	  address_mode = targetm.addr_space.address_mode (as);
+	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+	  op0 = convert_memory_address_addr_space (address_mode, op0, as);
+	  if (!integer_zerop (TREE_OPERAND (tem, 1)))
+	    {
+	      rtx off = immed_double_int_const (mem_ref_offset (tem),
+						address_mode);
+	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
+	    }
+	  op0 = memory_address_addr_space (mode, op0, as);
+	  mem = gen_rtx_MEM (mode, op0);
+	  set_mem_attributes (mem, tem, 0);
+	  set_mem_addr_space (mem, as);
+	  if (TREE_THIS_VOLATILE (tem))
+	    MEM_VOLATILE_P (mem) = 1;
+
+	  /* If the misaligned store doesn't overwrite all bits, perform
+	     rmw cycle on MEM.  */
+	  if (bitsize != GET_MODE_BITSIZE (mode))
+	    {
+	      create_input_operand (&ops[0], to_rtx, mode);
+	      create_fixed_operand (&ops[1], mem);
+	      /* The movmisalign pattern cannot fail, else the assignment
+		 would silently be omitted.  */
+	      expand_insn (icode, 2, ops);
-      to_rtx = expand_normal (tem);
+	      mem = copy_rtx (mem);
+	    }
+	}
+      else
+	{
+	  misalignp = false;
+	  to_rtx = expand_normal (tem);
+	}
 
     /* If the bitfield is volatile, we want to access it in the
        field's mode, not the computed mode.
@@ -4701,6 +4851,17 @@ expand_assignment (tree to, tree from, bool nontemporal)
				  nontemporal);
	}
 
+      if (misalignp)
+	{
+	  struct expand_operand ops[2];
+
+	  create_fixed_operand (&ops[0], mem);
+	  create_input_operand (&ops[1], to_rtx, mode);
+	  /* The movmisalign pattern cannot fail, else the assignment
+	     would silently be omitted.  */
+	  expand_insn (icode, 2, ops);
+	}
+
       if (result)
	preserve_temp_slots (result);
       free_temp_slots ();
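The misaligned-store machinery in the hunks above is exercised by stores through under-aligned lvalues, packed structures being the classic case. A small example (the GCC-specific packed attribute is assumed; whether the target uses a movmisalign insn or a byte-wise sequence depends on the backend):

/* Illustration only: the int member sits at offset 1, so storing to it
   is a misaligned SImode store.  On targets providing movmisalign<m>
   the store expands to one unaligned-move insn; the read-modify-write
   branch above covers stores narrower than the access mode.  */
#include <stdio.h>

struct packed_rec {
  char tag;
  int value;                     /* misaligned: offset 1 */
} __attribute__ ((packed));

int main (void)
{
  struct packed_rec r = { 'x', 0 };
  r.value = 42;                  /* misaligned store */
  printf ("%d\n", r.value);      /* misaligned load: prints 42 */
  return 0;
}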
@@ -4721,7 +4882,9 @@ expand_assignment (tree to, tree from, bool nontemporal)
   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
       && COMPLETE_TYPE_P (TREE_TYPE (from))
       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
-      && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
+      && ! (((TREE_CODE (to) == VAR_DECL
+	      || TREE_CODE (to) == PARM_DECL
+	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
     {
@@ -4754,11 +4917,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
       return;
     }
 
-  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
-     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
-
-  if (to_rtx == 0)
-    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
+  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
+  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
 
   /* Don't move directly into a return register.  */
   if (TREE_CODE (to) == RESULT_DECL
@@ -4767,12 +4927,15 @@ expand_assignment (tree to, tree from, bool nontemporal)
       rtx temp;
 
       push_temp_slots ();
-      temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
+      if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
+	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
+      else
+	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
 
       if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, TREE_TYPE (from),
			 int_size_in_bytes (TREE_TYPE (from)));
-      else
+      else if (temp)
	emit_move_insn (to_rtx, temp);
 
       preserve_temp_slots (to_rtx);
@@ -5297,6 +5460,7 @@ count_type_elements (const_tree type, bool for_ctor_p)
     case POINTER_TYPE:
     case OFFSET_TYPE:
     case REFERENCE_TYPE:
+    case NULLPTR_TYPE:
       return 1;
 
     case ERROR_MARK:
@@ -6183,7 +6347,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
 
       store_field (blk_object, bitsize, bitpos,
		   bitregion_start, bitregion_end,
-		   mode, exp, type, alias_set, nontemporal);
+		   mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
 
       emit_move_insn (target, object);
 
@@ -6216,6 +6380,8 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     || bitpos % GET_MODE_ALIGNMENT (mode))
	    && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	   || (bitpos % BITS_PER_UNIT != 0)))
+      || (bitsize >= 0 && mode != BLKmode
+	  && GET_MODE_BITSIZE (mode) > bitsize)
       /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
@@ -6311,8 +6477,6 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	  if (to_rtx == target)
	    to_rtx = copy_rtx (to_rtx);
 
-	  if (!MEM_SCALAR_P (to_rtx))
-	    MEM_IN_STRUCT_P (to_rtx) = 1;
	  if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	    set_mem_alias_set (to_rtx, alias_set);
 
@@ -6544,6 +6708,24 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
   /* Otherwise, split it up.  */
   if (offset)
     {
+      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
+      if (double_int_negative_p (bit_offset))
+	{
+	  double_int mask
+	    = double_int_mask (BITS_PER_UNIT == 8
+			       ? 3 : exact_log2 (BITS_PER_UNIT));
+	  double_int tem = double_int_and_not (bit_offset, mask);
+	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
+	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
+	  bit_offset = double_int_sub (bit_offset, tem);
+	  tem = double_int_rshift (tem,
+				   BITS_PER_UNIT == 8
+				   ? 3 : exact_log2 (BITS_PER_UNIT),
+				   HOST_BITS_PER_DOUBLE_INT, true);
+	  offset = size_binop (PLUS_EXPR, offset,
+			       double_int_to_tree (sizetype, tem));
+	}
+
       *pbitpos = double_int_to_shwi (bit_offset);
       *poffset = offset;
     }
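The rounding in the new get_inner_reference code is ordinary integer arithmetic: round the bit offset down to a multiple of BITS_PER_UNIT (towards minus infinity), move the whole bytes into OFFSET, and keep the non-negative remainder as the bit position. A worked check with plain integers, assuming BITS_PER_UNIT == 8 (an arithmetic right shift is assumed, matching the arith=true shift above):

/* Illustration only: bit_offset = -17 rounds down to tem = -24,
   leaving bitpos 7 and folding -3 bytes into the offset.  */
#include <stdio.h>

int main (void)
{
  long bit_offset = -17;
  long tem = bit_offset & ~7L;       /* round towards -Inf: -24 */
  long bitpos = bit_offset - tem;    /* 7, now non-negative */
  long byte_adjust = tem >> 3;       /* -3 bytes added to OFFSET */
  printf ("bitpos=%ld bytes=%ld\n", bitpos, byte_adjust);
  return bitpos == 7 && byte_adjust == -3 ? 0 : 1;
}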
@@ -7082,8 +7264,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
	 are memory and they conflict.  */
       return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
-		    && true_dependence (exp_rtl, VOIDmode, x,
-					rtx_addr_varies_p)));
+		    && true_dependence (exp_rtl, VOIDmode, x)));
     }
 
   /* If we reach here, it is safe.  */
@@ -7243,7 +7424,12 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
      generating ADDR_EXPR of something that isn't an LVALUE.  The only
      exception here is STRING_CST.  */
   if (CONSTANT_CLASS_P (exp))
-    return XEXP (expand_expr_constant (exp, 0, modifier), 0);
+    {
+      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
+      if (modifier < EXPAND_SUM)
+	result = force_operand (result, target);
+      return result;
+    }
 
   /* Everything must be something allowed by is_gimple_addressable.  */
   switch (TREE_CODE (exp))
@@ -7262,7 +7448,11 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
 
     case CONST_DECL:
       /* Expand the initializer like constants above.  */
-      return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
+      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
+					   0, modifier), 0);
+      if (modifier < EXPAND_SUM)
+	result = force_operand (result, target);
+      return result;
 
     case REALPART_EXPR:
       /* The real part of the complex number is always first, therefore
@@ -7320,7 +7510,8 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
     }
 
   if (modifier != EXPAND_INITIALIZER
-      && modifier != EXPAND_CONST_ADDRESS)
+      && modifier != EXPAND_CONST_ADDRESS
+      && modifier != EXPAND_SUM)
     result = force_operand (result, target);
   return result;
 }
@@ -8401,8 +8592,9 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
       if (modifier == EXPAND_STACK_PARM)
	target = 0;
       /* In case we have to reduce the result to bitfield precision
-	 expand this as XOR with a proper constant instead.  */
-      if (reduce_bit_field)
+	 for unsigned bitfield expand this as XOR with a proper constant
+	 instead.  */
+      if (reduce_bit_field && TYPE_UNSIGNED (type))
	temp = expand_binop (mode, xor_optab, op0,
			     immed_double_int_const
			       (double_int_mask (TYPE_PRECISION (type)), mode),
@@ -8504,6 +8696,54 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
 
       if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
+      else
+	/* If target overlaps with op1, then either we need to force
+	   op1 into a pseudo (if target also overlaps with op0),
+	   or write the complex parts in reverse order.  */
+	switch (GET_CODE (target))
+	  {
+	  case CONCAT:
+	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
+	      {
+		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
+		  {
+		  complex_expr_force_op1:
+		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
+		    emit_move_insn (temp, op1);
+		    op1 = temp;
+		    break;
+		  }
+	      complex_expr_swap_order:
+		/* Move the imaginary (op1) and real (op0) parts to their
+		   location.  */
+		write_complex_part (target, op1, true);
+		write_complex_part (target, op0, false);
+
+		return target;
+	      }
+	    break;
+	  case MEM:
+	    temp = adjust_address_nv (target,
+				      GET_MODE_INNER (GET_MODE (target)), 0);
+	    if (reg_overlap_mentioned_p (temp, op1))
+	      {
+		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
+		temp = adjust_address_nv (target, imode,
+					  GET_MODE_SIZE (imode));
+		if (reg_overlap_mentioned_p (temp, op0))
+		  goto complex_expr_force_op1;
+		goto complex_expr_swap_order;
+	      }
+	    break;
+	  default:
+	    if (reg_overlap_mentioned_p (target, op1))
+	      {
+		if (reg_overlap_mentioned_p (target, op0))
+		  goto complex_expr_force_op1;
+		goto complex_expr_swap_order;
+	      }
+	    break;
+	  }
 
   /* Move the real (op0) and imaginary (op1) parts to their
      location.  */
 
       write_complex_part (target, op0, false);
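The new overlap logic guards COMPLEX_EXPR expansions where the destination is also one of the operands: writing the real part first would clobber the input of the imaginary part, so the parts are either swapped or op1 is forced into a pseudo. A source-level illustration (C99 complex; at RTL level the hazard appears when the target rtx overlaps op0/op1):

/* Illustration only: z is both source and destination, so the real
   and imaginary parts must be written in a safe order, exactly the
   cases the CONCAT/MEM/default switch above distinguishes.  */
#include <complex.h>
#include <stdio.h>

int main (void)
{
  double _Complex z = 1.0 + 2.0 * I;
  z = cimag (z) + creal (z) * I;    /* swap parts: target overlaps ops */
  printf ("%g %g\n", creal (z), cimag (z));   /* 2 1 */
  return creal (z) == 2.0 && cimag (z) == 1.0 ? 0 : 1;
}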
@@ -8533,30 +8773,6 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
	return temp;
       }
 
-    case VEC_EXTRACT_EVEN_EXPR:
-    case VEC_EXTRACT_ODD_EXPR:
-      {
-	expand_operands (treeop0, treeop1,
-			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
-	this_optab = optab_for_tree_code (code, type, optab_default);
-	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
-			     OPTAB_WIDEN);
-	gcc_assert (temp);
-	return temp;
-      }
-
-    case VEC_INTERLEAVE_HIGH_EXPR:
-    case VEC_INTERLEAVE_LOW_EXPR:
-      {
-	expand_operands (treeop0, treeop1,
-			 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
-	this_optab = optab_for_tree_code (code, type, optab_default);
-	temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
-			     OPTAB_WIDEN);
-	gcc_assert (temp);
-	return temp;
-      }
-
     case VEC_LSHIFT_EXPR:
     case VEC_RSHIFT_EXPR:
       {
@@ -8600,12 +8816,49 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
	return target;
       }
 
+    case VEC_WIDEN_LSHIFT_HI_EXPR:
+    case VEC_WIDEN_LSHIFT_LO_EXPR:
+      {
+	tree oprnd0 = treeop0;
+	tree oprnd1 = treeop1;
+
+	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+	target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
+					    target, unsignedp);
+	gcc_assert (target);
+	return target;
+      }
+
     case VEC_PACK_TRUNC_EXPR:
     case VEC_PACK_SAT_EXPR:
     case VEC_PACK_FIX_TRUNC_EXPR:
       mode = TYPE_MODE (TREE_TYPE (treeop0));
       goto binop;
 
+    case VEC_PERM_EXPR:
+      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
+      op2 = expand_normal (treeop2);
+
+      /* Careful here: if the target doesn't support integral vector modes,
+	 a constant selection vector could wind up smooshed into a normal
+	 integral constant.  */
+      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
+	{
+	  tree sel_type = TREE_TYPE (treeop2);
+	  enum machine_mode vmode
+	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
+			       TYPE_VECTOR_SUBPARTS (sel_type));
+	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
+	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
+	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
+	}
+      else
+	gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
+
+      temp = expand_vec_perm (mode, op0, op1, op2, target);
+      gcc_assert (temp);
+      return temp;
+
     case DOT_PROD_EXPR:
       {
	tree oprnd0 = treeop0;
@@ -8636,6 +8889,64 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
	return temp;
       }
 
+    case COND_EXPR:
+      /* A COND_EXPR with its type being VOID_TYPE represents a
+	 conditional jump and is handled in
+	 expand_gimple_cond_expr.  */
+      gcc_assert (!VOID_TYPE_P (type));
+
+      /* Note that COND_EXPRs whose type is a structure or union
+	 are required to be constructed to contain assignments of
+	 a temporary variable, so that we can evaluate them here
+	 for side effect only.  If type is void, we must do likewise.  */
+
+      gcc_assert (!TREE_ADDRESSABLE (type)
+		  && !ignore
+		  && TREE_TYPE (treeop1) != void_type_node
+		  && TREE_TYPE (treeop2) != void_type_node);
+
+      /* If we are not to produce a result, we have no target.  Otherwise,
+	 if a target was specified use it; it will not be used as an
+	 intermediate target unless it is safe.  If no target, use a
+	 temporary.  */
+
+      if (modifier != EXPAND_STACK_PARM
+	  && original_target
+	  && safe_from_p (original_target, treeop0, 1)
+	  && GET_MODE (original_target) == mode
+#ifdef HAVE_conditional_move
+	  && (! can_conditionally_move_p (mode)
+	      || REG_P (original_target))
+#endif
+	  && !MEM_P (original_target))
+	temp = original_target;
+      else
+	temp = assign_temp (type, 0, 0, 1);
+
+      do_pending_stack_adjust ();
+      NO_DEFER_POP;
+      op0 = gen_label_rtx ();
+      op1 = gen_label_rtx ();
+      jumpifnot (treeop0, op0, -1);
+      store_expr (treeop1, temp,
+		  modifier == EXPAND_STACK_PARM,
+		  false);
+
+      emit_jump_insn (gen_jump (op1));
+      emit_barrier ();
+      emit_label (op0);
+      store_expr (treeop2, temp,
+		  modifier == EXPAND_STACK_PARM,
+		  false);
+
+      emit_label (op1);
+      OK_DEFER_POP;
+      return temp;
+
+    case VEC_COND_EXPR:
+      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
+      return target;
+
     default:
       gcc_unreachable ();
     }
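VEC_PERM_EXPR typically reaches this expander from GCC 4.7's __builtin_shuffle vector extension; the CONST_VECTOR normalization above exists for constant selectors on targets without integral vector modes. A sketch (the union is used for element access to stay portable to this GCC era):

/* Illustration only: a constant selection vector, the case the
   normalization in the VEC_PERM_EXPR arm is about.  */
#include <stdio.h>

typedef int v4si __attribute__ ((vector_size (16)));

int main (void)
{
  v4si a = { 10, 20, 30, 40 };
  v4si mask = { 3, 2, 1, 0 };              /* constant selector */
  union { v4si v; int e[4]; } r;
  r.v = __builtin_shuffle (a, mask);       /* expands to VEC_PERM_EXPR */
  printf ("%d %d %d %d\n", r.e[0], r.e[1], r.e[2], r.e[3]); /* 40 30 20 10 */
  return 0;
}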
@@ -8892,10 +9203,15 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
	  return temp;
	}
 
-      /* If the mode of DECL_RTL does not match that of the decl, it
-	 must be a promoted value.  We return a SUBREG of the wanted mode,
-	 but mark it so that we know that it was already extended.  */
-      if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
+      /* If the mode of DECL_RTL does not match that of the decl,
+	 there are two cases: we are dealing with a BLKmode value
+	 that is returned in a register, or we are dealing with
+	 a promoted value.  In the latter case, return a SUBREG
+	 of the wanted mode, but mark it so that we know that it
+	 was already extended.  */
+      if (REG_P (decl_rtl)
+	  && DECL_MODE (exp) != BLKmode
+	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
	{
	  enum machine_mode pmode;
 
@@ -9059,10 +9375,11 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 
     case TARGET_MEM_REF:
       {
-	addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
+	addr_space_t as
+	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	struct mem_address addr;
	enum insn_code icode;
-	int align;
+	unsigned int align;
 
	get_address_description (exp, &addr);
	op0 = addr_for_mem_ref (&addr, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
-	align = MAX (TYPE_ALIGN (TREE_TYPE (exp)), get_object_alignment (exp));
+	align = get_object_or_type_alignment (exp);
	if (mode != BLKmode
-	    && (unsigned) align < GET_MODE_ALIGNMENT (mode)
+	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
@@ -9094,52 +9411,46 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
     case MEM_REF:
       {
	addr_space_t as
-	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
+	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple def_stmt;
	enum insn_code icode;
-	int align;
+	unsigned align;
	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
-	if (TREE_CODE (base) == ADDR_EXPR)
+	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
	    tree bit_offset;
+	    tree bftype;
	    base = TREE_OPERAND (base, 0);
-	    if (!DECL_P (base))
-	      {
-		HOST_WIDE_INT off;
-		base = get_addr_base_and_unit_offset (base, &off);
-		gcc_assert (base);
-		offset += off;
-	      }
-	    /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
-	       decl we must use bitfield operations.  */
-	    if (DECL_P (base)
-		&& !TREE_ADDRESSABLE (base)
-		&& DECL_MODE (base) != BLKmode
-		&& DECL_RTL_SET_P (base)
-		&& !MEM_P (DECL_RTL (base)))
+	    if (offset == 0
+		&& host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
+		&& (GET_MODE_BITSIZE (DECL_MODE (base))
+		    == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
+	      return expand_expr (build1 (VIEW_CONVERT_EXPR,
+					  TREE_TYPE (exp), base),
+				  target, tmode, modifier);
+	    bit_offset = bitsize_int (offset * BITS_PER_UNIT);
+	    bftype = TREE_TYPE (base);
+	    if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
+	      bftype = TREE_TYPE (exp);
+	    else
	      {
-		tree bftype;
-		if (offset == 0
-		    && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
-		    && (GET_MODE_BITSIZE (DECL_MODE (base))
-			== TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
-		  return expand_expr (build1 (VIEW_CONVERT_EXPR,
-					      TREE_TYPE (exp), base),
-				      target, tmode, modifier);
-		bit_offset = bitsize_int (offset * BITS_PER_UNIT);
-		bftype = TREE_TYPE (base);
-		if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
-		  bftype = TREE_TYPE (exp);
-		return expand_expr (build3 (BIT_FIELD_REF, bftype,
-					    base,
-					    TYPE_SIZE (TREE_TYPE (exp)),
-					    bit_offset),
-				    target, tmode, modifier);
+		temp = assign_stack_temp (DECL_MODE (base),
+					  GET_MODE_SIZE (DECL_MODE (base)),
+					  0);
+		store_expr (base, temp, 0, false);
+		temp = adjust_address (temp, BLKmode, offset);
+		set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
+		return temp;
	      }
+	    return expand_expr (build3 (BIT_FIELD_REF, bftype,
+					base,
+					TYPE_SIZE (TREE_TYPE (exp)),
+					bit_offset),
+				target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
@@ -9150,7 +9461,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
				    gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
-	align = MAX (TYPE_ALIGN (TREE_TYPE (exp)), get_object_alignment (exp));
+	align = get_object_or_type_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (address_mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
@@ -9166,7 +9477,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (mode != BLKmode
-	    && (unsigned) align < GET_MODE_ALIGNMENT (mode)
+	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
@@ -9394,6 +9705,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
+			  && COMPLETE_TYPE_P (TREE_TYPE (tem))
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
@@ -9543,11 +9855,16 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field is volatile, we always want an aligned
-	       access.  Only do this if the access is not already naturally
+	       access.  Do this in the following two situations:
+	       1. the access is not already naturally
	       aligned, otherwise "normal" (non-bitfield) volatile fields
-	       become non-addressable.  */
+	       become non-addressable.
+	       2. the bitsize is narrower than the access size.  Need
+	       to extract bitfields from the access.  */
	    || (volatilep && flag_strict_volatile_bitfields > 0
-		&& (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+		&& (bitpos % GET_MODE_ALIGNMENT (mode) != 0
+		    || (mode1 != BLKmode
+			&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
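The widened condition in the hunk above affects -fstrict-volatile-bitfields code where the declared field is narrower than the access mode, so the value must be extracted from a full-width read. An illustrative device-register pattern (the layout and the flag's default are target-dependent assumptions):

/* Illustration only: with -fstrict-volatile-bitfields the 8-bit field
   is read with an access of its declared type's width (a full int
   here), and the bits are then extracted -- the new "bitsize narrower
   than the access size" case.  */
struct hwreg {
  volatile unsigned int status : 8;    /* narrower than unsigned int */
  volatile unsigned int rest   : 24;
};

struct hwreg dev;

unsigned int read_status (void)
{
  return dev.status;   /* full-word volatile load, then bit extract */
}

int main (void) { return read_status () == 0 ? 0 : 1; }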
@@ -9843,10 +10160,32 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
	 results.  */
      if (MEM_P (op0))
	{
+	  enum insn_code icode;
+
	  op0 = copy_rtx (op0);
 
	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
+	  else if (mode != BLKmode
+		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
+		   /* If the target does have special handling for unaligned
+		      loads of mode then use them.  */
+		   && ((icode = optab_handler (movmisalign_optab, mode))
+		       != CODE_FOR_nothing))
+	    {
+	      rtx reg, insn;
+
+	      op0 = adjust_address (op0, mode, 0);
+	      /* We've already validated the memory, and we're creating a
+		 new pseudo destination.  The predicates really can't
+		 fail.  */
+	      reg = gen_reg_rtx (mode);
+
+	      /* Nor can the insn generator.  */
+	      insn = GEN_FCN (icode) (reg, op0);
+	      emit_insn (insn);
+	      return reg;
+	    }
	  else if (STRICT_ALIGNMENT
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
@@ -9878,64 +10217,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 
       return op0;
 
-    case COND_EXPR:
-      /* A COND_EXPR with its type being VOID_TYPE represents a
-	 conditional jump and is handled in
-	 expand_gimple_cond_expr.  */
-      gcc_assert (!VOID_TYPE_P (type));
-
-      /* Note that COND_EXPRs whose type is a structure or union
-	 are required to be constructed to contain assignments of
-	 a temporary variable, so that we can evaluate them here
-	 for side effect only.  If type is void, we must do likewise.  */
-
-      gcc_assert (!TREE_ADDRESSABLE (type)
-		  && !ignore
-		  && TREE_TYPE (treeop1) != void_type_node
-		  && TREE_TYPE (treeop2) != void_type_node);
-
-      /* If we are not to produce a result, we have no target.  Otherwise,
-	 if a target was specified use it; it will not be used as an
-	 intermediate target unless it is safe.  If no target, use a
-	 temporary.  */
-
-      if (modifier != EXPAND_STACK_PARM
-	  && original_target
-	  && safe_from_p (original_target, treeop0, 1)
-	  && GET_MODE (original_target) == mode
-#ifdef HAVE_conditional_move
-	  && (! can_conditionally_move_p (mode)
-	      || REG_P (original_target))
-#endif
-	  && !MEM_P (original_target))
-	temp = original_target;
-      else
-	temp = assign_temp (type, 0, 0, 1);
-
-      do_pending_stack_adjust ();
-      NO_DEFER_POP;
-      op0 = gen_label_rtx ();
-      op1 = gen_label_rtx ();
-      jumpifnot (treeop0, op0, -1);
-      store_expr (treeop1, temp,
-		  modifier == EXPAND_STACK_PARM,
-		  false);
-
-      emit_jump_insn (gen_jump (op1));
-      emit_barrier ();
-      emit_label (op0);
-      store_expr (treeop2, temp,
-		  modifier == EXPAND_STACK_PARM,
-		  false);
-
-      emit_label (op1);
-      OK_DEFER_POP;
-      return temp;
-
-    case VEC_COND_EXPR:
-      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
-      return target;
-
     case MODIFY_EXPR:
       {
	tree lhs = treeop0;
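The do_store_flag change in the hunk below corresponds to GCC 4.7's element-wise comparisons on vector types, which yield an all-ones/all-zeros mask per lane and are expanded through VEC_COND_EXPR. A sketch (the union is used for element access to stay portable to this GCC era):

/* Illustration only: each lane of m is -1 (all ones) where the
   comparison holds and 0 where it does not.  */
#include <stdio.h>

typedef int v4si __attribute__ ((vector_size (16)));

int main (void)
{
  v4si a = { 1, 5, 3, 9 };
  v4si b = { 2, 4, 3, 8 };
  union { v4si v; int e[4]; } m;
  m.v = a > b;                       /* mask: {0, -1, 0, -1} */
  printf ("%d %d %d %d\n", m.e[0], m.e[1], m.e[2], m.e[3]);
  return 0;
}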
@@ -10308,6 +10589,28 @@ do_store_flag (sepops ops, rtx target, enum machine_mode mode)
 
   STRIP_NOPS (arg0);
   STRIP_NOPS (arg1);
+
+  /* For vector typed comparisons emit code to generate the desired
+     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
+     expander for this.  */
+  if (TREE_CODE (ops->type) == VECTOR_TYPE)
+    {
+      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
+      tree if_true = constant_boolean_node (true, ops->type);
+      tree if_false = constant_boolean_node (false, ops->type);
+      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
+    }
 
   /* Get the rtx comparison code to use.  We know that EXP is a
      comparison operation of some type.  Some comparisons against 1 and -1 can be
@@ -10396,15 +10699,22 @@ do_store_flag (sepops ops, rtx target, enum machine_mode mode)
      so we just call into the folder and expand its result.  */
 
   if ((code == NE || code == EQ)
-      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
-      && integer_pow2p (TREE_OPERAND (arg0, 1))
+      && integer_zerop (arg1)
       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
     {
-      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
-      return expand_expr (fold_single_bit_test (loc,
-						code == NE ? NE_EXPR : EQ_EXPR,
-						arg0, arg1, type),
-			  target, VOIDmode, EXPAND_NORMAL);
+      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
+      if (srcstmt
+	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
+	{
+	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
+	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
+	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
+				       gimple_assign_rhs1 (srcstmt),
+				       gimple_assign_rhs2 (srcstmt));
+	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
+	  if (temp)
+	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
+	}
     }
 
   if (! get_subtarget (target)