diff --git a/gcc/expr.c b/gcc/expr.c
index bd2f6b18f7c..4c248e06155 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -4439,113 +4439,105 @@ optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
 /* In the C++ memory model, consecutive bit fields in a structure are
    considered one memory location.
 
-   Given a COMPONENT_REF, this function returns the bit range of
-   consecutive bits in which this COMPONENT_REF belongs in.  The
-   values are returned in *BITSTART and *BITEND.  If either the C++
-   memory model is not activated, or this memory access is not thread
-   visible, 0 is returned in *BITSTART and *BITEND.
-
-   EXP is the COMPONENT_REF.
-   INNERDECL is the actual object being referenced.
-   BITPOS is the position in bits where the bit starts within the structure.
-   BITSIZE is size in bits of the field being referenced in EXP.
-
-   For example, while storing into FOO.A here...
-
-      struct {
-        BIT 0:
-          unsigned int a : 4;
-          unsigned int b : 1;
-        BIT 8:
-          unsigned char c;
-          unsigned int d : 6;
-      } foo;
-
-   ...we are not allowed to store past <b>, so for the layout above, a
-   range of 0..7 (because no one cares if we store into the
-   padding).  */
+   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
+   returns the bit range of consecutive bits in which this COMPONENT_REF
+   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
+   and *OFFSET may be adjusted in the process.
+
+   If the access does not need to be restricted, 0 is returned in both
+   *BITSTART and *BITEND.  */
 
 static void
 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
 	       unsigned HOST_WIDE_INT *bitend,
-	       tree exp, tree innerdecl,
-	       HOST_WIDE_INT bitpos, HOST_WIDE_INT bitsize)
+	       tree exp,
+	       HOST_WIDE_INT *bitpos,
+	       tree *offset)
 {
-  tree field, record_type, fld;
-  bool found_field = false;
-  bool prev_field_is_bitfield;
+  HOST_WIDE_INT bitoffset;
+  tree field, repr;
 
   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
 
-  /* If other threads can't see this value, no need to restrict stores.  */
-  if (ALLOW_STORE_DATA_RACES
-      || ((TREE_CODE (innerdecl) == MEM_REF
-	   || TREE_CODE (innerdecl) == TARGET_MEM_REF)
-	  && !ptr_deref_may_alias_global_p (TREE_OPERAND (innerdecl, 0)))
-      || (DECL_P (innerdecl)
-	  && ((TREE_CODE (innerdecl) == VAR_DECL
-	       && DECL_THREAD_LOCAL_P (innerdecl))
-	      || !TREE_STATIC (innerdecl))))
+  field = TREE_OPERAND (exp, 1);
+  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
+  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
+     need to limit the range we can access.  */
+  if (!repr)
     {
       *bitstart = *bitend = 0;
       return;
     }
 
-  /* Bit field we're storing into.  */
-  field = TREE_OPERAND (exp, 1);
-  record_type = DECL_FIELD_CONTEXT (field);
-
-  /* Count the contiguous bitfields for the memory location that
-     contains FIELD.  */
-  *bitstart = 0;
-  prev_field_is_bitfield = true;
-  for (fld = TYPE_FIELDS (record_type); fld; fld = DECL_CHAIN (fld))
+  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
+     part of a larger bit field, then the representative does not serve any
+     useful purpose.  This can occur in Ada.  */
+  if (handled_component_p (TREE_OPERAND (exp, 0)))
     {
-      tree t, offset;
-      enum machine_mode mode;
-      int unsignedp, volatilep;
-
-      if (TREE_CODE (fld) != FIELD_DECL)
-	continue;
-
-      t = build3 (COMPONENT_REF, TREE_TYPE (exp),
-		  unshare_expr (TREE_OPERAND (exp, 0)),
-		  fld, NULL_TREE);
-      get_inner_reference (t, &bitsize, &bitpos, &offset,
-			   &mode, &unsignedp, &volatilep, true);
-
-      if (field == fld)
-	found_field = true;
-
-      if (DECL_BIT_FIELD_TYPE (fld) && bitsize > 0)
-	{
-	  if (prev_field_is_bitfield == false)
-	    {
-	      *bitstart = bitpos;
-	      prev_field_is_bitfield = true;
-	    }
-	}
-      else
+      enum machine_mode rmode;
+      HOST_WIDE_INT rbitsize, rbitpos;
+      tree roffset;
+      int unsignedp;
+      int volatilep = 0;
+      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
+			   &roffset, &rmode, &unsignedp, &volatilep, false);
+      if ((rbitpos % BITS_PER_UNIT) != 0)
 	{
-	  prev_field_is_bitfield = false;
-	  if (found_field)
-	    break;
+	  *bitstart = *bitend = 0;
+	  return;
 	}
     }
 
-  gcc_assert (found_field);
-
-  if (fld)
-    {
-      /* We found the end of the bit field sequence.  Include the
-	 padding up to the next field and be done.  */
-      *bitend = bitpos - 1;
-    }
+  /* Compute the adjustment to bitpos from the offset of the field
+     relative to the representative.  DECL_FIELD_OFFSET of field and
+     repr are the same by construction if they are not constants,
+     see finish_bitfield_layout.  */
+  if (host_integerp (DECL_FIELD_OFFSET (field), 1)
+      && host_integerp (DECL_FIELD_OFFSET (repr), 1))
+    bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
+		 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
   else
+    bitoffset = 0;
+  bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+		- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
+
+  /* If the adjustment is larger than bitpos, we would have a negative bit
+     position for the lower bound and this may wreak havoc later.  This can
+     occur only if we have a non-null offset, so adjust offset and bitpos
+     to make the lower bound non-negative.  */
+  if (bitoffset > *bitpos)
     {
-      /* If this is the last element in the structure, include the padding
-	 at the end of structure.  */
-      *bitend = TREE_INT_CST_LOW (TYPE_SIZE (record_type)) - 1;
+      HOST_WIDE_INT adjust = bitoffset - *bitpos;
+
+      gcc_assert ((adjust % BITS_PER_UNIT) == 0);
+      gcc_assert (*offset != NULL_TREE);
+
+      *bitpos += adjust;
+      *offset
+	= size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
+      *bitstart = 0;
     }
+  else
+    *bitstart = *bitpos - bitoffset;
+
+  *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
+}
+
+/* Returns true if the MEM_REF REF refers to an object that does not
+   reside in memory and has non-BLKmode.  */
+
+static bool
+mem_ref_refers_to_non_mem_p (tree ref)
+{
+  tree base = TREE_OPERAND (ref, 0);
+  if (TREE_CODE (base) != ADDR_EXPR)
+    return false;
+  base = TREE_OPERAND (base, 0);
+  return (DECL_P (base)
+	  && !TREE_ADDRESSABLE (base)
+	  && DECL_MODE (base) != BLKmode
+	  && DECL_RTL_SET_P (base)
+	  && !MEM_P (DECL_RTL (base)));
 }
 
 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
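The hunk above replaces the field-walking loop with a lookup of
DECL_BIT_FIELD_REPRESENTATIVE.  As a minimal sketch of the rule being
enforced (illustration only, not code from the patch): under the C++11
memory model the bit-fields A and B below form one memory location and
C a separate one, so a store to FOO.A may read-modify-write only the
byte holding the A/B run.  The layout assumed here (A/B in byte 0 with
A in the low-order bits, C in byte 1) matches the struct in the deleted
comment on a typical little-endian target.

#include <stdio.h>
#include <string.h>

struct foo
{
  unsigned int a : 4;	/* A and B share one memory location...  */
  unsigned int b : 1;	/* ...covering bits 0..7, padding included.  */
  unsigned char c;	/* C is a distinct memory location.  */
  unsigned int d : 6;
};

/* Store VAL into P->A touching only bits 0..7, the range that
   get_bit_range would compute for this access.  */
static void
store_a (struct foo *p, unsigned int val)
{
  unsigned char byte;
  memcpy (&byte, p, 1);			/* read byte 0 only */
  byte = (byte & ~0x0f) | (val & 0x0f);	/* splice in the 4-bit field */
  memcpy (p, &byte, 1);			/* write byte 0 only */
}

int
main (void)
{
  struct foo x = { 0, 0, 0xaa, 0 };
  store_a (&x, 9);
  /* A concurrent writer of X.C can never lose its update.  */
  printf ("a=%u b=%u c=0x%x\n", x.a, x.b, x.c);
  return 0;
}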
@@ -4571,15 +4563,19 @@ expand_assignment (tree to, tree from, bool nontemporal)
   if (operand_equal_p (to, from, 0))
     return;
 
+  /* Handle misaligned stores.  */
   mode = TYPE_MODE (TREE_TYPE (to));
   if ((TREE_CODE (to) == MEM_REF
        || TREE_CODE (to) == TARGET_MEM_REF)
       && mode != BLKmode
+      && !mem_ref_refers_to_non_mem_p (to)
       && ((align = get_object_or_type_alignment (to))
	   < GET_MODE_ALIGNMENT (mode))
       && ((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing))
     {
+      addr_space_t as
+	= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
       struct expand_operand ops[2];
       enum machine_mode address_mode;
       rtx reg, op0, mem;
@@ -4589,8 +4585,6 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       if (TREE_CODE (to) == MEM_REF)
 	{
-	  addr_space_t as
-	      = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
 	  tree base = TREE_OPERAND (to, 0);
 	  address_mode = targetm.addr_space.address_mode (as);
 	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
@@ -4598,7 +4592,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 	  if (!integer_zerop (TREE_OPERAND (to, 1)))
 	    {
 	      rtx off
-		  = immed_double_int_const (mem_ref_offset (to), address_mode);
+		= immed_double_int_const (mem_ref_offset (to), address_mode);
 	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
 	    }
 	  op0 = memory_address_addr_space (mode, op0, as);
@@ -4608,10 +4602,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 	}
       else if (TREE_CODE (to) == TARGET_MEM_REF)
 	{
-	  addr_space_t as
-	      = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
 	  struct mem_address addr;
-
 	  get_address_description (to, &addr);
 	  op0 = addr_for_mem_ref (&addr, as, true);
 	  op0 = memory_address_addr_space (mode, op0, as);
@@ -4627,7 +4618,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
       create_fixed_operand (&ops[0], mem);
       create_input_operand (&ops[1], reg, mode);
       /* The movmisalign pattern cannot fail, else the assignment would
-	 silently be omitted. */
+	 silently be omitted.  */
       expand_insn (icode, 2, ops);
       return;
     }
@@ -4636,12 +4627,10 @@ expand_assignment (tree to, tree from, bool nontemporal)
      if the structure component's rtx is not simply a MEM.
      Assignment of an array element at a constant index, and assignment of
      an array element in an unaligned packed structure field, has the same
-     problem.  */
+     problem.  Same for (partially) storing into a non-memory object.  */
   if (handled_component_p (to)
-      /* ???  We only need to handle MEM_REF here if the access is not
-         a full access of the base object.  */
       || (TREE_CODE (to) == MEM_REF
-	  && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
+	  && mem_ref_refers_to_non_mem_p (to))
       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
     {
       enum machine_mode mode1;
@@ -4652,6 +4641,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
       int unsignedp;
       int volatilep = 0;
       tree tem;
+      bool misalignp;
+      rtx mem = NULL_RTX;
 
       push_temp_slots ();
       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
@@ -4659,13 +4650,62 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       if (TREE_CODE (to) == COMPONENT_REF
 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
-	get_bit_range (&bitregion_start, &bitregion_end,
-		       to, tem, bitpos, bitsize);
+	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
 
       /* If we are going to use store_bit_field and extract_bit_field,
	  make sure to_rtx will be safe for multiple use.  */
+      mode = TYPE_MODE (TREE_TYPE (tem));
+      if (TREE_CODE (tem) == MEM_REF
+	  && mode != BLKmode
+	  && ((align = get_object_or_type_alignment (tem))
+	      < GET_MODE_ALIGNMENT (mode))
+	  && ((icode = optab_handler (movmisalign_optab, mode))
+	      != CODE_FOR_nothing))
+	{
+	  enum machine_mode address_mode;
+	  rtx op0;
+	  struct expand_operand ops[2];
+	  addr_space_t as = TYPE_ADDR_SPACE
+			      (TREE_TYPE (TREE_TYPE (TREE_OPERAND (tem, 0))));
+	  tree base = TREE_OPERAND (tem, 0);
+
+	  misalignp = true;
+	  to_rtx = gen_reg_rtx (mode);
+
+	  address_mode = targetm.addr_space.address_mode (as);
+	  op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
+	  op0 = convert_memory_address_addr_space (address_mode, op0, as);
+	  if (!integer_zerop (TREE_OPERAND (tem, 1)))
+	    {
+	      rtx off = immed_double_int_const (mem_ref_offset (tem),
+						address_mode);
+	      op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
+	    }
+	  op0 = memory_address_addr_space (mode, op0, as);
+	  mem = gen_rtx_MEM (mode, op0);
+	  set_mem_attributes (mem, tem, 0);
+	  set_mem_addr_space (mem, as);
+	  if (TREE_THIS_VOLATILE (tem))
+	    MEM_VOLATILE_P (mem) = 1;
 
-      to_rtx = expand_normal (tem);
+	  /* If the misaligned store doesn't overwrite all bits, perform
+	     a read-modify-write cycle on MEM.  */
+	  if (bitsize != GET_MODE_BITSIZE (mode))
+	    {
+	      create_input_operand (&ops[0], to_rtx, mode);
+	      create_fixed_operand (&ops[1], mem);
+	      /* The movmisalign pattern cannot fail, else the assignment
+		 would silently be omitted.  */
+	      expand_insn (icode, 2, ops);
+
+	      mem = copy_rtx (mem);
+	    }
+	}
+      else
+	{
+	  misalignp = false;
+	  to_rtx = expand_normal (tem);
+	}
 
       /* If the bitfield is volatile, we want to access it in the
 	 field's mode, not the computed mode.
@@ -4811,6 +4851,17 @@ expand_assignment (tree to, tree from, bool nontemporal)
 				  nontemporal);
 	}
 
+      if (misalignp)
+	{
+	  struct expand_operand ops[2];
+
+	  create_fixed_operand (&ops[0], mem);
+	  create_input_operand (&ops[1], to_rtx, mode);
+	  /* The movmisalign pattern cannot fail, else the assignment
+	     would silently be omitted.  */
+	  expand_insn (icode, 2, ops);
+	}
+
       if (result)
 	preserve_temp_slots (result);
       free_temp_slots ();
@@ -4866,11 +4917,8 @@ expand_assignment (tree to, tree from, bool nontemporal)
       return;
     }
 
-  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
-     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
-
-  if (to_rtx == 0)
-    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
+  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
+  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
 
   /* Don't move directly into a return register.  */
   if (TREE_CODE (to) == RESULT_DECL
@@ -6299,7 +6347,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
 
       store_field (blk_object, bitsize, bitpos,
 		   bitregion_start, bitregion_end,
-		   mode, exp, type, alias_set, nontemporal);
+		   mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
 
       emit_move_insn (target, object);
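The get_inner_reference hunk below rounds a negative bit offset towards
-Inf and moves whole bytes into OFFSET so that the residual bit position
is non-negative.  A worked sketch of that arithmetic (illustration only,
assuming BITS_PER_UNIT == 8 and the usual two's-complement behaviour of
& and >> on a negative value):

#include <stdio.h>

int
main (void)
{
  long bit_offset = -13;		/* e.g. from a negative array index */
  long tem = bit_offset & ~7L;		/* round towards -Inf: -16 */
  long bitpos = bit_offset - tem;	/* non-negative residue: 3 */
  long bytes = tem >> 3;		/* scaled into the byte offset: -2 */

  /* -2 bytes * 8 + 3 bits == -13 bits, the same position as before.  */
  printf ("bitpos=%ld bytes=%ld\n", bitpos, bytes);
  return 0;
}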
@@ -6660,6 +6708,24 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
   /* Otherwise, split it up.  */
   if (offset)
     {
+      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
+      if (double_int_negative_p (bit_offset))
+	{
+	  double_int mask
+	    = double_int_mask (BITS_PER_UNIT == 8
+			       ? 3 : exact_log2 (BITS_PER_UNIT));
+	  double_int tem = double_int_and_not (bit_offset, mask);
+	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
+	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
+	  bit_offset = double_int_sub (bit_offset, tem);
+	  tem = double_int_rshift (tem,
+				   BITS_PER_UNIT == 8
+				   ? 3 : exact_log2 (BITS_PER_UNIT),
+				   HOST_BITS_PER_DOUBLE_INT, true);
+	  offset = size_binop (PLUS_EXPR, offset,
+			       double_int_to_tree (sizetype, tem));
+	}
+
       *pbitpos = double_int_to_shwi (bit_offset);
       *poffset = offset;
     }
@@ -7358,7 +7424,12 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
      generating ADDR_EXPR of something that isn't an LVALUE.  The only
      exception here is STRING_CST.  */
   if (CONSTANT_CLASS_P (exp))
-    return XEXP (expand_expr_constant (exp, 0, modifier), 0);
+    {
+      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
+      if (modifier < EXPAND_SUM)
+	result = force_operand (result, target);
+      return result;
+    }
 
   /* Everything must be something allowed by is_gimple_addressable.  */
   switch (TREE_CODE (exp))
@@ -7377,7 +7448,11 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
 
     case CONST_DECL:
       /* Expand the initializer like constants above.  */
-      return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
+      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
+					   0, modifier), 0);
+      if (modifier < EXPAND_SUM)
+	result = force_operand (result, target);
+      return result;
 
     case REALPART_EXPR:
       /* The real part of the complex number is always first, therefore
@@ -8517,8 +8592,9 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
       if (modifier == EXPAND_STACK_PARM)
 	target = 0;
       /* In case we have to reduce the result to bitfield precision
-	 expand this as XOR with a proper constant instead.  */
-      if (reduce_bit_field)
+	 for unsigned bitfield expand this as XOR with a proper constant
+	 instead.  */
+      if (reduce_bit_field && TYPE_UNSIGNED (type))
 	temp = expand_binop (mode, xor_optab, op0,
 			     immed_double_int_const
 			       (double_int_mask (TYPE_PRECISION (type)), mode),
@@ -8620,6 +8696,54 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
 
       if (!target)
 	target = gen_reg_rtx (TYPE_MODE (type));
+      else
+	/* If target overlaps with op1, then either we need to force
+	   op1 into a pseudo (if target also overlaps with op0),
+	   or write the complex parts in reverse order.  */
+	switch (GET_CODE (target))
+	  {
+	  case CONCAT:
+	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
+	      {
+		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
+		  {
+		  complex_expr_force_op1:
+		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
+		    emit_move_insn (temp, op1);
+		    op1 = temp;
+		    break;
+		  }
+	      complex_expr_swap_order:
+		/* Move the imaginary (op1) and real (op0) parts to their
+		   location.  */
+		write_complex_part (target, op1, true);
+		write_complex_part (target, op0, false);
+
+		return target;
+	      }
+	    break;
+	  case MEM:
+	    temp = adjust_address_nv (target,
+				      GET_MODE_INNER (GET_MODE (target)), 0);
+	    if (reg_overlap_mentioned_p (temp, op1))
+	      {
+		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
+		temp = adjust_address_nv (target, imode,
+					  GET_MODE_SIZE (imode));
+		if (reg_overlap_mentioned_p (temp, op0))
+		  goto complex_expr_force_op1;
+		goto complex_expr_swap_order;
+	      }
+	    break;
+	  default:
+	    if (reg_overlap_mentioned_p (target, op1))
+	      {
+		if (reg_overlap_mentioned_p (target, op0))
+		  goto complex_expr_force_op1;
+		goto complex_expr_swap_order;
+	      }
+	    break;
+	  }
 
       /* Move the real (op0) and imaginary (op1) parts to their location.  */
       write_complex_part (target, op0, false);
 
@@ -9295,44 +9419,38 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 	unsigned align;
 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
 	   might end up in a register.  */
-	if (TREE_CODE (base) == ADDR_EXPR)
+	if (mem_ref_refers_to_non_mem_p (exp))
 	  {
 	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
 	    tree bit_offset;
+	    tree bftype;
 	    base = TREE_OPERAND (base, 0);
-	    if (!DECL_P (base))
-	      {
-		HOST_WIDE_INT off;
-		base = get_addr_base_and_unit_offset (base, &off);
-		gcc_assert (base);
-		offset += off;
-	      }
-	    /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
-	       decl we must use bitfield operations.  */
-	    if (DECL_P (base)
-		&& !TREE_ADDRESSABLE (base)
-		&& DECL_MODE (base) != BLKmode
-		&& DECL_RTL_SET_P (base)
-		&& !MEM_P (DECL_RTL (base)))
+	    if (offset == 0
+		&& host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
+		&& (GET_MODE_BITSIZE (DECL_MODE (base))
+		    == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
+	      return expand_expr (build1 (VIEW_CONVERT_EXPR,
+					  TREE_TYPE (exp), base),
+				  target, tmode, modifier);
+	    bit_offset = bitsize_int (offset * BITS_PER_UNIT);
+	    bftype = TREE_TYPE (base);
+	    if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
+	      bftype = TREE_TYPE (exp);
+	    else
 	      {
-		tree bftype;
-		if (offset == 0
-		    && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
-		    && (GET_MODE_BITSIZE (DECL_MODE (base))
-			== TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
-		  return expand_expr (build1 (VIEW_CONVERT_EXPR,
-					      TREE_TYPE (exp), base),
-				      target, tmode, modifier);
-		bit_offset = bitsize_int (offset * BITS_PER_UNIT);
-		bftype = TREE_TYPE (base);
-		if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
-		  bftype = TREE_TYPE (exp);
-		return expand_expr (build3 (BIT_FIELD_REF, bftype,
-					    base,
-					    TYPE_SIZE (TREE_TYPE (exp)),
-					    bit_offset),
-				    target, tmode, modifier);
+		temp = assign_stack_temp (DECL_MODE (base),
+					  GET_MODE_SIZE (DECL_MODE (base)),
+					  0);
+		store_expr (base, temp, 0, false);
+		temp = adjust_address (temp, BLKmode, offset);
+		set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
+		return temp;
 	      }
+	    return expand_expr (build3 (BIT_FIELD_REF, bftype,
+					base,
+					TYPE_SIZE (TREE_TYPE (exp)),
+					bit_offset),
+				target, tmode, modifier);
 	  }
 	address_mode = targetm.addr_space.address_mode (as);
 	base = TREE_OPERAND (exp, 0);
@@ -9587,6 +9705,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 
 	orig_op0 = op0
 	  = expand_expr (tem,
 			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
+			  && COMPLETE_TYPE_P (TREE_TYPE (tem))
 			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
 			      != INTEGER_CST)
 			  && modifier != EXPAND_STACK_PARM
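For reference, the read-modify-write cycle that the expand_assignment
changes above perform for a partial misaligned store can be pictured in
plain C.  This is an illustration only: GCC emits the movmisalign insn
at the RTL level, while here memcpy stands in for the misaligned load
and store, and the bit numbering assumes a little-endian target.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Store the low BITSIZE bits of VAL at bit position BITPOS within the
   possibly misaligned 32-bit word at P.  */
static void
store_bits_misaligned (unsigned char *p, unsigned bitpos,
		       unsigned bitsize, uint32_t val)
{
  uint32_t word, mask;

  memcpy (&word, p, sizeof word);	/* misaligned load (the "r" of rmw) */
  mask = (bitsize >= 32 ? ~UINT32_C (0)
	  : (UINT32_C (1) << bitsize) - 1) << bitpos;
  word = (word & ~mask) | ((val << bitpos) & mask);
  memcpy (p, &word, sizeof word);	/* misaligned store (the "w") */
}

int
main (void)
{
  unsigned char buf[8];

  memset (buf, 0xff, sizeof buf);
  /* Update a 6-bit field at bit 4 of the word at the odd address
     buf + 1; the surrounding bits must survive the store.  */
  store_bits_misaligned (buf + 1, 4, 6, 0x15);
  printf ("%02x %02x %02x\n", buf[1], buf[2], buf[3]);
  return 0;
}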
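Similarly, the COMPLEX_EXPR overlap handling added to expand_expr_real_2
chooses between forcing op1 into a temporary and writing the two parts
in reverse order.  The hazard and both remedies can be sketched with
operands modelled as pointers; this is a loose C analogy for the rtx
operands, not the GCC code itself.

#include <stdio.h>

struct cplx { double re, im; };

/* Write *RE and *IM into T, where either pointer may alias a part of
   T, mirroring the CONCAT case in the patch.  */
static void
set_complex (struct cplx *t, const double *re, const double *im)
{
  if (im == &t->re)
    {
      if (re == &t->im)
	{
	  double tmp = *im;	/* both overlap: force op1 into a temp */
	  t->re = *re;
	  t->im = tmp;
	}
      else
	{
	  t->im = *im;		/* only op1 overlaps: swap the order */
	  t->re = *re;
	}
    }
  else
    {
      t->re = *re;		/* the usual order: real part first */
      t->im = *im;
    }
}

int
main (void)
{
  struct cplx a = { 1.0, 2.0 };

  set_complex (&a, &a.im, &a.re);	/* swap the parts in place */
  printf ("%g %g\n", a.re, a.im);	/* prints "2 1", not "2 2" */
  return 0;
}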