+/* A subroutine of ix86_expand_fp_absneg_operator and copysign expanders.
+ Create a mask for the sign bit in MODE for an SSE register. If VECT is
+ true, then replicate the mask for all elements of the vector register.
+ If INVERT is true, then create a mask excluding the sign bit. */
+
+rtx
+ix86_build_signbit_mask (enum machine_mode mode, bool vect, bool invert)
+{
+ enum machine_mode vec_mode;
+ HOST_WIDE_INT hi, lo;
+ int shift = 63;
+ rtvec v;
+ rtx mask;
+
+ /* Find the sign bit, sign extended to 2*HWI. */
+ /* SFmode: bit 31. DFmode: bit SHIFT (63), which may not fit in a
+ single HOST_WIDE_INT, hence the HOST_BITS_PER_WIDE_INT split. */
+ if (mode == SFmode)
+ lo = 0x80000000, hi = lo < 0;
+ else if (HOST_BITS_PER_WIDE_INT >= 64)
+ lo = (HOST_WIDE_INT)1 << shift, hi = -1;
+ else
+ lo = 0, hi = (HOST_WIDE_INT)1 << (shift - HOST_BITS_PER_WIDE_INT);
+
+ /* Complement both halves to get a mask of every bit except the
+ sign bit. */
+ if (invert)
+ lo = ~lo, hi = ~hi;
+
+ /* Force this value into the low part of a fp vector constant. */
+ mask = immed_double_const (lo, hi, mode == SFmode ? SImode : DImode);
+ /* Reinterpret the integer constant as a scalar float of MODE. */
+ mask = gen_lowpart (mode, mask);
+
+ /* Build the vector constant: the mask in every element when VECT,
+ otherwise only in element 0 with the remaining elements zero. */
+ if (mode == SFmode)
+ {
+ if (vect)
+ v = gen_rtvec (4, mask, mask, mask, mask);
+ else
+ v = gen_rtvec (4, mask, CONST0_RTX (SFmode),
+ CONST0_RTX (SFmode), CONST0_RTX (SFmode));
+ vec_mode = V4SFmode;
+ }
+ else
+ {
+ if (vect)
+ v = gen_rtvec (2, mask, mask);
+ else
+ v = gen_rtvec (2, mask, CONST0_RTX (DFmode));
+ vec_mode = V2DFmode;
+ }
+
+ /* Load the constant vector into a register usable as an operand. */
+ return force_reg (vec_mode, gen_rtx_CONST_VECTOR (vec_mode, v));
+}
+
+/* Generate code for floating point ABS or NEG. */
+
+void
+ix86_expand_fp_absneg_operator (enum rtx_code code, enum machine_mode mode,
+ rtx operands[])
+{
+ rtx mask, set, use, clob, dst, src;
+ bool matching_memory;
+ bool use_sse = false;
+ bool vector_mode = VECTOR_MODE_P (mode);
+ enum machine_mode elt_mode = mode;
+
+ /* Vector modes always go through SSE; scalar modes use SSE only when
+ TARGET_SSE_MATH selects it for this float mode. */
+ if (vector_mode)
+ {
+ elt_mode = GET_MODE_INNER (mode);
+ use_sse = true;
+ }
+ else if (TARGET_SSE_MATH)
+ use_sse = SSE_FLOAT_MODE_P (mode);
+
+ /* NEG and ABS performed with SSE use bitwise mask operations.
+ Create the appropriate mask now. */
+ /* NEG is an XOR with the sign-bit mask; ABS is an AND with the
+ inverted mask, hence INVERT = (code == ABS) here. */
+ if (use_sse)
+ mask = ix86_build_signbit_mask (elt_mode, vector_mode, code == ABS);
+ else
+ {
+ /* When not using SSE, we don't use the mask, but prefer to keep the
+ same general form of the insn pattern to reduce duplication when
+ it comes time to split. */
+ mask = const0_rtx;
+ }
+
+ dst = operands[0];
+ src = operands[1];
+
+ /* If the destination is memory, and we don't have matching source
+ operands, do things in registers. */
+ matching_memory = false;
+ if (MEM_P (dst))
+ {
+ if (rtx_equal_p (dst, src) && (!optimize || no_new_pseudos))
+ matching_memory = true;
+ else
+ dst = gen_reg_rtx (mode);
+ }
+ if (MEM_P (src) && !matching_memory)
+ src = force_reg (mode, src);
+
+ if (vector_mode)
+ {
+ /* Vector modes emit the bitwise operation directly. */
+ set = gen_rtx_fmt_ee (code == NEG ? XOR : AND, mode, src, mask);
+ set = gen_rtx_SET (VOIDmode, dst, set);
+ emit_insn (set);
+ }
+ else
+ {
+ /* Scalar modes keep the abstract NEG/ABS rtx, wrapped in a
+ PARALLEL with a USE of the mask and a CLOBBER of the flags
+ register, so one insn form serves until split time (see the
+ comment on the non-SSE mask above). */
+ set = gen_rtx_fmt_e (code, mode, src);
+ set = gen_rtx_SET (VOIDmode, dst, set);
+ use = gen_rtx_USE (VOIDmode, mask);
+ clob = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, FLAGS_REG));
+ emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (3, set, use, clob)));
+ }
+
+ /* If we substituted a temporary register for a memory destination
+ above, copy the result back out. */
+ if (dst != operands[0])
+ emit_move_insn (operands[0], dst);
+}
+
+/* Expand a copysign operation. Special case operand 0 being a constant. */
+
+void
+ix86_expand_copysign (rtx operands[])
+{
+ enum machine_mode mode, vmode;
+ rtx dest, op0, op1, mask, nmask;
+
+ /* operands[1] supplies the magnitude, operands[2] the sign. */
+ dest = operands[0];
+ op0 = operands[1];
+ op1 = operands[2];
+
+ mode = GET_MODE (dest);
+ vmode = mode == SFmode ? V4SFmode : V2DFmode;
+
+ if (GET_CODE (op0) == CONST_DOUBLE)
+ {
+ rtvec v;
+
+ /* The sign of OP0 is irrelevant -- it will be replaced by the sign
+ of OP1 -- so canonicalize the constant to its absolute value. */
+ if (real_isneg (CONST_DOUBLE_REAL_VALUE (op0)))
+ op0 = simplify_unary_operation (ABS, mode, op0, mode);
+
+ if (op0 == CONST0_RTX (mode))
+ op0 = CONST0_RTX (vmode);
+ else
+ {
+ /* Widen the scalar constant into element 0 of a vector constant
+ so the bitwise vector insns can use it. */
+ if (mode == SFmode)
+ v = gen_rtvec (4, op0, CONST0_RTX (SFmode),
+ CONST0_RTX (SFmode), CONST0_RTX (SFmode));
+ else
+ v = gen_rtvec (2, op0, CONST0_RTX (DFmode));
+ op0 = force_reg (vmode, gen_rtx_CONST_VECTOR (vmode, v));
+ }
+
+ /* Only the sign-bit mask is needed: the constant magnitude was
+ canonicalized above so its own sign bit is already clear. */
+ mask = ix86_build_signbit_mask (mode, 0, 0);
+
+ if (mode == SFmode)
+ emit_insn (gen_copysignsf3_const (dest, op0, op1, mask));
+ else
+ emit_insn (gen_copysigndf3_const (dest, op0, op1, mask));
+ }
+ else
+ {
+ /* Variable magnitude: need both the sign-bit mask (to extract the
+ sign of OP1) and its complement (to clear the sign of OP0). */
+ nmask = ix86_build_signbit_mask (mode, 0, 1);
+ mask = ix86_build_signbit_mask (mode, 0, 0);
+
+ if (mode == SFmode)
+ emit_insn (gen_copysignsf3_var (dest, NULL, op0, op1, nmask, mask));
+ else
+ emit_insn (gen_copysigndf3_var (dest, NULL, op0, op1, nmask, mask));
+ }
+}
+
+/* Deconstruct a copysign operation into bit masks. Operand 0 is known to
+ be a constant, and so has already been expanded into a vector constant. */
+
+void
+ix86_split_copysign_const (rtx operands[])
+{
+ enum machine_mode mode, vmode;
+ rtx dest, op0, op1, mask, x;
+
+ dest = operands[0];
+ op0 = operands[1];
+ op1 = operands[2];
+ mask = operands[3];
+
+ mode = GET_MODE (dest);
+ vmode = GET_MODE (mask);
+
+ /* NOTE(review): OP1 (the sign source) is never referenced below;
+ presumably the insn pattern ties operand 2 to operand 0 so that
+ DEST already holds OP1's value here -- confirm against the
+ copysign*3_const patterns. */
+
+ /* DEST &= MASK isolates the sign bit of the value in DEST. */
+ dest = simplify_gen_subreg (vmode, dest, mode, 0);
+ x = gen_rtx_AND (vmode, dest, mask);
+ emit_insn (gen_rtx_SET (VOIDmode, dest, x));
+
+ /* OR in the constant magnitude, unless it is zero -- copysign with a
+ zero magnitude is just the extracted sign bit. */
+ if (op0 != CONST0_RTX (vmode))
+ {
+ x = gen_rtx_IOR (vmode, dest, op0);
+ emit_insn (gen_rtx_SET (VOIDmode, dest, x));
+ }
+}
+
+/* Deconstruct a copysign operation into bit masks. Operand 0 is variable,
+ so we have to do two masks. */
+
+void
+ix86_split_copysign_var (rtx operands[])
+{
+ enum machine_mode mode, vmode;
+ rtx dest, scratch, op0, op1, mask, nmask, x;
+
+ dest = operands[0];
+ scratch = operands[1];
+ op0 = operands[2];
+ op1 = operands[3];
+ nmask = operands[4];
+ mask = operands[5];
+
+ mode = GET_MODE (dest);
+ vmode = GET_MODE (mask);
+
+ /* The plan: SCRATCH = OP1 & MASK (isolate the sign bit of the sign
+ source), DEST = OP0 & NMASK (clear the sign bit of the magnitude),
+ then DEST |= SCRATCH. The alternatives below vary which hard
+ registers overlap and therefore which get clobbered. */
+
+ if (rtx_equal_p (op0, op1))
+ {
+ /* Shouldn't happen often (it's useless, obviously), but when it does
+ we'd generate incorrect code if we continue below. */
+ emit_move_insn (dest, op0);
+ return;
+ }
+
+ if (REG_P (mask) && REGNO (dest) == REGNO (mask)) /* alternative 0 */
+ {
+ gcc_assert (REGNO (op1) == REGNO (scratch));
+
+ x = gen_rtx_AND (vmode, scratch, mask);
+ emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
+
+ /* DEST overlaps MASK, so clear OP0's sign bit with ~MASK computed
+ by a NOT instead of consuming NMASK. */
+ dest = mask;
+ op0 = simplify_gen_subreg (vmode, op0, mode, 0);
+ x = gen_rtx_NOT (vmode, dest);
+ x = gen_rtx_AND (vmode, x, op0);
+ emit_insn (gen_rtx_SET (VOIDmode, dest, x));
+ }
+ else
+ {
+ if (REGNO (op1) == REGNO (scratch)) /* alternative 1,3 */
+ {
+ x = gen_rtx_AND (vmode, scratch, mask);
+ }
+ else /* alternative 2,4 */
+ {
+ /* SCRATCH overlaps MASK; OP1 still holds the sign source. */
+ gcc_assert (REGNO (mask) == REGNO (scratch));
+ op1 = simplify_gen_subreg (vmode, op1, mode, 0);
+ x = gen_rtx_AND (vmode, scratch, op1);
+ }
+ emit_insn (gen_rtx_SET (VOIDmode, scratch, x));
+
+ if (REGNO (op0) == REGNO (dest)) /* alternative 1,2 */
+ {
+ dest = simplify_gen_subreg (vmode, op0, mode, 0);
+ x = gen_rtx_AND (vmode, dest, nmask);
+ }
+ else /* alternative 3,4 */
+ {
+ /* DEST overlaps NMASK, which already holds the complemented
+ mask, so AND OP0 into it directly. */
+ gcc_assert (REGNO (nmask) == REGNO (dest));
+ dest = nmask;
+ op0 = simplify_gen_subreg (vmode, op0, mode, 0);
+ x = gen_rtx_AND (vmode, dest, op0);
+ }
+ emit_insn (gen_rtx_SET (VOIDmode, dest, x));
+ }
+
+ /* Combine the isolated sign bit with the sign-cleared magnitude. */
+ x = gen_rtx_IOR (vmode, dest, scratch);
+ emit_insn (gen_rtx_SET (VOIDmode, dest, x));
+}
+