1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): the last prototype is truncated in this excerpt; the
   remaining parameters are not visible here.  */
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped.
   Resets the global PENDING_STACK_ADJUST bookkeeping; called once per
   function before any argument pushes are expanded.  */
48 init_pending_stack_adjust (void)
50 pending_stack_adjust = 0;
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead.
   Note that STACK_POINTER_DELTA must be decremented in step with
   PENDING_STACK_ADJUST so the two globals stay consistent.  */
56 void discard_pending_stack_adjust (void)
58 stack_pointer_delta -= pending_stack_adjust;
59 pending_stack_adjust = 0;
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer.
   NOTE(review): part of the guarding condition is missing from this
   excerpt; the visible clauses require either a guaranteed frame pointer
   (-fno-omit-frame-pointer or an alloca call) and that the function is
   not an inline candidate before the adjustment may be discarded.  */
69 clear_pending_stack_adjust (void)
72 && (! flag_omit_frame_pointer || current_function_calls_alloca)
74 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
75 discard_pending_stack_adjust ();
78 /* Pop any previously-pushed arguments that have not been popped yet.
   Emits an actual stack-pointer adjustment for the accumulated
   PENDING_STACK_ADJUST, unless popping is currently inhibited
   (INHIBIT_DEFER_POP nonzero, e.g. inside NO_DEFER_POP regions).  */
81 do_pending_stack_adjust (void)
83 if (inhibit_defer_pop == 0)
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
91 /* Expand conditional expressions. */
93 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
94 LABEL is an rtx of code CODE_LABEL, in this function and all the
   Thin wrapper over do_jump: LABEL is the false-label, so control
   transfers there when EXP evaluates to zero and falls through otherwise.  */
98 jumpifnot (tree exp, rtx label)
100 do_jump (exp, label, NULL_RTX);
103 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.
    Mirror of jumpifnot: LABEL is passed as the true-label to do_jump.  */
106 jumpif (tree exp, rtx label)
108 do_jump (exp, NULL_RTX, label);
111 /* Used internally by prefer_and_bit_test.
    These rtxes are built once and reused across calls; GTY(()) registers
    them as garbage-collector roots so they survive ggc collections.  */
113 static GTY(()) rtx and_reg;
114 static GTY(()) rtx and_test;
115 static GTY(()) rtx shift_test;
117 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
118 where X is an arbitrary register of mode MODE. Return true if the former
    is cheaper (or equal in cost) according to rtx_cost in an
    IF_THEN_ELSE context.  */
122 prefer_and_bit_test (enum machine_mode mode, int bitnum)
    /* NOTE(review): the branch that decides between first-time construction
       and reuse of the cached rtxes is not visible in this excerpt; the
       first arm builds them, the second retargets their modes.  */
126 /* Set up rtxes for the two variations. Use NULL as a placeholder
127 for the BITNUM-based constants. */
128 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
129 and_test = gen_rtx_AND (mode, and_reg, NULL);
130 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
135 /* Change the mode of the previously-created rtxes. */
136 PUT_MODE (and_reg, mode);
137 PUT_MODE (and_test, mode);
138 PUT_MODE (shift_test, mode);
139 PUT_MODE (XEXP (shift_test, 0), mode);
142 /* Fill in the integers. */
143 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
144 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
    /* Prefer the AND form on a tie, since it is the canonical input.  */
146 return (rtx_cost (and_test, IF_THEN_ELSE)
147 <= rtx_cost (shift_test, IF_THEN_ELSE));
150 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
151 the result is zero, or IF_TRUE_LABEL if the result is one.
152 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
153 meaning fall through in that case.
155 do_jump always does any pending stack adjust except when it does not
156 actually perform a jump. An example where there is no jump
157 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
    NOTE(review): this excerpt is missing many original lines, including
    the switch head and most case labels; the section comments added below
    are inferred from the visible code and should be confirmed against the
    full file.  */
160 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
162 enum tree_code code = TREE_CODE (exp);
166 enum machine_mode mode;
167 rtx drop_through_label = 0;
    /* Constant operand: the branch direction is known at compile time,
       so pick the target label directly (presumably under an INTEGER_CST
       or similar case — label not visible here).  */
175 temp = integer_zerop (exp) ? if_false_label : if_true_label;
181 /* This is not true with #pragma weak */
183 /* The address of something can never be zero. */
185 emit_jump (if_true_label);
    /* Conversion-like node: a bit-field-valued operand cannot be assumed
       nonzero and needs a real test; a narrowing conversion must be
       compared in the narrower (inner) type.  */
190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
191 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
192 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
193 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF
196 /* If we are narrowing the operand, we have to do the compare in the
198 if ((TYPE_PRECISION (TREE_TYPE (exp))
199 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
201 case NON_LVALUE_EXPR:
206 /* These cannot change zero->nonzero or vice versa. */
207 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
211 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
212 See if the former is preferred for jump tests and restore it
    if so — presumably inside a BIT_AND_EXPR case.  */
214 if (integer_onep (TREE_OPERAND (exp, 1)))
216 tree exp0 = TREE_OPERAND (exp, 0);
217 rtx set_label, clr_label;
219 /* Strip narrowing integral type conversions. */
220 while ((TREE_CODE (exp0) == NOP_EXPR
221 || TREE_CODE (exp0) == CONVERT_EXPR
222 || TREE_CODE (exp0) == NON_LVALUE_EXPR)
223 && TREE_OPERAND (exp0, 0) != error_mark_node
224 && TYPE_PRECISION (TREE_TYPE (exp0))
225 <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
226 exp0 = TREE_OPERAND (exp0, 0);
228 /* "exp0 ^ 1" inverts the sense of the single bit test. */
229 if (TREE_CODE (exp0) == BIT_XOR_EXPR
230 && integer_onep (TREE_OPERAND (exp0, 1)))
232 exp0 = TREE_OPERAND (exp0, 0);
233 clr_label = if_true_label;
234 set_label = if_false_label;
238 clr_label = if_false_label;
239 set_label = if_true_label;
    /* Recognize (X >> C) & 1 and, if the AND form is cheaper on this
       target, rebuild it as X & (1 << C) and retry the jump on that.  */
242 if (TREE_CODE (exp0) == RSHIFT_EXPR)
244 tree arg = TREE_OPERAND (exp0, 0);
245 tree shift = TREE_OPERAND (exp0, 1);
246 tree argtype = TREE_TYPE (arg);
247 if (TREE_CODE (shift) == INTEGER_CST
248 && compare_tree_int (shift, 0) >= 0
249 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
250 && prefer_and_bit_test (TYPE_MODE (argtype),
251 TREE_INT_CST_LOW (shift)))
253 HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
254 << TREE_INT_CST_LOW (shift);
255 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
256 build_int_cst_type (argtype, mask)),
257 clr_label, set_label);
263 /* If we are AND'ing with a small constant, do this comparison in the
264 smallest type that fits. If the machine doesn't have comparisons
265 that small, it will be converted back to the wider comparison.
266 This helps if we are testing the sign bit of a narrower object.
267 combine can't do this for us because it can't know whether a
268 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
270 if (! SLOW_BYTE_ACCESS
271 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
272 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
273 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
274 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
275 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
276 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
277 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
278 != CODE_FOR_nothing))
280 do_jump (convert (type, exp), if_false_label, if_true_label);
    /* Logical negation (presumably TRUTH_NOT_EXPR): jump on the operand
       with the two labels swapped.  */
286 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
    /* Conditional expression (a ? b : c), presumably: branch on the
       condition, then jump on whichever arm is selected.  Missing labels
       get a shared drop-through label.  */
291 rtx label1 = gen_label_rtx ();
292 if (!if_true_label || !if_false_label)
294 drop_through_label = gen_label_rtx ();
296 if_true_label = drop_through_label;
298 if_false_label = drop_through_label;
301 do_pending_stack_adjust ();
302 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
303 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
305 do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
309 case TRUTH_ANDIF_EXPR:
310 case TRUTH_ORIF_EXPR:
312 /* Lowered by gimplify.c. */
318 case ARRAY_RANGE_REF:
320 HOST_WIDE_INT bitsize, bitpos;
322 enum machine_mode mode;
327 /* Get description of this reference. We don't actually care
328 about the underlying object here. */
329 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
330 &unsignedp, &volatilep, false);
    /* If the reference fits in a smaller comparable type, test it there.  */
332 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
333 if (! SLOW_BYTE_ACCESS
334 && type != 0 && bitsize >= 0
335 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
336 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
337 != CODE_FOR_nothing))
339 do_jump (convert (type, exp), if_false_label, if_true_label);
    /* Equality comparison (the EQ codes below): complex modes are
       lowered before reaching here, hence the asserts.  */
347 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
349 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
350 != MODE_COMPLEX_FLOAT);
351 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
352 != MODE_COMPLEX_INT);
    /* x == 0 is just the inverse jump on x; wide integer modes with no
       direct compare go word-by-word.  */
354 if (integer_zerop (TREE_OPERAND (exp, 1)))
355 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
356 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
357 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
358 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
360 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
365 /* Nonzero iff operands of minus differ. */
366 exp = build2 (NE_EXPR, TREE_TYPE (exp),
367 TREE_OPERAND (exp, 0),
368 TREE_OPERAND (exp, 1));
    /* Inequality comparison (the NE codes below), mirroring EQ above.  */
372 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
374 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
375 != MODE_COMPLEX_FLOAT);
376 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
377 != MODE_COMPLEX_INT);
379 if (integer_zerop (TREE_OPERAND (exp, 1)))
380 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
381 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
382 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
383 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
385 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
    /* Ordered relational comparisons: each passes the signed and unsigned
       rtx codes to do_compare_and_jump, falling back to word-by-word
       comparison when the mode has no direct compare.  LT: */
390 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
391 if (GET_MODE_CLASS (mode) == MODE_INT
392 && ! can_compare_p (LT, mode, ccp_jump))
393 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
395 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
    /* LE — expressed via swapped-label GT in the by-parts fallback.  */
399 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
400 if (GET_MODE_CLASS (mode) == MODE_INT
401 && ! can_compare_p (LE, mode, ccp_jump))
402 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
404 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
    /* GT.  */
408 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
409 if (GET_MODE_CLASS (mode) == MODE_INT
410 && ! can_compare_p (GT, mode, ccp_jump))
411 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
413 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
    /* GE — swapped-label LT in the by-parts fallback.  */
417 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
418 if (GET_MODE_CLASS (mode) == MODE_INT
419 && ! can_compare_p (GE, mode, ccp_jump))
420 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
422 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
    /* UNORDERED / ORDERED: use whichever of the pair the target supports,
       reversing the labels when testing the complement.  */
428 enum rtx_code cmp, rcmp;
431 if (code == UNORDERED_EXPR)
432 cmp = UNORDERED, rcmp = ORDERED;
434 cmp = ORDERED, rcmp = UNORDERED;
435 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
438 if (! can_compare_p (cmp, mode, ccp_jump)
439 && (can_compare_p (rcmp, mode, ccp_jump)
440 /* If the target doesn't provide either UNORDERED or ORDERED
441 comparisons, canonicalize on UNORDERED for the library. */
442 || rcmp == UNORDERED))
446 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
448 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
    /* Unordered relationals (UNLT and friends, presumably): map each tree
       code to an rtx code plus a two-comparison decomposition for targets
       without combined unordered compares.  */
453 enum rtx_code rcode1;
454 enum tree_code tcode1, tcode2;
458 tcode1 = UNORDERED_EXPR;
463 tcode1 = UNORDERED_EXPR;
468 tcode1 = UNORDERED_EXPR;
473 tcode1 = UNORDERED_EXPR;
478 tcode1 = UNORDERED_EXPR;
482 /* It is ok for LTGT_EXPR to trap when the result is unordered,
483 so expand to (a < b) || (a > b). */
490 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
491 if (can_compare_p (rcode1, mode, ccp_jump))
492 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
    /* save_expr so each operand is evaluated only once across the
       two decomposed comparisons.  */
496 tree op0 = save_expr (TREE_OPERAND (exp, 0));
497 tree op1 = save_expr (TREE_OPERAND (exp, 1));
500 /* If the target doesn't support combined unordered
501 compares, decompose into two comparisons. */
502 if (if_true_label == 0)
503 drop_through_label = if_true_label = gen_label_rtx ();
505 cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
506 cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
507 do_jump (cmp0, 0, if_true_label);
508 do_jump (cmp1, if_false_label, if_true_label);
    /* TRUTH_AND_EXPR, presumably: both operands are unconditionally
       evaluated, but with expensive branches the test short-circuits
       through a drop-through label.  */
514 /* High branch cost, expand as the bitwise AND of the conditions. */
515 if (BRANCH_COST >= 4)
518 if (if_false_label == NULL_RTX)
520 drop_through_label = gen_label_rtx ();
521 do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
522 do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
526 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
527 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
    /* TRUTH_OR_EXPR, presumably: dual of the AND case above.  */
532 /* High branch cost, expand as the bitwise OR of the conditions. */
533 if (BRANCH_COST >= 4)
536 if (if_true_label == NULL_RTX)
538 drop_through_label = gen_label_rtx ();
539 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
540 do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
544 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
545 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
    /* CALL_EXPR, presumably — special-case __builtin_expect so the hint
       reaches the jump instead of being lost behind an SCC operation:  */
550 __builtin_expect (<test>, 0) and
551 __builtin_expect (<test>, 1)
553 We need to do this here, so that <test> is not converted to a SCC
554 operation on machines that use condition code registers and COMPARE
555 like the PowerPC, and then the jump is done based on whether the SCC
556 operation produced a 1 or 0. */
558 /* Check for a built-in function. */
560 tree fndecl = get_callee_fndecl (exp);
561 tree arglist = TREE_OPERAND (exp, 1);
564 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
565 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
566 && arglist != NULL_TREE
567 && TREE_CHAIN (arglist) != NULL_TREE
569 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
580 /* Fall through and generate the normal code. */
    /* Default: expand EXP to rtl and compare the result against zero.  */
583 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
584 do_pending_stack_adjust ();
    /* A constant result decides the branch immediately.  */
586 if (GET_CODE (temp) == CONST_INT
587 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
588 || GET_CODE (temp) == LABEL_REF
590 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
594 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
595 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
596 /* Note swapping the labels gives us not-equal. */
597 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
600 gcc_assert (GET_MODE (temp) != VOIDmode);
602 /* The RTL optimizers prefer comparisons against pseudos. */
603 if (GET_CODE (temp) == SUBREG
605 /* Compare promoted variables in their promoted mode. */
606 if (SUBREG_PROMOTED_VAR_P (temp)
607 && REG_P (XEXP (temp, 0)))
608 temp = XEXP (temp, 0);
610 temp = copy_to_reg (temp);
612 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
613 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
614 GET_MODE (temp), NULL_RTX,
615 if_false_label, if_true_label);
    /* Emit the shared fall-through label, if any branch above created one.  */
619 if (drop_through_label
621 do_pending_stack_adjust ();
622 emit_label (drop_through_label);
626 /* Given a comparison expression EXP for values too wide to be compared
627 with one insn, test the comparison and jump to the appropriate label.
628 The code of EXP is ignored; we always test GT if SWAP is 0,
629 and LT if SWAP is 1.
    LT is obtained by expanding the operands in swapped order (operand
    SWAP first), then delegating the word-by-word GT comparison to
    do_jump_by_parts_greater_rtx.  */
632 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
635 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
636 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
637 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
638 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
640 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
644 /* Compare OP0 with OP1, word at a time, in mode MODE.
645 UNSIGNEDP says to do unsigned comparison.
646 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.
    Either label may be null, in which case a local drop-through label
    substitutes for it.  */
649 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
650 rtx op1, rtx if_false_label, rtx if_true_label)
652 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
653 rtx drop_through_label = 0;
656 if (! if_true_label || ! if_false_label)
657 drop_through_label = gen_label_rtx ();
659 if_true_label = drop_through_label;
660 if (! if_false_label)
661 if_false_label = drop_through_label;
663 /* Compare a word at a time, high order first. */
664 for (i = 0; i < nwords; i++)
666 rtx op0_word, op1_word;
    /* Word 0 is the most significant word on big-endian targets;
       otherwise walk the subwords from the top down.  */
668 if (WORDS_BIG_ENDIAN)
670 op0_word = operand_subword_force (op0, i, mode);
671 op1_word = operand_subword_force (op1, i, mode);
675 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
676 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
679 /* All but high-order word must be compared as unsigned. */
680 do_compare_rtx_and_jump (op0_word, op1_word, GT,
681 (unsignedp || i > 0), word_mode, NULL_RTX,
682 NULL_RTX, if_true_label);
684 /* Consider lower words only if these are equal. */
685 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
686 NULL_RTX, NULL_RTX, if_false_label);
    /* All words compared equal, so OP0 is not greater than OP1.  */
690 emit_jump (if_false_label);
691 if (drop_through_label)
692 emit_label (drop_through_label);
695 /* Given an EQ_EXPR expression EXP for values too wide to be compared
696 with one insn, test the comparison and jump to the appropriate label.
    Each pair of corresponding subwords is compared for equality; any
    mismatch jumps to IF_FALSE_LABEL, and only full equality reaches
    the final jump to IF_TRUE_LABEL.  */
699 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
701 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
702 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
703 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
704 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
706 rtx drop_through_label = 0;
    /* A missing false-label becomes a local drop-through label.  */
708 if (! if_false_label)
709 drop_through_label = if_false_label = gen_label_rtx ();
711 for (i = 0; i < nwords; i++)
712 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
713 operand_subword_force (op1, i, mode),
714 EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
715 word_mode, NULL_RTX, if_false_label, NULL_RTX);
718 emit_jump (if_true_label);
719 if (drop_through_label)
720 emit_label (drop_through_label);
723 /* Jump according to whether OP0 is 0.
724 We assume that OP0 has an integer mode that is too wide
725 for the available compare insns.
    Jumps to IF_FALSE_LABEL when OP0 is zero, IF_TRUE_LABEL otherwise
    (either may be null for fall-through).  */
728 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
730 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
733 rtx drop_through_label = 0;
735 /* The fastest way of doing this comparison on almost any machine is to
736 "or" all the words and compare the result. If all have to be loaded
737 from memory and this is a very wide item, it's possible this may
738 be slower, but that's highly unlikely. */
740 part = gen_reg_rtx (word_mode);
741 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
    /* expand_binop can return 0 on failure; the loop stops early then.  */
742 for (i = 1; i < nwords && part != 0; i++)
743 part = expand_binop (word_mode, ior_optab, part,
744 operand_subword_force (op0, i, GET_MODE (op0)),
745 part, 1, OPTAB_WIDEN);
749 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
750 NULL_RTX, if_false_label, if_true_label);
755 /* If we couldn't do the "or" simply, do this with a series of compares. */
756 if (! if_false_label)
757 drop_through_label = if_false_label = gen_label_rtx ();
759 for (i = 0; i < nwords; i++)
760 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
761 const0_rtx, EQ, 1, word_mode, NULL_RTX,
762 if_false_label, NULL_RTX);
    /* Every word was zero in the fallback path, so OP0 == 0 is false
       only if some earlier compare jumped; here OP0 is nonzero... no —
       each word compared EQ 0 jumps to the false label, so reaching this
       point means some word was nonzero?  NOTE(review): control flow
       around the missing lines (749 vs 755 branches) should be confirmed
       against the full file.  */
765 emit_jump (if_true_label);
767 if (drop_through_label)
768 emit_label (drop_through_label);
771 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
772 MODE is the machine mode of the comparison, not of the result.
773 (including code to compute the values to be compared) and set CC0
774 according to the result. The decision as to signed or unsigned
775 comparison must be made by the caller.
777 We force a stack adjustment unless there are currently
778 things pushed on the stack that aren't yet used.
780 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
    compared (NOTE(review): the tail of this comment is missing from the
    excerpt).  Returns an rtx encoding the comparison result.  */
784 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
785 enum machine_mode mode, rtx size)
789 /* If one operand is constant, make it the second one. Only do this
790 if the other operand is not constant as well. */
792 if (swap_commutative_operands_p (op0, op1))
    /* Swapping operands requires the mirrored condition code.  */
797 code = swap_condition (code);
800 do_pending_stack_adjust ();
802 code = unsignedp ? unsigned_condition (code) : code;
803 tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
    /* A constant simplification means the result is known outright;
       a comparison rtx means the operands were canonicalized.  */
806 if (CONSTANT_P (tem))
809 if (COMPARISON_P (tem))
811 code = GET_CODE (tem);
814 mode = GET_MODE (op0);
815 unsignedp = (code == GTU || code == LTU
816 || code == GEU || code == LEU);
820 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
    /* On cc0 targets the result references cc0; otherwise it carries
       the operands directly.  */
823 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
825 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
829 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
830 The decision as to signed or unsigned comparison must be made by the caller.
832 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
    compared (NOTE(review): the remainder of this comment is not visible
    in this excerpt).  */
836 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
837 enum machine_mode mode, rtx size, rtx if_false_label,
841 int dummy_true_label = 0;
843 /* Reverse the comparison if that is safe and we want to jump if it is
    false.  Reversal is unsafe for floating modes because of NaNs.  */
845 if (! if_true_label && ! FLOAT_MODE_P (mode))
847 if_true_label = if_false_label;
849 code = reverse_condition (code);
852 /* If one operand is constant, make it the second one. Only do this
853 if the other operand is not constant as well. */
855 if (swap_commutative_operands_p (op0, op1))
860 code = swap_condition (code);
863 do_pending_stack_adjust ();
865 code = unsignedp ? unsigned_condition (code) : code;
866 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
    /* Simplified to a constant: the branch target is known now.  */
869 if (CONSTANT_P (tem))
871 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
872 ? if_false_label : if_true_label;
878 code = GET_CODE (tem);
879 mode = GET_MODE (tem);
882 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    /* emit_cmp_and_jump_insns needs a real true-label; synthesize one
       when the caller wanted fall-through on true.  */
887 dummy_true_label = 1;
888 if_true_label = gen_label_rtx ();
891 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
895 emit_jump (if_false_label);
896 if (dummy_true_label)
897 emit_label (if_true_label);
900 /* Generate code for a comparison expression EXP (including code to compute
901 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
902 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
903 generated code will drop through.
904 SIGNED_CODE should be the rtx operation for this comparison for
905 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
907 We force a stack adjustment unless there are currently
908 things pushed on the stack that aren't yet used. */
911 do_compare_and_jump (tree exp, enum rtx_code signed_code,
912 enum rtx_code unsigned_code, rtx if_false_label,
917 enum machine_mode mode;
921 /* Don't crash if the comparison was erroneous. */
922 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
923 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
926 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
927 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
930 type = TREE_TYPE (TREE_OPERAND (exp, 0));
931 mode = TYPE_MODE (type);
932 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
933 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
934 || (GET_MODE_BITSIZE (mode)
935 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
938 /* op0 might have been replaced by promoted constant, in which
939 case the type of second argument should be used. */
940 type = TREE_TYPE (TREE_OPERAND (exp, 1));
941 mode = TYPE_MODE (type);
    /* Signedness of the chosen type selects between the two rtx codes.  */
943 unsignedp = TYPE_UNSIGNED (type);
944 code = unsignedp ? unsigned_code : signed_code;
946 #ifdef HAVE_canonicalize_funcptr_for_compare
947 /* If function pointers need to be "canonicalized" before they can
948 be reliably compared, then canonicalize them.
949 Only do this if *both* sides of the comparison are function pointers.
950 If one side isn't, we want a noncanonicalized comparison. See PR
    middle-end (NOTE(review): the PR number line is missing here).  */
952 if (HAVE_canonicalize_funcptr_for_compare
953 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
954 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
956 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
957 && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
960 rtx new_op0 = gen_reg_rtx (mode);
961 rtx new_op1 = gen_reg_rtx (mode);
963 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
966 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
    /* Delegate the actual compare-and-branch; pass the object size only
       for BLKmode comparisons.  */
971 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
973 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
974 if_false_label, if_true_label);
977 #include "gt-dojump.h"