/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
48 init_pending_stack_adjust (void)
50 pending_stack_adjust = 0;
53 /* Discard any pending stack adjustment. This avoid relying on the
54 RTL optimizers to remove useless adjustments when we know the
55 stack pointer value is dead. */
56 void discard_pending_stack_adjust (void)
58 stack_pointer_delta -= pending_stack_adjust;
59 pending_stack_adjust = 0;
62 /* When exiting from function, if safe, clear out any pending stack adjust
63 so the adjustment won't get done.
65 Note, if the current function calls alloca, then it must have a
66 frame pointer regardless of the value of flag_omit_frame_pointer. */
69 clear_pending_stack_adjust (void)
72 && (! flag_omit_frame_pointer || current_function_calls_alloca)
74 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
75 discard_pending_stack_adjust ();
78 /* Pop any previously-pushed arguments that have not been popped yet. */
81 do_pending_stack_adjust (void)
83 if (inhibit_defer_pop == 0)
85 if (pending_stack_adjust != 0)
86 adjust_stack (GEN_INT (pending_stack_adjust));
87 pending_stack_adjust = 0;
91 /* Expand conditional expressions. */
93 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
94 LABEL is an rtx of code CODE_LABEL, in this function and all the
98 jumpifnot (tree exp, rtx label)
100 do_jump (exp, label, NULL_RTX);
103 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
106 jumpif (tree exp, rtx label)
108 do_jump (exp, NULL_RTX, label);
111 /* Used internally by prefer_and_bit_test. */
113 static GTY(()) rtx and_reg;
114 static GTY(()) rtx and_test;
115 static GTY(()) rtx shift_test;
117 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
118 where X is an arbitrary register of mode MODE. Return true if the former
122 prefer_and_bit_test (enum machine_mode mode, int bitnum)
126 /* Set up rtxes for the two variations. Use NULL as a placeholder
127 for the BITNUM-based constants. */
128 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
129 and_test = gen_rtx_AND (mode, and_reg, NULL);
130 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
135 /* Change the mode of the previously-created rtxes. */
136 PUT_MODE (and_reg, mode);
137 PUT_MODE (and_test, mode);
138 PUT_MODE (shift_test, mode);
139 PUT_MODE (XEXP (shift_test, 0), mode);
142 /* Fill in the integers. */
143 XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
144 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
146 return (rtx_cost (and_test, IF_THEN_ELSE)
147 <= rtx_cost (shift_test, IF_THEN_ELSE));
150 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
151 the result is zero, or IF_TRUE_LABEL if the result is one.
152 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
153 meaning fall through in that case.
155 do_jump always does any pending stack adjust except when it does not
156 actually perform a jump. An example where there is no jump
157 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null. */
160 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
162 enum tree_code code = TREE_CODE (exp);
166 enum machine_mode mode;
174 temp = integer_zerop (exp) ? if_false_label : if_true_label;
180 /* This is not true with #pragma weak */
182 /* The address of something can never be zero. */
184 emit_jump (if_true_label);
189 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
190 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
191 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
192 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
195 /* If we are narrowing the operand, we have to do the compare in the
197 if ((TYPE_PRECISION (TREE_TYPE (exp))
198 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
200 case NON_LVALUE_EXPR:
205 /* These cannot change zero->nonzero or vice versa. */
206 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
210 /* Nonzero iff operands of minus differ. */
211 do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
212 TREE_OPERAND (exp, 0),
213 TREE_OPERAND (exp, 1)),
214 NE, NE, if_false_label, if_true_label);
218 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
219 See if the former is preferred for jump tests and restore it
221 if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
222 && integer_onep (TREE_OPERAND (exp, 1)))
224 tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
225 tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
226 tree one = TREE_OPERAND (exp, 1);
227 tree argtype = TREE_TYPE (arg);
228 if (TREE_CODE (shift) == INTEGER_CST
229 && compare_tree_int (shift, 0) > 0
230 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
231 && prefer_and_bit_test (TYPE_MODE (argtype),
232 TREE_INT_CST_LOW (shift)))
234 do_jump (build2 (BIT_AND_EXPR, argtype, arg,
235 fold (build2 (LSHIFT_EXPR, argtype,
237 if_false_label, if_true_label);
242 /* If we are AND'ing with a small constant, do this comparison in the
243 smallest type that fits. If the machine doesn't have comparisons
244 that small, it will be converted back to the wider comparison.
245 This helps if we are testing the sign bit of a narrower object.
246 combine can't do this for us because it can't know whether a
247 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
249 if (! SLOW_BYTE_ACCESS
250 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
251 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
252 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
253 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
254 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
255 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
256 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
257 != CODE_FOR_nothing))
259 do_jump (convert (type, exp), if_false_label, if_true_label);
265 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
268 case TRUTH_ANDIF_EXPR:
269 case TRUTH_ORIF_EXPR:
272 /* Lowered by gimplify.c. */
278 case ARRAY_RANGE_REF:
280 HOST_WIDE_INT bitsize, bitpos;
282 enum machine_mode mode;
287 /* Get description of this reference. We don't actually care
288 about the underlying object here. */
289 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
290 &unsignedp, &volatilep);
292 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
293 if (! SLOW_BYTE_ACCESS
294 && type != 0 && bitsize >= 0
295 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
296 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
297 != CODE_FOR_nothing))
299 do_jump (convert (type, exp), if_false_label, if_true_label);
307 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
309 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
310 != MODE_COMPLEX_FLOAT);
311 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
312 != MODE_COMPLEX_INT);
314 if (integer_zerop (TREE_OPERAND (exp, 1)))
315 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
316 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
317 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
318 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
320 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
326 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
328 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
329 != MODE_COMPLEX_FLOAT);
330 gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
331 != MODE_COMPLEX_INT);
333 if (integer_zerop (TREE_OPERAND (exp, 1)))
334 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
335 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
336 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
337 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
339 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
344 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
345 if (GET_MODE_CLASS (mode) == MODE_INT
346 && ! can_compare_p (LT, mode, ccp_jump))
347 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
349 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
353 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
354 if (GET_MODE_CLASS (mode) == MODE_INT
355 && ! can_compare_p (LE, mode, ccp_jump))
356 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
358 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
363 if (GET_MODE_CLASS (mode) == MODE_INT
364 && ! can_compare_p (GT, mode, ccp_jump))
365 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
367 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
372 if (GET_MODE_CLASS (mode) == MODE_INT
373 && ! can_compare_p (GE, mode, ccp_jump))
374 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
376 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
382 enum rtx_code cmp, rcmp;
385 if (code == UNORDERED_EXPR)
386 cmp = UNORDERED, rcmp = ORDERED;
388 cmp = ORDERED, rcmp = UNORDERED;
389 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
392 if (! can_compare_p (cmp, mode, ccp_jump)
393 && (can_compare_p (rcmp, mode, ccp_jump)
394 /* If the target doesn't provide either UNORDERED or ORDERED
395 comparisons, canonicalize on UNORDERED for the library. */
396 || rcmp == UNORDERED))
400 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
402 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
407 enum rtx_code rcode1;
408 enum tree_code tcode1, tcode2;
412 tcode1 = UNORDERED_EXPR;
417 tcode1 = UNORDERED_EXPR;
422 tcode1 = UNORDERED_EXPR;
427 tcode1 = UNORDERED_EXPR;
432 tcode1 = UNORDERED_EXPR;
436 /* It is ok for LTGT_EXPR to trap when the result is unordered,
437 so expand to (a < b) || (a > b). */
444 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
445 if (can_compare_p (rcode1, mode, ccp_jump))
446 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
450 tree op0 = save_expr (TREE_OPERAND (exp, 0));
451 tree op1 = save_expr (TREE_OPERAND (exp, 1));
453 rtx drop_through_label = 0;
455 /* If the target doesn't support combined unordered
456 compares, decompose into two comparisons. */
457 if (if_true_label == 0)
458 drop_through_label = if_true_label = gen_label_rtx ();
460 cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
461 cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
462 do_jump (cmp0, 0, if_true_label);
463 do_jump (cmp1, if_false_label, if_true_label);
465 if (drop_through_label)
467 do_pending_stack_adjust ();
468 emit_label (drop_through_label);
475 __builtin_expect (<test>, 0) and
476 __builtin_expect (<test>, 1)
478 We need to do this here, so that <test> is not converted to a SCC
479 operation on machines that use condition code registers and COMPARE
480 like the PowerPC, and then the jump is done based on whether the SCC
481 operation produced a 1 or 0. */
483 /* Check for a built-in function. */
485 tree fndecl = get_callee_fndecl (exp);
486 tree arglist = TREE_OPERAND (exp, 1);
489 && DECL_BUILT_IN (fndecl)
490 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
491 && arglist != NULL_TREE
492 && TREE_CHAIN (arglist) != NULL_TREE)
494 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
504 /* Fall through and generate the normal code. */
508 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
509 do_pending_stack_adjust ();
511 if (GET_CODE (temp) == CONST_INT
512 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
513 || GET_CODE (temp) == LABEL_REF)
515 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
519 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
520 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
521 /* Note swapping the labels gives us not-equal. */
522 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
525 gcc_assert (GET_MODE (temp) != VOIDmode);
527 /* The RTL optimizers prefer comparisons against pseudos. */
528 if (GET_CODE (temp) == SUBREG)
530 /* Compare promoted variables in their promoted mode. */
531 if (SUBREG_PROMOTED_VAR_P (temp)
532 && REG_P (XEXP (temp, 0)))
533 temp = XEXP (temp, 0);
535 temp = copy_to_reg (temp);
537 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
538 NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
539 GET_MODE (temp), NULL_RTX,
540 if_false_label, if_true_label);
545 /* Given a comparison expression EXP for values too wide to be compared
546 with one insn, test the comparison and jump to the appropriate label.
547 The code of EXP is ignored; we always test GT if SWAP is 0,
548 and LT if SWAP is 1. */
551 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
554 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
555 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
556 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
557 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
559 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
563 /* Compare OP0 with OP1, word at a time, in mode MODE.
564 UNSIGNEDP says to do unsigned comparison.
565 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
568 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
569 rtx op1, rtx if_false_label, rtx if_true_label)
571 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
572 rtx drop_through_label = 0;
575 if (! if_true_label || ! if_false_label)
576 drop_through_label = gen_label_rtx ();
578 if_true_label = drop_through_label;
579 if (! if_false_label)
580 if_false_label = drop_through_label;
582 /* Compare a word at a time, high order first. */
583 for (i = 0; i < nwords; i++)
585 rtx op0_word, op1_word;
587 if (WORDS_BIG_ENDIAN)
589 op0_word = operand_subword_force (op0, i, mode);
590 op1_word = operand_subword_force (op1, i, mode);
594 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
595 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
598 /* All but high-order word must be compared as unsigned. */
599 do_compare_rtx_and_jump (op0_word, op1_word, GT,
600 (unsignedp || i > 0), word_mode, NULL_RTX,
601 NULL_RTX, if_true_label);
603 /* Consider lower words only if these are equal. */
604 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
605 NULL_RTX, NULL_RTX, if_false_label);
609 emit_jump (if_false_label);
610 if (drop_through_label)
611 emit_label (drop_through_label);
614 /* Given an EQ_EXPR expression EXP for values too wide to be compared
615 with one insn, test the comparison and jump to the appropriate label. */
618 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
620 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
621 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
622 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
623 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
625 rtx drop_through_label = 0;
627 if (! if_false_label)
628 drop_through_label = if_false_label = gen_label_rtx ();
630 for (i = 0; i < nwords; i++)
631 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
632 operand_subword_force (op1, i, mode),
633 EQ, TYPE_UNSIGNED (TREE_TYPE (exp)),
634 word_mode, NULL_RTX, if_false_label, NULL_RTX);
637 emit_jump (if_true_label);
638 if (drop_through_label)
639 emit_label (drop_through_label);
642 /* Jump according to whether OP0 is 0.
643 We assume that OP0 has an integer mode that is too wide
644 for the available compare insns. */
647 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
649 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
652 rtx drop_through_label = 0;
654 /* The fastest way of doing this comparison on almost any machine is to
655 "or" all the words and compare the result. If all have to be loaded
656 from memory and this is a very wide item, it's possible this may
657 be slower, but that's highly unlikely. */
659 part = gen_reg_rtx (word_mode);
660 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
661 for (i = 1; i < nwords && part != 0; i++)
662 part = expand_binop (word_mode, ior_optab, part,
663 operand_subword_force (op0, i, GET_MODE (op0)),
664 part, 1, OPTAB_WIDEN);
668 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
669 NULL_RTX, if_false_label, if_true_label);
674 /* If we couldn't do the "or" simply, do this with a series of compares. */
675 if (! if_false_label)
676 drop_through_label = if_false_label = gen_label_rtx ();
678 for (i = 0; i < nwords; i++)
679 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
680 const0_rtx, EQ, 1, word_mode, NULL_RTX,
681 if_false_label, NULL_RTX);
684 emit_jump (if_true_label);
686 if (drop_through_label)
687 emit_label (drop_through_label);
690 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
691 MODE is the machine mode of the comparison, not of the result.
692 (including code to compute the values to be compared) and set CC0
693 according to the result. The decision as to signed or unsigned
694 comparison must be made by the caller.
696 We force a stack adjustment unless there are currently
697 things pushed on the stack that aren't yet used.
699 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
703 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
704 enum machine_mode mode, rtx size)
708 /* If one operand is constant, make it the second one. Only do this
709 if the other operand is not constant as well. */
711 if (swap_commutative_operands_p (op0, op1))
716 code = swap_condition (code);
721 op0 = force_not_mem (op0);
722 op1 = force_not_mem (op1);
725 do_pending_stack_adjust ();
727 code = unsignedp ? unsigned_condition (code) : code;
728 tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
731 if (CONSTANT_P (tem))
734 if (COMPARISON_P (tem))
736 code = GET_CODE (tem);
739 mode = GET_MODE (op0);
740 unsignedp = (code == GTU || code == LTU
741 || code == GEU || code == LEU);
745 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
748 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
750 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
754 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
755 The decision as to signed or unsigned comparison must be made by the caller.
757 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
761 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
762 enum machine_mode mode, rtx size, rtx if_false_label,
766 int dummy_true_label = 0;
768 /* Reverse the comparison if that is safe and we want to jump if it is
770 if (! if_true_label && ! FLOAT_MODE_P (mode))
772 if_true_label = if_false_label;
774 code = reverse_condition (code);
777 /* If one operand is constant, make it the second one. Only do this
778 if the other operand is not constant as well. */
780 if (swap_commutative_operands_p (op0, op1))
785 code = swap_condition (code);
790 op0 = force_not_mem (op0);
791 op1 = force_not_mem (op1);
794 do_pending_stack_adjust ();
796 code = unsignedp ? unsigned_condition (code) : code;
797 if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
800 if (CONSTANT_P (tem))
802 rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
803 ? if_false_label : if_true_label;
809 code = GET_CODE (tem);
810 mode = GET_MODE (tem);
813 unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
818 dummy_true_label = 1;
819 if_true_label = gen_label_rtx ();
822 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
826 emit_jump (if_false_label);
827 if (dummy_true_label)
828 emit_label (if_true_label);
831 /* Generate code for a comparison expression EXP (including code to compute
832 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
833 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
834 generated code will drop through.
835 SIGNED_CODE should be the rtx operation for this comparison for
836 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
838 We force a stack adjustment unless there are currently
839 things pushed on the stack that aren't yet used. */
842 do_compare_and_jump (tree exp, enum rtx_code signed_code,
843 enum rtx_code unsigned_code, rtx if_false_label,
848 enum machine_mode mode;
852 /* Don't crash if the comparison was erroneous. */
853 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
854 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
857 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
858 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
861 type = TREE_TYPE (TREE_OPERAND (exp, 0));
862 mode = TYPE_MODE (type);
863 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
864 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
865 || (GET_MODE_BITSIZE (mode)
866 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
869 /* op0 might have been replaced by promoted constant, in which
870 case the type of second argument should be used. */
871 type = TREE_TYPE (TREE_OPERAND (exp, 1));
872 mode = TYPE_MODE (type);
874 unsignedp = TYPE_UNSIGNED (type);
875 code = unsignedp ? unsigned_code : signed_code;
877 #ifdef HAVE_canonicalize_funcptr_for_compare
878 /* If function pointers need to be "canonicalized" before they can
879 be reliably compared, then canonicalize them. */
880 if (HAVE_canonicalize_funcptr_for_compare
881 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
882 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
885 rtx new_op0 = gen_reg_rtx (mode);
887 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
891 if (HAVE_canonicalize_funcptr_for_compare
892 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
893 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
896 rtx new_op1 = gen_reg_rtx (mode);
898 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
903 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
905 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
906 if_false_label, if_true_label);
909 #include "gt-dojump.h"