1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 #include "coretypes.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "langhooks.h"
/* Forward declarations for the file-static helpers defined below.  */
38 static bool prefer_and_bit_test (enum machine_mode, int);
39 static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40 static void do_jump_by_parts_equality (tree, rtx, rtx);
/* NOTE(review): the prototype below is truncated in this excerpt; its
   trailing parameter(s) are on an elided continuation line.  */
41 static void do_compare_and_jump (tree, enum rtx_code, enum rtx_code, rtx,
44 /* At the start of a function, record that we have no previously-pushed
45 arguments waiting to be popped. */
/* Reset the pending-pop bookkeeping: at function entry no pushed
   arguments are waiting to be popped.  (Return-type line and braces
   are elided from this excerpt.)  */
48 init_pending_stack_adjust (void)
50 pending_stack_adjust = 0;
53 /* When exiting from function, if safe, clear out any pending stack adjust
54 so the adjustment won't get done.
56 Note, if the current function calls alloca, then it must have a
57 frame pointer regardless of the value of flag_omit_frame_pointer. */
60 clear_pending_stack_adjust (void)
/* It is only safe to discard the adjustment when the function is
   guaranteed a frame pointer anyway (alloca forces one) and cannot be
   inlined elsewhere — an inlined copy would need an accurate stack
   pointer.  (The leading condition of this if is elided here.)  */
63 && (! flag_omit_frame_pointer || current_function_calls_alloca)
65 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
66 && ! flag_inline_functions)
/* Comma expression: undo the recorded delta and clear the pending
   adjustment as one statement.  */
68 stack_pointer_delta -= pending_stack_adjust,
69 pending_stack_adjust = 0;
73 /* Pop any previously-pushed arguments that have not been popped yet. */
76 do_pending_stack_adjust (void)
/* Skip entirely while pops are being deferred (inhibit_defer_pop
   nonzero); otherwise emit the deferred stack adjustment now.  */
78 if (inhibit_defer_pop == 0)
80 if (pending_stack_adjust != 0)
81 adjust_stack (GEN_INT (pending_stack_adjust));
82 pending_stack_adjust = 0;
86 /* Expand conditional expressions. */
88 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
89 LABEL is an rtx of code CODE_LABEL, in this function and all the
93 jumpifnot (tree exp, rtx label)
/* Jump to LABEL when EXP is zero: LABEL is the false-target, and the
   true case falls through (NULL_RTX).  */
95 do_jump (exp, label, NULL_RTX);
98 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
101 jumpif (tree exp, rtx label)
/* Jump to LABEL when EXP is nonzero: LABEL is the true-target, and the
   false case falls through (NULL_RTX).  */
103 do_jump (exp, NULL_RTX, label);
106 /* Used internally by prefer_and_bit_test. */
/* Scratch rtxes reused across calls to prefer_and_bit_test so the two
   candidate patterns need only be built once; GTY(()) roots them for
   the garbage collector.  */
108 static GTY(()) rtx and_reg;
109 static GTY(()) rtx and_test;
110 static GTY(()) rtx shift_test;
112 /* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
113 where X is an arbitrary register of mode MODE. Return true if the former
/* Return true if "(X & (1 << BITNUM))" is no more expensive than
   "(X >> BITNUM) & 1" according to rtx_cost in an IF_THEN_ELSE
   (conditional-jump) context.  Lazily builds and then mutates a pair
   of cached template rtxes rather than allocating on every call.
   (The first-call/subsequent-call branch structure is elided from
   this excerpt.)  */
117 prefer_and_bit_test (enum machine_mode mode, int bitnum)
121 /* Set up rtxes for the two variations. Use NULL as a placeholder
122 for the BITNUM-based constants. */
123 and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
124 and_test = gen_rtx_AND (mode, and_reg, NULL);
125 shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
130 /* Change the mode of the previously-created rtxes. */
131 PUT_MODE (and_reg, mode);
132 PUT_MODE (and_test, mode);
133 PUT_MODE (shift_test, mode);
134 PUT_MODE (XEXP (shift_test, 0), mode);
137 /* Fill in the integers. */
138 XEXP (and_test, 0) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
139 XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);
/* Prefer the AND form on a cost tie.  */
141 return (rtx_cost (and_test, IF_THEN_ELSE)
142 <= rtx_cost (shift_test, IF_THEN_ELSE));
145 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
146 the result is zero, or IF_TRUE_LABEL if the result is one.
147 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
148 meaning fall through in that case.
150 do_jump always does any pending stack adjust except when it does not
151 actually perform a jump. An example where there is no jump
152 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
154 This function is responsible for optimizing cases such as
155 &&, || and comparison operators in EXP. */
/* Emit RTL that evaluates EXP and branches to IF_TRUE_LABEL when it is
   nonzero or IF_FALSE_LABEL when it is zero; either label may be 0,
   meaning fall through.  Dispatches on TREE_CODE (exp) to expand
   short-circuit operators, comparisons, and conditionals directly as
   control flow instead of materializing a 0/1 value.
   NOTE(review): this excerpt is a sampled dump — the switch statement,
   many case labels, and most braces are elided, so the comments below
   are keyed only to the fragments that remain.  */
158 do_jump (tree exp, rtx if_false_label, rtx if_true_label)
160 enum tree_code code = TREE_CODE (exp);
161 /* Some cases need to create a label to jump to
162 in order to properly fall through.
163 These cases set DROP_THROUGH_LABEL nonzero. */
164 rtx drop_through_label = 0;
168 enum machine_mode mode;
/* Constant operand: the branch direction is known at compile time, so
   pick the matching label and jump unconditionally.  */
178 temp = integer_zerop (exp) ? if_false_label : if_true_label;
184 /* This is not true with #pragma weak */
186 /* The address of something can never be zero. */
188 emit_jump (if_true_label);
/* Conversions/wrappers that cannot change zero-ness simply recurse on
   the operand (case labels elided).  */
193 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
194 TREE_OPERAND (exp, 0)
195 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
199 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
200 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
201 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
202 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
205 /* If we are narrowing the operand, we have to do the compare in the
207 if ((TYPE_PRECISION (TREE_TYPE (exp))
208 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
210 case NON_LVALUE_EXPR:
216 /* These cannot change zero->nonzero or vice versa. */
217 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
220 case WITH_RECORD_EXPR:
221 /* Put the object on the placeholder list, recurse through our first
222 operand, and pop the list. */
223 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
225 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
226 placeholder_list = TREE_CHAIN (placeholder_list);
230 /* This is never less insns than evaluating the PLUS_EXPR followed by
231 a test and can be longer if the test is eliminated. */
233 /* Reduce to minus. */
234 exp = build (MINUS_EXPR, TREE_TYPE (exp),
235 TREE_OPERAND (exp, 0),
236 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
237 TREE_OPERAND (exp, 1))));
238 /* Process as MINUS. */
/* MINUS: a - b is nonzero iff a != b, so rebuild as an NE comparison
   and jump on that.  */
242 /* Nonzero iff operands of minus differ. */
243 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
244 TREE_OPERAND (exp, 0),
245 TREE_OPERAND (exp, 1)),
246 NE, NE, if_false_label, if_true_label);
250 /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
251 See if the former is preferred for jump tests and restore it
253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == RSHIFT_EXPR
254 && integer_onep (TREE_OPERAND (exp, 1)))
256 tree arg = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
257 tree shift = TREE_OPERAND (TREE_OPERAND (exp, 0), 1);
258 tree one = TREE_OPERAND (exp, 1);
259 tree argtype = TREE_TYPE (arg);
260 if (TREE_CODE (shift) == INTEGER_CST
261 && compare_tree_int (shift, 0) > 0
262 && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
263 && prefer_and_bit_test (TYPE_MODE (argtype),
264 TREE_INT_CST_LOW (shift)))
266 do_jump (build (BIT_AND_EXPR, argtype, arg,
267 fold (build (LSHIFT_EXPR, argtype, one, shift))),
268 if_false_label, if_true_label);
273 /* If we are AND'ing with a small constant, do this comparison in the
274 smallest type that fits. If the machine doesn't have comparisons
275 that small, it will be converted back to the wider comparison.
276 This helps if we are testing the sign bit of a narrower object.
277 combine can't do this for us because it can't know whether a
278 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
280 if (! SLOW_BYTE_ACCESS
281 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
282 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
283 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
284 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
285 && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
286 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
287 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
288 != CODE_FOR_nothing))
290 do_jump (convert (type, exp), if_false_label, if_true_label);
/* Logical negation: implemented by swapping the two target labels
   (case label elided in this excerpt).  */
296 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* "A && B": if A is false we are done; only evaluate B when A held.
   A synthesized drop-through label serves as the shared false target
   when the caller supplied none.  */
299 case TRUTH_ANDIF_EXPR:
300 if (if_false_label == 0)
301 if_false_label = drop_through_label = gen_label_rtx ();
302 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
303 start_cleanup_deferral ();
304 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
305 end_cleanup_deferral ();
/* "A || B": mirror image — if A is true we are done; only evaluate B
   when A failed.  */
308 case TRUTH_ORIF_EXPR:
309 if (if_true_label == 0)
310 if_true_label = drop_through_label = gen_label_rtx ();
311 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
312 start_cleanup_deferral ();
313 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
314 end_cleanup_deferral ();
/* Comma-like expression: evaluate the first operand for its side
   effects only, then branch on the second.  */
319 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
320 preserve_temp_slots (NULL_RTX);
324 do_pending_stack_adjust ();
325 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
331 case ARRAY_RANGE_REF:
333 HOST_WIDE_INT bitsize, bitpos;
335 enum machine_mode mode;
340 /* Get description of this reference. We don't actually care
341 about the underlying object here. */
342 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
343 &unsignedp, &volatilep);
/* If the referenced field fits a narrower type the target can compare
   directly, do the zero-test in that narrower type.  */
345 type = lang_hooks.types.type_for_size (bitsize, unsignedp);
346 if (! SLOW_BYTE_ACCESS
347 && type != 0 && bitsize >= 0
348 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
349 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
350 != CODE_FOR_nothing))
352 do_jump (convert (type, exp), if_false_label, if_true_label);
359 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
360 if (integer_onep (TREE_OPERAND (exp, 1))
361 && integer_zerop (TREE_OPERAND (exp, 2)))
362 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
364 else if (integer_zerop (TREE_OPERAND (exp, 1))
365 && integer_onep (TREE_OPERAND (exp, 2)))
366 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* General conditional: branch on the condition, expand both arms,
   each jumping to the caller's labels or to a common drop-through.  */
370 rtx label1 = gen_label_rtx ();
371 drop_through_label = gen_label_rtx ();
373 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
375 start_cleanup_deferral ();
376 /* Now the THEN-expression. */
377 do_jump (TREE_OPERAND (exp, 1),
378 if_false_label ? if_false_label : drop_through_label,
379 if_true_label ? if_true_label : drop_through_label);
380 /* In case the do_jump just above never jumps. */
381 do_pending_stack_adjust ();
384 /* Now the ELSE-expression. */
385 do_jump (TREE_OPERAND (exp, 2),
386 if_false_label ? if_false_label : drop_through_label,
387 if_true_label ? if_true_label : drop_through_label);
388 end_cleanup_deferral ();
/* Equality comparison (case label elided).  Complex operands are
   decomposed into (real==real) && (imag==imag); save_expr keeps each
   operand from being evaluated twice.  */
394 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
396 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
397 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
399 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
400 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
403 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
404 fold (build (EQ_EXPR, TREE_TYPE (exp),
405 fold (build1 (REALPART_EXPR,
406 TREE_TYPE (inner_type),
408 fold (build1 (REALPART_EXPR,
409 TREE_TYPE (inner_type),
411 fold (build (EQ_EXPR, TREE_TYPE (exp),
412 fold (build1 (IMAGPART_EXPR,
413 TREE_TYPE (inner_type),
415 fold (build1 (IMAGPART_EXPR,
416 TREE_TYPE (inner_type),
418 if_false_label, if_true_label);
421 else if (integer_zerop (TREE_OPERAND (exp, 1)))
422 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
/* Wide integers with no direct EQ branch go word-by-word.  */
424 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
425 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
426 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
428 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
/* Inequality comparison: same structure as EQ but decomposed as
   (real!=real) || (imag!=imag), and the by-parts fallback swaps the
   labels.  */
434 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
436 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
437 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
439 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
440 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
443 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
444 fold (build (NE_EXPR, TREE_TYPE (exp),
445 fold (build1 (REALPART_EXPR,
446 TREE_TYPE (inner_type),
448 fold (build1 (REALPART_EXPR,
449 TREE_TYPE (inner_type),
451 fold (build (NE_EXPR, TREE_TYPE (exp),
452 fold (build1 (IMAGPART_EXPR,
453 TREE_TYPE (inner_type),
455 fold (build1 (IMAGPART_EXPR,
456 TREE_TYPE (inner_type),
458 if_false_label, if_true_label);
461 else if (integer_zerop (TREE_OPERAND (exp, 1)))
462 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
464 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
465 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
466 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
468 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
/* The four ordering comparisons (LT/LE/GT/GE; case labels elided)
   follow one pattern: when the integer mode has no direct conditional
   branch, fall back to word-by-word comparison via
   do_jump_by_parts_greater (the swap/label arguments encode which
   relation is wanted); otherwise emit a single compare-and-jump with
   the signed code and its unsigned counterpart.  */
473 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
474 if (GET_MODE_CLASS (mode) == MODE_INT
475 && ! can_compare_p (LT, mode, ccp_jump))
476 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
478 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
482 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
483 if (GET_MODE_CLASS (mode) == MODE_INT
484 && ! can_compare_p (LE, mode, ccp_jump))
485 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
487 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
491 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
492 if (GET_MODE_CLASS (mode) == MODE_INT
493 && ! can_compare_p (GT, mode, ccp_jump))
494 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
496 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
500 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
501 if (GET_MODE_CLASS (mode) == MODE_INT
502 && ! can_compare_p (GE, mode, ccp_jump))
503 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
505 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
/* UNORDERED/ORDERED: use whichever of the pair the target supports,
   reversing labels when emitting the complement.  */
511 enum rtx_code cmp, rcmp;
514 if (code == UNORDERED_EXPR)
515 cmp = UNORDERED, rcmp = ORDERED;
517 cmp = ORDERED, rcmp = UNORDERED;
518 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
521 if (! can_compare_p (cmp, mode, ccp_jump)
522 && (can_compare_p (rcmp, mode, ccp_jump)
523 /* If the target doesn't provide either UNORDERED or ORDERED
524 comparisons, canonicalize on UNORDERED for the library. */
525 || rcmp == UNORDERED))
529 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
531 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
/* Unordered-or-<relation> comparisons (case labels and the rcode1/
   tcode2 assignments are elided): emit directly when supported,
   otherwise rewrite as UNORDERED || <ordered comparison>.  */
536 enum rtx_code rcode1;
537 enum tree_code tcode2;
561 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
562 if (can_compare_p (rcode1, mode, ccp_jump))
563 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
567 tree op0 = save_expr (TREE_OPERAND (exp, 0));
568 tree op1 = save_expr (TREE_OPERAND (exp, 1));
571 /* If the target doesn't support combined unordered
572 compares, decompose into UNORDERED + comparison. */
573 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
574 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
575 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
576 do_jump (exp, if_false_label, if_true_label);
/* CALL_EXPR handling (label elided): special-case
582 __builtin_expect (<test>, 0) and
583 __builtin_expect (<test>, 1)
585 We need to do this here, so that <test> is not converted to a SCC
586 operation on machines that use condition code registers and COMPARE
587 like the PowerPC, and then the jump is done based on whether the SCC
588 operation produced a 1 or 0. */
590 /* Check for a built-in function. */
592 tree fndecl = get_callee_fndecl (exp);
593 tree arglist = TREE_OPERAND (exp, 1);
596 && DECL_BUILT_IN (fndecl)
597 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
598 && arglist != NULL_TREE
599 && TREE_CHAIN (arglist) != NULL_TREE)
601 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
611 /* Fall through and generate the normal code. */
/* Apparent default arm of the (elided) switch: evaluate EXP and
   branch on the result compared against zero.  */
615 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
617 /* This is not needed any more and causes poor code since it causes
618 comparisons and tests from non-SI objects to have different code
620 /* Copy to register to avoid generating bad insns by cse
621 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
622 if (!cse_not_expected && GET_CODE (temp) == MEM)
623 temp = copy_to_reg (temp);
625 do_pending_stack_adjust ();
626 /* Do any postincrements in the expression that was tested. */
/* Result folded to a compile-time constant: jump straight to the
   corresponding label.  */
629 if (GET_CODE (temp) == CONST_INT
630 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
631 || GET_CODE (temp) == LABEL_REF)
633 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
637 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
638 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
639 /* Note swapping the labels gives us not-equal. */
640 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
641 else if (GET_MODE (temp) != VOIDmode)
643 /* The RTL optimizers prefer comparisons against pseudos. */
644 if (GET_CODE (temp) == SUBREG)
646 /* Compare promoted variables in their promoted mode. */
647 if (SUBREG_PROMOTED_VAR_P (temp)
648 && GET_CODE (XEXP (temp, 0)) == REG)
649 temp = XEXP (temp, 0);
651 temp = copy_to_reg (temp);
653 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
654 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
655 GET_MODE (temp), NULL_RTX,
656 if_false_label, if_true_label);
662 if (drop_through_label)
664 /* If do_jump produces code that might be jumped around,
665 do any stack adjusts from that code, before the place
666 where control merges in. */
667 do_pending_stack_adjust ();
668 emit_label (drop_through_label);
672 /* Given a comparison expression EXP for values too wide to be compared
673 with one insn, test the comparison and jump to the appropriate label.
674 The code of EXP is ignored; we always test GT if SWAP is 0,
675 and LT if SWAP is 1. */
/* Expand both operands of comparison EXP and delegate the multiword
   compare to do_jump_by_parts_greater_rtx.  SWAP==0 tests GT; SWAP==1
   expands the operands in reversed order, which turns the GT test into
   LT.  (The second parameter line of the header is elided.)  */
678 do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
681 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
682 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
683 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
684 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
686 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
689 /* Compare OP0 with OP1, word at a time, in mode MODE.
690 UNSIGNEDP says to do unsigned comparison.
691 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
/* Multiword greater-than: compare OP0 and OP1 one word at a time in
   MODE, most significant word first, jumping to IF_TRUE_LABEL when
   OP0 > OP1 and IF_FALSE_LABEL otherwise.  A missing label is replaced
   by a locally generated drop-through label.  */
694 do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
695 rtx op1, rtx if_false_label, rtx if_true_label)
697 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
698 rtx drop_through_label = 0;
701 if (! if_true_label || ! if_false_label)
702 drop_through_label = gen_label_rtx ();
704 if_true_label = drop_through_label;
705 if (! if_false_label)
706 if_false_label = drop_through_label;
708 /* Compare a word at a time, high order first. */
709 for (i = 0; i < nwords; i++)
711 rtx op0_word, op1_word;
/* Word 0 is the most significant word on big-endian targets; on
   little-endian targets start from the highest-numbered subword.  */
713 if (WORDS_BIG_ENDIAN)
715 op0_word = operand_subword_force (op0, i, mode);
716 op1_word = operand_subword_force (op1, i, mode);
720 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
721 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
724 /* All but high-order word must be compared as unsigned. */
725 do_compare_rtx_and_jump (op0_word, op1_word, GT,
726 (unsignedp || i > 0), word_mode, NULL_RTX,
727 NULL_RTX, if_true_label);
729 /* Consider lower words only if these are equal. */
730 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
731 NULL_RTX, NULL_RTX, if_false_label);
/* All words equal: the relation is false.  */
735 emit_jump (if_false_label);
736 if (drop_through_label)
737 emit_label (drop_through_label);
740 /* Given an EQ_EXPR expression EXP for values too wide to be compared
741 with one insn, test the comparison and jump to the appropriate label. */
/* Multiword equality for tree comparison EXP: compare each word pair,
   jumping to IF_FALSE_LABEL on the first mismatch; if every word
   matched, jump to IF_TRUE_LABEL.  A null IF_FALSE_LABEL becomes a
   local drop-through label.  */
744 do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
746 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
747 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
748 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
749 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
751 rtx drop_through_label = 0;
753 if (! if_false_label)
754 drop_through_label = if_false_label = gen_label_rtx ();
756 for (i = 0; i < nwords; i++)
757 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
758 operand_subword_force (op1, i, mode),
759 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
760 word_mode, NULL_RTX, if_false_label, NULL_RTX);
/* Every word compared equal.  */
763 emit_jump (if_true_label);
764 if (drop_through_label)
765 emit_label (drop_through_label);
768 /* Jump according to whether OP0 is 0.
769 We assume that OP0 has an integer mode that is too wide
770 for the available compare insns. */
/* Test a multiword OP0 against zero.  Preferred strategy: OR all the
   words into one register and compare that against zero once; if the
   OR could not be expanded (part becomes 0), fall back to comparing
   each word against zero individually.  */
773 do_jump_by_parts_equality_rtx (rtx op0, rtx if_false_label, rtx if_true_label)
775 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
778 rtx drop_through_label = 0;
780 /* The fastest way of doing this comparison on almost any machine is to
781 "or" all the words and compare the result. If all have to be loaded
782 from memory and this is a very wide item, it's possible this may
783 be slower, but that's highly unlikely. */
785 part = gen_reg_rtx (word_mode);
786 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
787 for (i = 1; i < nwords && part != 0; i++)
788 part = expand_binop (word_mode, ior_optab, part,
789 operand_subword_force (op0, i, GET_MODE (op0)),
790 part, 1, OPTAB_WIDEN);
794 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
795 NULL_RTX, if_false_label, if_true_label);
800 /* If we couldn't do the "or" simply, do this with a series of compares. */
801 if (! if_false_label)
802 drop_through_label = if_false_label = gen_label_rtx ();
804 for (i = 0; i < nwords; i++)
805 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
806 const0_rtx, EQ, 1, word_mode, NULL_RTX,
807 if_false_label, NULL_RTX);
810 emit_jump (if_true_label);
812 if (drop_through_label)
813 emit_label (drop_through_label);
816 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
817 (including code to compute the values to be compared)
818 and set (CC0) according to the result.
819 The decision as to signed or unsigned comparison must be made by the caller.
821 We force a stack adjustment unless there are currently
822 things pushed on the stack that aren't yet used.
824 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
/* Emit a comparison of OP0 and OP1 with CODE (setting CC0 via
   emit_cmp_insn) and return an rtx expressing the comparison result.
   UNSIGNEDP selects unsigned semantics; SIZE is used for BLKmode
   comparisons.  Returns a folded constant rtx when the comparison can
   be evaluated at compile time (that early-return path is elided from
   this excerpt).  */
828 compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
829 enum machine_mode mode, rtx size)
834 /* If one operand is constant, make it the second one. Only do this
835 if the other operand is not constant as well. */
837 if (swap_commutative_operands_p (op0, op1))
/* Swapping the operands requires swapping the condition too.  */
842 code = swap_condition (code);
847 op0 = force_not_mem (op0);
848 op1 = force_not_mem (op1);
851 do_pending_stack_adjust ();
/* Try to fold the whole comparison to a constant first.  */
853 ucode = unsignedp ? unsigned_condition (code) : code;
854 tem = simplify_const_relational_operation (ucode, mode, op0, op1);
859 /* There's no need to do this now that combine.c can eliminate lots of
860 sign extensions. This can be less efficient in certain cases on other
863 /* If this is a signed equality comparison, we can do it as an
864 unsigned comparison since zero-extension is cheaper than sign
865 extension and comparisons with zero are done as unsigned. This is
866 the case even on machines that can do fast sign extension, since
867 zero-extension is easier to combine with other operations than
868 sign-extension is. If we are comparing against a constant, we must
869 convert it to what it would look like unsigned. */
870 if ((code == EQ || code == NE) && ! unsignedp
871 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
873 if (GET_CODE (op1) == CONST_INT
874 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
875 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
880 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
/* On cc0 targets the result tests the condition-code register;
   otherwise it references the operands directly (the #if/#else around
   these two returns is elided).  */
883 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
885 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
889 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
890 The decision as to signed or unsigned comparison must be made by the caller.
892 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
/* Compare rtx operands OP0 and OP1 with CODE and emit a conditional
   jump to IF_TRUE_LABEL / IF_FALSE_LABEL (either may be 0 for fall
   through).  UNSIGNEDP selects unsigned semantics; SIZE is for
   BLKmode.  (The final parameter line of the header is elided.)  */
896 do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
897 enum machine_mode mode, rtx size, rtx if_false_label,
902 int dummy_true_label = 0;
904 /* Reverse the comparison if that is safe and we want to jump if it is
/* Reversal is avoided for floating modes where NaNs make the reversed
   condition inequivalent (FLOAT_MODE_P guard).  */
906 if (! if_true_label && ! FLOAT_MODE_P (mode))
908 if_true_label = if_false_label;
910 code = reverse_condition (code);
913 /* If one operand is constant, make it the second one. Only do this
914 if the other operand is not constant as well. */
916 if (swap_commutative_operands_p (op0, op1))
921 code = swap_condition (code);
926 op0 = force_not_mem (op0);
927 op1 = force_not_mem (op1);
930 do_pending_stack_adjust ();
/* If the comparison folds to a constant, emit an unconditional jump
   (or nothing, when the target label is absent).  */
932 ucode = unsignedp ? unsigned_condition (code) : code;
933 tem = simplify_const_relational_operation (ucode, mode, op0, op1);
936 if (tem == const_true_rtx)
939 emit_jump (if_true_label);
944 emit_jump (if_false_label);
950 /* There's no need to do this now that combine.c can eliminate lots of
951 sign extensions. This can be less efficient in certain cases on other
954 /* If this is a signed equality comparison, we can do it as an
955 unsigned comparison since zero-extension is cheaper than sign
956 extension and comparisons with zero are done as unsigned. This is
957 the case even on machines that can do fast sign extension, since
958 zero-extension is easier to combine with other operations than
959 sign-extension is. If we are comparing against a constant, we must
960 convert it to what it would look like unsigned. */
961 if ((code == EQ || code == NE) && ! unsignedp
962 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
964 if (GET_CODE (op1) == CONST_INT
965 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
966 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
/* emit_cmp_and_jump_insns needs a true-label; synthesize one when the
   caller wants fall-through on true, and emit it afterward.  */
973 dummy_true_label = 1;
974 if_true_label = gen_label_rtx ();
977 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
981 emit_jump (if_false_label);
982 if (dummy_true_label)
983 emit_label (if_true_label);
986 /* Generate code for a comparison expression EXP (including code to compute
987 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
988 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
989 generated code will drop through.
990 SIGNED_CODE should be the rtx operation for this comparison for
991 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
993 We force a stack adjustment unless there are currently
994 things pushed on the stack that aren't yet used. */
/* Expand comparison tree EXP and emit a conditional jump to
   IF_FALSE_LABEL / IF_TRUE_LABEL.  SIGNED_CODE and UNSIGNED_CODE are
   the rtx comparison codes to use for signed resp. unsigned operand
   types.  (The final parameter line of the header is elided.)  */
997 do_compare_and_jump (tree exp, enum rtx_code signed_code,
998 enum rtx_code unsigned_code, rtx if_false_label,
1003 enum machine_mode mode;
1007 /* Don't crash if the comparison was erroneous. */
1008 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1009 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
1012 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1013 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
1016 type = TREE_TYPE (TREE_OPERAND (exp, 0));
1017 mode = TYPE_MODE (type);
1018 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
1019 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
1020 || (GET_MODE_BITSIZE (mode)
1021 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
1024 /* op0 might have been replaced by promoted constant, in which
1025 case the type of second argument should be used. */
1026 type = TREE_TYPE (TREE_OPERAND (exp, 1));
1027 mode = TYPE_MODE (type);
/* Pick the rtx comparison code by the operand type's signedness.  */
1029 unsignedp = TREE_UNSIGNED (type);
1030 code = unsignedp ? unsigned_code : signed_code;
1032 #ifdef HAVE_canonicalize_funcptr_for_compare
1033 /* If function pointers need to be "canonicalized" before they can
1034 be reliably compared, then canonicalize them. */
1035 if (HAVE_canonicalize_funcptr_for_compare
1036 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
1037 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
1040 rtx new_op0 = gen_reg_rtx (mode);
1042 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
/* Same canonicalization for the second operand.  */
1046 if (HAVE_canonicalize_funcptr_for_compare
1047 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
1048 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
1051 rtx new_op1 = gen_reg_rtx (mode);
1053 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
1058 /* Do any postincrements in the expression that was tested. */
/* BLKmode comparisons additionally pass the object size (the
   conditional around expr_size is partially elided here).  */
1061 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1063 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
1064 if_false_label, if_true_label);
1067 #include "gt-dojump.h"