/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   ->  the initial state of the value.  This value
                            is replaced with a correct initial value
                            the first time the value is used, so the
                            rest of the pass does not need to care about
                            it.  Using this value simplifies initialization
                            of the pass, and prevents us from needlessly
                            scanning statements that are never reached.

        UNDEFINED       ->  V_i is a local variable whose definition
                            has not been processed yet.  Therefore we
                            don't yet know if its value is a constant
                            or not.

        CONSTANT        ->  V_i has been found to hold a constant
                            value C.

        VARYING         ->  V_i cannot take a constant value, or if it
                            does, it is not possible to determine it
                            at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
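/* A small worked example (illustrative only, not taken from the
   references above): given the GIMPLE sequence

                        x_1 = 4;
                        y_2 = x_1 + 1;
                        if (y_2 > 10)
                          z_3 = 0;
                        else
                          z_3 = 1;

   visiting x_1 = 4 sets x_1 to CONSTANT 4, which puts y_2's statement
   on the worklist; y_2 then becomes CONSTANT 5, the predicate folds to
   false, only the else edge is marked executable, and the meet over
   z_3's PHI determines z_3 to be CONSTANT 1 even though it has two
   reaching definitions.  */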
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
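/* For example (illustrative values following directly from the mask
   invariant above): a pointer known to be 8-byte aligned but otherwise
   unknown is CONSTANT with value == 0 and mask == -1 & ~7; only the low
   three bits are known (to be zero).  A mask of 0 means every bit of
   VALUE is known, and a mask of -1 means no bit is known.  */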
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
          || double_int_zero_p (val.mask))
        {
          fprintf (outf, "%sCONSTANT ", prefix);
          print_generic_expr (outf, val.value, dump_flags);
        }
      else
        {
          double_int cval = double_int_and_not (tree_to_double_int (val.value),
                                                val.mask);
          fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                   prefix, cval.high, cval.low);
          fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
                   val.mask.high, val.mask.low);
        }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (is_gimple_reg (sym)
          && TREE_CODE (sym) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;
  if (const_val == NULL)
    return NULL;
  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);
  canonicalize_float_value (val);
  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = y * 0.0;

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
             (double_int_and_not (tree_to_double_int (old_val.value),
                                  new_val.mask),
              double_int_and_not (tree_to_double_int (new_val.value),
                                  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
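/* Example (illustrative numbers, not from the sources): viewing four
   bits, an old value 0x8 with mask 0x3 (low two bits unknown) may
   transition to value 0x9 with mask 0x1, because the bits still valid
   under the new mask agree (0x8 & ~0x1 == 0x9 & ~0x1 == 0x8); it may
   not transition to value 0x4 with mask 0x3, since the known high bits
   would change from 0x8 to 0x4.  */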
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
                             tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
                                     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
          && TREE_CODE (new_val.value) == INTEGER_CST
          && (TREE_CODE (old_val->value) != INTEGER_CST
              || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
                               tree, double_int, double_int,
                               tree, double_int, double_int);
/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
                              &bitsize, &bitpos, &offset,
                              &mode, &align, &align, false);
  if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF)
    val = get_value_for_expr (TREE_OPERAND (base, 0), true);
  else if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
                           TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
           && ((align = get_object_alignment (base, BITS_PER_UNIT,
                                              BIGGEST_ALIGNMENT))
               > BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
                   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
                    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
                         TREE_TYPE (expr), value_to_double_int (val), val.mask,
                         TREE_TYPE (expr),
                         shwi_to_double_int (bitpos / BITS_PER_UNIT),
                         double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
        val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
        val.value = NULL_TREE;
    }
  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
                         TREE_TYPE (expr), value_to_double_int (val), val.mask,
                         TREE_TYPE (expr), value_to_double_int (oval),
                         oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
        {
          val.lattice_val = VARYING;
          val.value = NULL_TREE;
        }
      else
        {
          val.lattice_val = CONSTANT;
          val.value = double_int_to_tree (TREE_TYPE (expr), value);
        }
    }

  return val;
}
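/* For instance (an illustrative sketch with invented declarations): for
   '&a.f' where 'a' is 16-byte aligned and field 'f' sits at byte offset
   4, the base contributes value 0 with mask ~15, and folding in the
   bitpos term above yields value 4 with mask ~15, i.e. the low four
   bits of the address are known to be 0100.  */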
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
          && val.lattice_val == CONSTANT
          && TREE_CODE (val.value) == ADDR_EXPR)
        val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a direct call to
     a function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, we must simulate it
             at least once.  Failure to do so means that those edges
             will never get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}
/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].mask = double_int_minus_one;
          const_val[i].value = NULL_TREE;
        }
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
                                           ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci         if (i == j)
                Ci   M Cj          = VARYING    if (i != j)
   */
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask
          = double_int_ior (double_int_ior (val1->mask,
                                            val2->mask),
                            double_int_xor (tree_to_double_int (val1->value),
                                            tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
         alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
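/* Illustrative meets (values invented for the example): CONSTANT 3 M
   UNDEFINED = CONSTANT 3; CONSTANT 3 M CONSTANT 3 = CONSTANT 3;
   CONSTANT 3 M CONSTANT 5 yields a partial constant with mask 6 (the
   differing bits 3 ^ 5 become unknown), leaving only the low bit known
   to be 1; CONSTANT 3 M VARYING = VARYING.  */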
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
                   "\n    Argument #%d (%d -> %d %sexecutable)\n",
                   i, e->src->index, e->dest->index,
                   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val = get_value_for_expr (arg, false);

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP, or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
        return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return get_constant_value (rhs);
                }
              /* Handle propagating invariant addresses into address operations.
                 The folding we do here matches that in tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == MEM_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      tree val = get_constant_value (TREE_OPERAND (*base, 0));
                      if (val
                          && TREE_CODE (val) == ADDR_EXPR)
                        {
                          tree ret, save = *base;
                          tree new_base;
                          new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (*base, 1));
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          *base = new_base;
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, list;

                  list = NULL_TREE;
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = valueize_op (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        list = tree_cons (NULL_TREE, val, list);
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), nreverse (list));
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
                      if (val)
                        return fold_unary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
                      if (val
                          && TREE_CODE (val) == ADDR_EXPR)
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0)))
                {
                  tree tem;
                  /* Try to re-construct array references on-the-fly.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                           (loc,
                            op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
              tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr
                           (fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
              tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
              tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn = valueize_op (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = valueize_op (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        /* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
tree
fold_const_aggregate_ref (tree t)
{
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case MEM_REF:
          /* ??? We could handle this case.  */
          if (!integer_zerop (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          base = get_base_address (base);
          if (!base
              || TREE_CODE (base) != VAR_DECL)
            return NULL_TREE;

          /* Fallthru.  */
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((tem = get_constant_value (idx))
              && TREE_CODE (tem) == INTEGER_CST)
            idx = tem;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_NOPS (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_NOPS (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
          && (tem = get_constant_value (base)))
        base = tem;
      if (TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (DECL_P (base)
              && !AGGREGATE_TYPE_P (TREE_TYPE (base))
              && integer_zerop (TREE_OPERAND (t, 1)))
            {
              tree res = get_symbol_constant_value (base);
              if (res
                  && !useless_type_conversion_p
                        (TREE_TYPE (t), TREE_TYPE (res)))
                res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
              return res;
            }

          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;

          ctor = DECL_INITIAL (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          && (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
          && integer_zerop
               (int_const_binop
                  (TRUNC_MOD_EXPR, idx,
                   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
        {
          idx = int_const_binop (TRUNC_DIV_EXPR, idx,
                                 size_int (GET_MODE_SIZE
                                             (TYPE_MODE (TREE_TYPE (t)))), 0);
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
            if (tree_int_cst_equal (cfield, idx))
              {
                STRIP_NOPS (cval);
                if (TREE_CODE (cval) == ADDR_EXPR)
                  {
                    tree base = get_base_address (TREE_OPERAND (cval, 0));
                    if (base && TREE_CODE (base) == VAR_DECL)
                      add_referenced_var (base);
                  }
                if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
                  return cval;
                else if (CONSTANT_CLASS_P (cval))
                  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
              }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */
static void
bit_value_unop_1 (enum tree_code code, tree type,
                  double_int *val, double_int *mask,
                  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
        double_int temv, temm;
        /* Return ~rval + 1.  */
        bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           type, temv, temm,
                           type, double_int_one, double_int_zero);
        break;
      }

    CASE_CONVERT:
      {
        bool uns;

        /* First extend mask and value according to the original type.  */
        uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
               ? 0 : TYPE_UNSIGNED (rtype));
        *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
        *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

        /* Then extend mask and value according to the target type.  */
        uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
               ? 0 : TYPE_UNSIGNED (type));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
        break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */
static void
bit_value_binop_1 (enum tree_code code, tree type,
                   double_int *val, double_int *mask,
                   tree r1type, double_int r1val, double_int r1mask,
                   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
              && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
         set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
                              double_int_and (double_int_ior (r1val, r1mask),
                                              double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
         set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
                (double_int_ior (r1mask, r2mask),
                 double_int_ior (double_int_and_not (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RROTATE_EXPR)
            shift = -shift;
          *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
          *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
        }
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
         its sign.  That way we can tell that (x << (y | 8)) & 255
         is zero.  */
      if (double_int_zero_p (r2mask))
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RSHIFT_EXPR)
            shift = -shift;
          /* We need to know if we are doing a left or a right shift
             to properly shift in zeros for left shift and unsigned
             right shifts and the sign bit for signed right shifts.
             For signed right shifts we shift in varying in case
             the sign bit was varying.  */
          if (shift > 0)
            {
              *mask = double_int_lshift (r1mask, shift,
                                         TYPE_PRECISION (type), false);
              *val = double_int_lshift (r1val, shift,
                                        TYPE_PRECISION (type), false);
            }
          else if (shift < 0)
            {
              shift = -shift;
              *mask = double_int_rshift (r1mask, shift,
                                         TYPE_PRECISION (type), !uns);
              *val = double_int_rshift (r1val, shift,
                                        TYPE_PRECISION (type), !uns);
            }
          else
            {
              *mask = r1mask;
              *val = r1val;
            }
        }
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
        double_int lo, hi;
        /* Do the addition with unknown bits set to zero, to give carry-ins of
           zero wherever possible.  */
        lo = double_int_add (double_int_and_not (r1val, r1mask),
                             double_int_and_not (r2val, r2mask));
        lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
        /* Do the addition with unknown bits set to one, to give carry-ins of
           one wherever possible.  */
        hi = double_int_add (double_int_ior (r1val, r1mask),
                             double_int_ior (r2val, r2mask));
        hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
        /* Each bit in the result is known if (a) the corresponding bits in
           both inputs are known, and (b) the carry-in to that bit position
           is known.  We can check condition (b) by seeing if we got the same
           result with minimised carries as with maximised carries.  */
        *mask = double_int_ior (double_int_ior (r1mask, r2mask),
                                double_int_xor (lo, hi));
        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
        /* It shouldn't matter whether we choose lo or hi here.  */
        *val = lo;
        break;
      }
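      /* Worked example of the carry trick (numbers invented for
         illustration): adding r1 = value 0b0100, mask 0b0011 to
         r2 = value 0b0100, mask 0b0000 gives lo = 0b0100 + 0b0100
         = 0b1000 and hi = 0b0111 + 0b0100 = 0b1011, so lo ^ hi =
         0b0011 adds no new unknown bits; the result is value 0b1000
         with mask 0b0011, because the carry out of the unknown low
         bits is provably the same in both extremes.  */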
    case MINUS_EXPR:
      {
        double_int temv, temm;
        bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
                          r2type, r2val, r2mask);
        bit_value_binop_1 (PLUS_EXPR, type, val, mask,
                           r1type, r1val, r1mask,
                           r2type, temv, temm);
        break;
      }

    case MULT_EXPR:
      {
        /* Just track trailing zeros in both operands and transfer
           them to the other.  */
        int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
        int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
        if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (r1tz + r2tz > 0)
          {
            *mask = double_int_not (double_int_mask (r1tz + r2tz));
            *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
            *val = double_int_zero;
          }
        break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
        double_int m = double_int_ior (r1mask, r2mask);
        if (!double_int_equal_p (double_int_and_not (r1val, m),
                                 double_int_and_not (r2val, m)))
          {
            *mask = double_int_zero;
            *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
        double_int tem = r1val;
        r1val = r2val;
        r2val = tem;
        tem = r1mask;
        r1mask = r2mask;
        r2mask = tem;
        code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
        int minmax, maxmin;
        /* If the most significant bits are not known we know nothing.  */
        if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
          break;

        /* If we know the most significant bits we know the values
           value ranges by means of treating varying bits as zero
           or one.  Do a cross comparison of the max/min pairs.  */
        maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
                                 double_int_and_not (r2val, r2mask), uns);
        minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
                                 double_int_ior (r2val, r2mask), uns);
        if (maxmin < 0)  /* r1 is less than r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_one;
          }
        else if (minmax > 0)  /* r1 is not less or equal to r2.  */
          {
            *mask = double_int_zero;
            *val = double_int_zero;
          }
        else if (maxmin == minmax)  /* r1 and r2 are equal.  */
          {
            /* This probably should never happen as we'd have
               folded the thing during fully constant value folding.  */
            *mask = double_int_zero;
            *val = (code == LE_EXPR ? double_int_one : double_int_zero);
          }
        else
          {
            /* We know the result of a comparison is always one or zero.  */
            *mask = double_int_one;
            *val = double_int_zero;
          }
        break;
      }

    default:;
    }
}
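/* For example (values invented for illustration): comparing
   r1 = value 0b0000, mask 0b0011 (range [0, 3]) with r2 = value
   0b1000, mask 0b0011 (range [8, 11]) using LT_EXPR, the cross
   comparison of max(r1) = 3 against min(r2) = 8 already proves the
   result, so *val = 1 with *mask = 0 even though neither operand is
   fully known.  */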
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */
static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
              || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
                    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */
static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
               && TREE_CODE (r1val.value) == INTEGER_CST)
              || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
               && TREE_CODE (r2val.value) == INTEGER_CST)
              || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
                     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
                     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */
static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = double_int_zero;
        }
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
        {
          /* The statement produced a constant value.  */
          val.lattice_val = CONSTANT;
          val.value = simplified;
          val.mask = double_int_zero;
        }
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);
          switch (get_gimple_rhs_class (subcode))
            {
            case GIMPLE_SINGLE_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                val = get_value_for_expr (rhs1, true);
              break;

            case GIMPLE_UNARY_RHS:
              if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
                      || POINTER_TYPE_P (gimple_expr_type (stmt))))
                val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
              break;

            case GIMPLE_BINARY_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                {
                  tree rhs2 = gimple_assign_rhs2 (stmt);
                  val = bit_value_binop (subcode,
                                         TREE_TYPE (rhs1), rhs1, rhs2);
                }
              break;

            default:;
            }
        }
      else if (code == GIMPLE_COND)
        {
          enum tree_code code = gimple_cond_code (stmt);
          tree rhs1 = gimple_cond_lhs (stmt);
          tree rhs2 = gimple_cond_rhs (stmt);
          if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
              || POINTER_TYPE_P (TREE_TYPE (rhs1)))
            val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
        }
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        {
          val.lattice_val = likelyvalue;
          val.mask = double_int_zero;
        }
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }

      val.value = NULL_TREE;
    }

  return val;
}
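/* For instance (an illustrative case, not from the sources): for
   'z_4 = x_2 & 3' where x_2 is VARYING, likely_value returns CONSTANT
   because the literal 3 is a constant operand; ccp_fold then fails to
   produce an invariant, and the bitwise path above derives value 0,
   mask 3 -- everything but the low two bits of z_4 is known to be
   zero.  */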
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */
static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || !double_int_zero_p (val.mask))
          return false;

        if (dump_file)
          {
            fprintf (dump_file, "Folding predicate ");
            print_gimple_expr (dump_file, stmt, 0, 0);
            fprintf (dump_file, " to ");
            print_generic_expr (dump_file, val.value, 0);
            fprintf (dump_file, "\n");
          }

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        tree val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree new_rhs = unshare_expr (val);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */
static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */
static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
        /* If the statement is an assignment that produces a single
           output value, evaluate its RHS to see if the lattice value of
           its output has changed.  */
        return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
        /* A value-returning call also performs an assignment.  */
        if (gimple_call_lhs (stmt) != NULL_TREE)
          return visit_assignment (stmt, output_p);
        break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
        /* If STMT is a conditional branch, see if we can determine
           which branch will be taken.  */
        /* FIXME.  It appears that we should be able to optimize
           computed GOTOs here as well.  */
        return visit_cond_stmt (stmt, taken_edge_p);

      default:
        break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
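
/* An illustrative sketch of the two cases (not from real input): in
       saved_1 = __builtin_stack_save ();
       ...
       __builtin_stack_restore (saved_1);
       <no calls or asms>
       __builtin_stack_restore (saved_2);
   the first restore is optimized out because the second one immediately
   overrides its effect; likewise, a restore whose block only falls
   through to the exit block with no calls or asms after it does nothing
   useful, since the stack frame is about to be torn down anyway.  */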
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;
  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
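
/* An illustrative example (the exact GIMPLE depends on the target ABI):
   on a target whose va_list is a plain char/void pointer,
       __builtin_va_start (&ap, 0);
   becomes
       ap = __builtin_next_arg (0);
   __builtin_va_copy (&dst, src) becomes the plain pointer copy
       dst = src;
   and __builtin_va_end (&ap) simply disappears.  */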
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
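
/* For example (illustrative), once constants have been propagated, a
   call such as
       n_2 = __builtin_strlen ("abc");
   can be folded by gimple_fold_builtin into the plain assignment
       n_2 = 3;
   and any __builtin_constant_p call still left unfolded at this point
   is resolved to 0 below.  */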
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);
	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};