/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
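
/* Illustrative sketch (not part of the pass itself): given

	int f (void)
	{
	  int x = 4;
	  int y;
	  if (x > 3)
	    y = x + x;
	  else
	    y = 0;
	  return y;
	}

   CCP discovers x_1 == 4 (CONSTANT), folds the predicate x_1 > 3 to
   true, marks the else edge non-executable, and the PHI node for y
   meets to the single constant 8 arriving over the executable edge,
   so the function reduces to "return 8;".  */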
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
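
/* For illustration: a fully known constant C is encoded as the pair
   { CONSTANT, C, mask 0 }; a pointer known only to be 8-byte aligned
   is { CONSTANT, 0, mask ~7 } (the low three bits are known zero,
   every bit covered by the mask is unknown); a mask of all ones
   carries no information and corresponds to VARYING.  */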

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
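
/* Example (illustrative): UNDEFINED -> CONSTANT 4 -> VARYING is a
   valid monotone chain, and CONSTANT { value 4, mask 3 } may become
   CONSTANT { value 6, mask 3 } because the two values agree on all
   bits still valid under the new mask; dropping from CONSTANT back
   to UNDEFINED would trip the assertion in set_lattice_value.  */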

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);

  return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  align = get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
			  ? double_int_mask (TYPE_PRECISION (type))
			  : double_int_minus_one,
			  uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
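
/* Example (illustrative): for &a[0] with A known to be 16-byte
   aligned, get_object_alignment_1 returns align == 128 (bits) and
   bitpos == 0, yielding { CONSTANT, 0, mask ~15 }: the low four bits
   of the address are known to be zero and the rest are unknown.  */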

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	case ADDR_EXPR:
	  /* If any part of an address is UNDEFINED, like the index
	     of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align > 1)
	{
	  pi = get_ptr_info (name);
	  pi->align = align;
	  pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
	}
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
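
/* Worked example (illustrative): meeting CONSTANT { value 4, mask 0 }
   with CONSTANT { value 6, mask 0 } ors the XOR of the two values
   (4 ^ 6 == 2) into the combined mask, giving
   CONSTANT { value 4, mask 2 }: bit 1 becomes unknown but every other
   bit stays known, so partial information is kept instead of dropping
   straight to VARYING.  */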

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;
    case CONSTANT:
    case UNDEFINED:
      break;
    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
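
/* Example (illustrative): BIT_NOT_EXPR on { value 0, mask 7 } (an
   unknown value in [0, 7]) keeps mask == 7 and yields value ~0, i.e.
   all bits above bit 2 are known to be one while the low three bits
   remain unknown; NEGATE_EXPR is then derived from this as ~X + 1 via
   bit_value_binop_1.  */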

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      /* ??? We can have sizetype related inconsistencies in
		 the IL.  */
	      if ((TREE_CODE (r1type) == INTEGER_TYPE
		   && (TYPE_IS_SIZETYPE (r1type)
		       ? 0 : TYPE_UNSIGNED (r1type))) != uns)
		break;

	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
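
      /* Worked example (illustrative) of the carry check above:
	 adding { value 1, mask 2 } (1 or 3) to the constant 1 gives
	 lo == (1 & ~2) + 1 == 2 and hi == (1 | 2) + 1 == 4, so
	 mask == 2 | (2 ^ 4) == 6 and the result is
	 { value 2, mask 6 }: bits 1 and 2 are lost to the unknown
	 carry, while bit 0 and all higher bits remain known zero.  */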

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = (TREE_CODE (r1type) == INTEGER_TYPE
	       && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
	/* ??? We can have sizetype related inconsistencies in the IL.  */
	if ((TREE_CODE (r2type) == INTEGER_TYPE
	     && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
	  break;

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
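
/* Example (illustrative) of the cross comparison: for
   { value 0, mask 3 } < { value 8, mask 0 } in an unsigned type,
   max(r1) == 3 is still below min(r2) == 8, so maxmin < 0 and the
   comparison folds to { value 1, mask 0 }, i.e. true, even though
   r1 itself is not fully known.  */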

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
	return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_double_int (ptrval), ptrval.mask,
		     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
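
/* Example (illustrative): for p_2 = __builtin_assume_aligned (p_1, 16)
   the BIT_AND_EXPR above with -16 turns even a fully VARYING p_1 into
   CONSTANT { value 0, mask ~15 }; if a third misalignment argument of,
   say, 4 is given, the recovered value becomes 4 instead of 0.  */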

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) align)
			      / BITS_PER_UNIT - 1));
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt);
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}

/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
{
  gimple stmt, clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var), NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (*visited == NULL)
	  *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);

	slot = (gimple *)htab_find_slot (*visited, stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else
      gcc_assert (is_gimple_debug (stmt));
}

/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR)
	return;

      *i = gsi_last_bb (dom);
    }
}

/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator
   when a previous pass (such as DOM) duplicated it along multiple paths
   to a BB.  In that case the function gives up without inserting the
   clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  htab_t visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  if (visited != NULL)
    htab_delete (visited);
}

/* Detects a __builtin_alloca_with_align with constant size argument.
   Declares a fixed-size array and returns the address, if found,
   otherwise returns NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !host_integerp (arg, 1))
    return NULL_TREE;

  size = TREE_INT_CST_LOW (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
	&& TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type, NULL);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
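
/* Example (illustrative, array name hypothetical): when folding
   succeeds,

	p_1 = __builtin_alloca_with_align (32, 128);

   becomes the address of a fresh 32-element char array D.1234 with
   DECL_ALIGN of 128 bits (16 bytes), i.e. p_1 = &D.1234; the call
   itself is then removed by update_call_from_tree in ccp_fold_stmt.  */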

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0),0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
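
/* Example (illustrative): on a target whose va_list is a plain pointer,
   __builtin_va_start (&ap, 0) becomes ap = __builtin_next_arg (0),
   __builtin_va_copy (&dst, src) becomes dst = src, and
   __builtin_va_end (&ap) folds to integer_zero_node, i.e. the call is
   simply deleted.  */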

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_ASSUME_ALIGNED:
		/* Remove __builtin_assume_aligned.  */
		result = gimple_call_arg (stmt, 0);
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_update_ssa			/* todo_flags_finish */
 }
};