/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
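   As a small illustration, consider

	x_1 = 4;
	y_2 = x_1 + 5;

   Visiting the first statement moves x_1 from UNINITIALIZED to
   CONSTANT 4; visiting the second evaluates 4 + 5 and sets y_2 to
   CONSTANT 9.  A name whose value cannot be known at compile time,
   such as the result of an arbitrary function call, becomes VARYING.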
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited again.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;
		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
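	For example, in

		if (PRED)
		  x_2 = 11;
		x_3 = PHI (x_2, x_1)

	where x_1 has no defining statement, the meet of UNDEFINED
	(x_1) and CONSTANT 11 (x_2) is CONSTANT 11: the compiler is
	free to assume the convenient value 11 for the uninitialized
	use.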
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance, global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  return A_5;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.
   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
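
/* As a small end-to-end illustration (a hypothetical input, not part
   of the references above), CCP transforms

	int f (void)
	{
	  int x = 4;
	  int y;
	  if (x > 3)
	    y = x + 1;
	  else
	    y = 0;
	  return y;
	}

   into 'return 5;': x_1 becomes CONSTANT 4, the predicate folds to
   true, the else edge is marked not executable, and the PHI node for
   y meets to CONSTANT 5.  */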
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
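
/* Illustrative behavior, assuming 'a' and 'b' bind locally:

     static const int a = 42;	returns 42
     static const int b;	returns 0 (zero-initializer rule below)
     int c = 42;		returns NULL_TREE (not TREE_READONLY)  */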
static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
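
/* Illustrative application of these rules (regular CCP):

     static const int k = 9;	uses of k default to CONSTANT 9 (rule 1)
     int i; ... x_2 = i_3;	i_3, never defined, is UNDEFINED (rule 2)
     PARM_DECLs and names defined by GIMPLE_ASM default to VARYING
     (rules 3 and 4, and the PARM_DECL check below).  */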
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set the value of y to NaN.  This causes the value of x to be
   set to NaN.  When we later determine that y is in fact VARYING, fold
   uses the fact that HONOR_NANS is false, and we try to change the
   value of x to 0, causing an ICE.  With HONOR_NANS being false, the
   real appearance of NaN would cause undefined behavior, though, so
   claiming that y (and x) are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))
		  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
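
/* For example, with y_2 UNDEFINED, 'z_3 = x_1 + y_2' is still likely
   UNDEFINED, because the compiler may pick a convenient value for
   y_2, whereas 'z_3 = x_1 / y_2' falls back to VARYING: as the
   comment in the switch below notes, the undefined operand may be
   promoted, so no single consistent choice is safe.  */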
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- a load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
	return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
	  && !stmt_makes_single_store (stmt))
	return true;
    }

  /* If it is a call that does not return a value, or is a direct call
     to a function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }

	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
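
/* For example:

		UNDEFINED  M CONSTANT 4  = CONSTANT 4
		CONSTANT 4 M CONSTANT 4  = CONSTANT 4
		CONSTANT 4 M CONSTANT 5  = VARYING
		CONSTANT 4 M VARYING     = VARYING  */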
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  VAL1 already holds
	 the value we want, so nothing needs to change.  */
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */
static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   (int) i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address
		 operations.  The folding we do here matches that in
		 tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p
			       (TREE_TYPE (TREE_OPERAND (*base, 0)),
				TREE_TYPE (val->value))
			  && useless_type_conversion_p
			       (TREE_TYPE (*base),
				TREE_TYPE (TREE_OPERAND (val->value, 0))))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (do_store_ccp && stmt_makes_single_load (stmt))
		{
		  /* If the RHS is a memory load, see if the VUSEs associated
		     with it are a valid constant for that memory load.  */
		  prop_value_t *val = get_value_loaded_by (stmt, const_val);
		  if (val && val->mem_ref)
		    {
		      if (operand_equal_p (val->mem_ref, rhs, 0))
			return val->value;

		      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR
			 of a complex type with a known constant value,
			 return it.  */
		      if ((TREE_CODE (rhs) == REALPART_EXPR
			   || TREE_CODE (rhs) == IMAGPART_EXPR)
			  && operand_equal_p (val->mem_ref,
					      TREE_OPERAND (rhs, 0), 0))
			return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs),
					    val->value);
		    }
		}

	      if (kind == tcc_reference)
		return fold_const_aggregate_ref (rhs);
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute
		 to allowed places.  */
	      if (IS_CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary (subcode, gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
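
/* For example, given

     static const int a[2] = { 1, 2 };

   a reference 'a[1]' folds to the INTEGER_CST 2, and a read from a
   STRING_CST such as "xy"[1] folds to the character constant 'y'.  */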
tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */
static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else if (do_store_ccp && stmt_makes_single_load (stmt))
	{
	  /* Same as above, but the RHS is not a gimple register and yet
	     has a known VUSE.  If STMT is loading from the same memory
	     location that created the SSA_NAMEs for the virtual operands,
	     we can propagate the value on the RHS.  */
	  prop_value_t *nval = get_value_loaded_by (stmt, const_val);

	  if (nval
	      && nval->mem_ref
	      && operand_equal_p (nval->mem_ref, rhs, 0))
	    val = *nval;
	  else
	    val = evaluate_stmt (stmt);
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in the VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};
static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}


static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_store_ccp =
{
 {
  GIMPLE_PASS,
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
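
/* For example, with 'int a[10]' on a target where int is 4 bytes
   wide, folding BASE == 'a', OFFSET == 8 yields 'a[2]', because the
   byte offset divides exactly by the element size; OFFSET == 6 would
   not fold.  */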
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);

  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
	char *(c[4]);
	c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
	struct A { long x; char d[0]; } *a;
	(char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  else if (domain_type
	   && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
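
/* For example, given

     struct S { int x; int y; } s;

   folding S == 's', O == 4 yields 's.y' on a target where int is 4
   bytes wide, since the offset lands exactly on the second field.  */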
static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret, new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */
static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle cases where an existing COMPONENT_REF picks e.g. the wrong
	 field of a union, so it needs to be removed and a new COMPONENT_REF
	 constructed.  The wrong COMPONENT_REFs are often constructed by
	 folding the (type *)&object within the expression
	 (type *)&object+offset.  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset
							 / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */
static tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || (TREE_CODE (t) == COMPONENT_REF
		  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
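
/* For example, '*(&a + 4)' with 'int a[10]' may simplify to 'a[1]'
   on a target with 4-byte int, and a dereference of the address of a
   string constant folds to the constant character it designates.  */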
static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.  */
	  TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
/* For passing state through walk_tree into fold_stmt_r and its
   callbacks.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
2260 /* Subroutine of fold_stmt called via walk_tree. We perform several
2261 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
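/* For example (editor's sketch): if earlier propagation produced
   t_1 = *&b.x, the INDIRECT_REF case below rewrites the operand so
   the statement reads t_1 = b.x.  */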
2264 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2266 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2267 struct fold_stmt_r_data *fold_stmt_r_data;
2268 bool *inside_addr_expr_p;
2270 tree expr = *expr_p, t;
2271 bool volatile_p = TREE_THIS_VOLATILE (expr);
2273 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2274 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2275 changed_p = fold_stmt_r_data->changed_p;
2277 /* ??? It'd be nice if walk_tree had a pre-order option. */
2278 switch (TREE_CODE (expr))
2281 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2286 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2289 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2290 /* If we had a good reason for propagating the address here,
2291 make sure we end up with valid gimple. See PR34989. */
2292 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2296 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2301 if (POINTER_TYPE_P (TREE_TYPE (expr))
2302 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2303 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2305 TREE_TYPE (TREE_TYPE (expr)))))
2309 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2310 We'd only want to bother decomposing an existing ARRAY_REF if
2311 the base array is found to have another offset contained within.
2312 Otherwise we'd be wasting time. */
2314 /* If we are not processing expressions found within an
2315 ADDR_EXPR, then we can fold constant array references. */
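/* E.g. (editor's illustration): a read such as "abc"[2] can be folded
   to the character constant 'c', but only outside an ADDR_EXPR, where
   the address rather than the value is wanted.  */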
2316 if (!*inside_addr_expr_p)
2317 t = fold_read_from_constant_string (expr);
2323 *inside_addr_expr_p = true;
2324 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2325 *inside_addr_expr_p = false;
2330 /* Make sure the value is properly considered constant, and so gets
2331 propagated as expected. */
2333 recompute_tree_invariant_for_addr_expr (expr);
2337 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2342 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2343 We've already checked that the records are compatible, so we should
2344 come up with a set of compatible fields. */
2346 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2347 tree expr_field = TREE_OPERAND (expr, 1);
2349 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2351 expr_field = find_compatible_field (expr_record, expr_field);
2352 TREE_OPERAND (expr, 1) = expr_field;
2357 case TARGET_MEM_REF:
2358 t = maybe_fold_tmr (expr);
2361 case POINTER_PLUS_EXPR:
2362 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2365 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2370 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2371 TREE_OPERAND (expr, 0),
2372 TREE_OPERAND (expr, 1));
2376 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2378 tree op0 = TREE_OPERAND (expr, 0);
2382 fold_defer_overflow_warnings ();
2383 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2384 TREE_OPERAND (op0, 0),
2385 TREE_OPERAND (op0, 1));
2386 /* This is actually a conditional expression, not a GIMPLE
2387 conditional statement; however, the valid_gimple_rhs_p
2388 test still applies. */
2389 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2390 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2393 COND_EXPR_COND (expr) = tem;
2406 /* Preserve the volatility of the original expression. */
2407 TREE_THIS_VOLATILE (t) = volatile_p;
2415 /* Return the string length, maximum string length or maximum value of
2416 ARG in LENGTH.
2417 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2418 is not NULL and, for TYPE == 0, its value is not equal to the length
2419 we determine, or if we are unable to determine the length or value,
2420 return false. VISITED is a bitmap of visited variables.
2421 TYPE is 0 if the string length should be returned, 1 for the maximum
2422 string length and 2 for the maximum value ARG can have. */
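/* For instance (editor's sketch): given

     s_1 = "hello";
     x_2 = strlen (s_1);

   a call get_maxval_strlen (s_1, &len, visited, 0) would follow the
   definition of s_1 and set len to the INTEGER_CST 5.  */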
2425 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2430 if (TREE_CODE (arg) != SSA_NAME)
2432 if (TREE_CODE (arg) == COND_EXPR)
2433 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2434 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2435 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2436 else if (TREE_CODE (arg) == ADDR_EXPR
2437 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2438 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2440 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2441 if (TREE_CODE (aop0) == INDIRECT_REF
2442 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2443 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2444 length, visited, type);
2450 if (TREE_CODE (val) != INTEGER_CST
2451 || tree_int_cst_sgn (val) < 0)
2455 val = c_strlen (arg, 1);
2463 if (TREE_CODE (*length) != INTEGER_CST
2464 || TREE_CODE (val) != INTEGER_CST)
2467 if (tree_int_cst_lt (*length, val))
2471 else if (simple_cst_equal (val, *length) != 1)
2479 /* If we were already here, break the infinite cycle. */
2480 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2482 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2485 def_stmt = SSA_NAME_DEF_STMT (var);
2487 switch (gimple_code (def_stmt))
2490 /* The RHS of the statement defining VAR must either have a
2491 constant length or come from another SSA_NAME with a constant
2492 length. */
2493 if (gimple_assign_single_p (def_stmt)
2494 || gimple_assign_unary_nop_p (def_stmt))
2496 tree rhs = gimple_assign_rhs1 (def_stmt);
2497 return get_maxval_strlen (rhs, length, visited, type);
2503 /* All the arguments of the PHI node must have the same constant
2504 length. */
2507 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2509 tree arg = gimple_phi_arg (def_stmt, i)->def;
2511 /* If this PHI has itself as an argument, we cannot
2512 determine the string length of this argument. However,
2513 if we can find a constant string length for the other
2514 PHI args then we can still be sure that this is a
2515 constant string length. So be optimistic and just
2516 continue with the next argument. */
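/* E.g. (editor's sketch): for a_1 = PHI <a_1(3), "ab"(2)>, the
   self-referential first argument is skipped and the length 2 is
   taken from the remaining argument.  */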
2517 if (arg == gimple_phi_result (def_stmt))
2520 if (!get_maxval_strlen (arg, length, visited, type))
2532 /* Fold builtin call in statement STMT. Returns a simplified tree.
2533 We may return a non-constant expression, including another call
2534 to a different function and with different arguments, e.g.,
2535 substituting memcpy for strcpy when the string length is known.
2536 Note that some builtins expand into inline code that may not
2537 be valid in GIMPLE. Callers must take care. */
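/* For example (editor's illustration, assuming the source length is
   known to be 3):

     strcpy (dst, src)   may become   memcpy (dst, src, 4)

   which copies the three characters plus the terminating NUL.  */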
2540 ccp_fold_builtin (gimple stmt)
2542 tree result, val[3];
2544 int arg_mask, i, type;
2549 gcc_assert (is_gimple_call (stmt));
2551 ignore = (gimple_call_lhs (stmt) == NULL);
2553 /* First try the generic builtin folder. If that succeeds, return the
2554 result. */
2555 result = fold_call_stmt (stmt, ignore);
2559 STRIP_NOPS (result);
2563 /* Ignore MD builtins. */
2564 callee = gimple_call_fndecl (stmt);
2565 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2568 /* If the builtin could not be folded, and it has no argument list,
2569 there is nothing else to do. */
2570 nargs = gimple_call_num_args (stmt);
2574 /* Limit the work only for builtins we know how to simplify. */
2575 switch (DECL_FUNCTION_CODE (callee))
2577 case BUILT_IN_STRLEN:
2578 case BUILT_IN_FPUTS:
2579 case BUILT_IN_FPUTS_UNLOCKED:
2583 case BUILT_IN_STRCPY:
2584 case BUILT_IN_STRNCPY:
2588 case BUILT_IN_MEMCPY_CHK:
2589 case BUILT_IN_MEMPCPY_CHK:
2590 case BUILT_IN_MEMMOVE_CHK:
2591 case BUILT_IN_MEMSET_CHK:
2592 case BUILT_IN_STRNCPY_CHK:
2596 case BUILT_IN_STRCPY_CHK:
2597 case BUILT_IN_STPCPY_CHK:
2601 case BUILT_IN_SNPRINTF_CHK:
2602 case BUILT_IN_VSNPRINTF_CHK:
2610 /* Try to use the dataflow information gathered by the CCP process. */
2611 visited = BITMAP_ALLOC (NULL);
2613 memset (val, 0, sizeof (val));
2614 for (i = 0; i < nargs; i++)
2616 if ((arg_mask >> i) & 1)
2618 a = gimple_call_arg (stmt, i);
2619 bitmap_clear (visited);
2620 if (!get_maxval_strlen (a, &val[i], visited, type))
2625 BITMAP_FREE (visited);
2628 switch (DECL_FUNCTION_CODE (callee))
2630 case BUILT_IN_STRLEN:
2634 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2636 /* If the result is not a valid gimple value, or not a cast
2637 of a valid gimple value, then we cannot use the result. */
2638 if (is_gimple_val (new_val)
2639 || (is_gimple_cast (new_val)
2640 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2645 case BUILT_IN_STRCPY:
2646 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2647 result = fold_builtin_strcpy (callee,
2648 gimple_call_arg (stmt, 0),
2649 gimple_call_arg (stmt, 1),
2653 case BUILT_IN_STRNCPY:
2654 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2655 result = fold_builtin_strncpy (callee,
2656 gimple_call_arg (stmt, 0),
2657 gimple_call_arg (stmt, 1),
2658 gimple_call_arg (stmt, 2),
2662 case BUILT_IN_FPUTS:
2663 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2664 gimple_call_arg (stmt, 1),
2665 ignore, false, val[0]);
2668 case BUILT_IN_FPUTS_UNLOCKED:
2669 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2670 gimple_call_arg (stmt, 1),
2671 ignore, true, val[0]);
2674 case BUILT_IN_MEMCPY_CHK:
2675 case BUILT_IN_MEMPCPY_CHK:
2676 case BUILT_IN_MEMMOVE_CHK:
2677 case BUILT_IN_MEMSET_CHK:
2678 if (val[2] && is_gimple_val (val[2]))
2679 result = fold_builtin_memory_chk (callee,
2680 gimple_call_arg (stmt, 0),
2681 gimple_call_arg (stmt, 1),
2682 gimple_call_arg (stmt, 2),
2683 gimple_call_arg (stmt, 3),
2685 DECL_FUNCTION_CODE (callee));
2688 case BUILT_IN_STRCPY_CHK:
2689 case BUILT_IN_STPCPY_CHK:
2690 if (val[1] && is_gimple_val (val[1]))
2691 result = fold_builtin_stxcpy_chk (callee,
2692 gimple_call_arg (stmt, 0),
2693 gimple_call_arg (stmt, 1),
2694 gimple_call_arg (stmt, 2),
2696 DECL_FUNCTION_CODE (callee));
2699 case BUILT_IN_STRNCPY_CHK:
2700 if (val[2] && is_gimple_val (val[2]))
2701 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2702 gimple_call_arg (stmt, 1),
2703 gimple_call_arg (stmt, 2),
2704 gimple_call_arg (stmt, 3),
2708 case BUILT_IN_SNPRINTF_CHK:
2709 case BUILT_IN_VSNPRINTF_CHK:
2710 if (val[1] && is_gimple_val (val[1]))
2711 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2712 DECL_FUNCTION_CODE (callee));
2719 if (result && ignore)
2720 result = fold_ignored_result (result);
2724 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2725 replacement rhs for the statement or NULL_TREE if no simplification
2726 could be made. It is assumed that the operands have been previously
2727 folded. */
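/* E.g. (editor's sketch): for x_1 = 2 + 3, the GIMPLE_BINARY_RHS case
   below folds the addition and returns the constant 5 as the
   replacement RHS.  */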
2730 fold_gimple_assign (gimple_stmt_iterator *si)
2732 gimple stmt = gsi_stmt (*si);
2733 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2737 switch (get_gimple_rhs_class (subcode))
2739 case GIMPLE_SINGLE_RHS:
2741 tree rhs = gimple_assign_rhs1 (stmt);
2743 /* Try to fold a conditional expression. */
2744 if (TREE_CODE (rhs) == COND_EXPR)
2746 tree temp = fold (COND_EXPR_COND (rhs));
2747 if (temp != COND_EXPR_COND (rhs))
2748 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2749 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2752 /* If we couldn't fold the RHS, hand over to the generic
2753 fold routines. */
2754 if (result == NULL_TREE)
2755 result = fold (rhs);
2757 /* Strip away useless type conversions: both the NON_LVALUE_EXPR
2758 that may have been added by fold, and "useless" type
2759 conversions that might now be apparent due to propagation. */
2760 STRIP_USELESS_TYPE_CONVERSION (result);
2762 if (result != rhs && valid_gimple_rhs_p (result))
2765 /* It is possible that fold_stmt_r simplified the RHS.
2766 Make sure that the subcode of this statement still
2767 reflects the principal operator of the rhs operand. */
2772 case GIMPLE_UNARY_RHS:
2773 result = fold_unary (subcode,
2774 gimple_expr_type (stmt),
2775 gimple_assign_rhs1 (stmt));
2779 STRIP_USELESS_TYPE_CONVERSION (result);
2780 if (valid_gimple_rhs_p (result))
2783 else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR
2784 || gimple_assign_rhs_code (stmt) == CONVERT_EXPR)
2785 && POINTER_TYPE_P (gimple_expr_type (stmt))
2786 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2788 tree type = gimple_expr_type (stmt);
2789 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2790 integer_zero_node, type);
2796 case GIMPLE_BINARY_RHS:
2797 /* Try to fold pointer addition. */
2798 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2799 result = maybe_fold_stmt_addition (
2800 TREE_TYPE (gimple_assign_lhs (stmt)),
2801 gimple_assign_rhs1 (stmt),
2802 gimple_assign_rhs2 (stmt));
2805 result = fold_binary (subcode,
2806 TREE_TYPE (gimple_assign_lhs (stmt)),
2807 gimple_assign_rhs1 (stmt),
2808 gimple_assign_rhs2 (stmt));
2812 STRIP_USELESS_TYPE_CONVERSION (result);
2813 if (valid_gimple_rhs_p (result))
2818 case GIMPLE_INVALID_RHS:
2825 /* Attempt to fold a conditional statement. Return true if any changes were
2826 made. We only attempt to fold the condition expression, and do not perform
2827 any transformation that would require alteration of the cfg. It is
2828 assumed that the operands have been previously folded. */
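/* E.g. (editor's sketch): if propagation turned the predicate into
   if (5 > 3), fold_binary reduces it to a constant-true condition and
   later CFG cleanup can remove the dead arm.  */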
2831 fold_gimple_cond (gimple stmt)
2833 tree result = fold_binary (gimple_cond_code (stmt),
2835 gimple_cond_lhs (stmt),
2836 gimple_cond_rhs (stmt));
2840 STRIP_USELESS_TYPE_CONVERSION (result);
2841 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2843 gimple_cond_set_condition_from_tree (stmt, result);
2852 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2853 The statement may be replaced by another statement, e.g., if the call
2854 simplifies to a constant value. Return true if any changes were made.
2855 It is assumed that the operands have been previously folded. */
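/* E.g. (editor's sketch): a call x_1 = strlen (s_2) whose argument is
   known to have length 3 is replaced via update_call_from_tree by the
   assignment x_1 = 3.  */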
2858 fold_gimple_call (gimple_stmt_iterator *gsi)
2860 gimple stmt = gsi_stmt (*gsi);
2862 tree callee = gimple_call_fndecl (stmt);
2864 /* Check for builtins that CCP can handle using information not
2865 available in the generic fold routines. */
2866 if (callee && DECL_BUILT_IN (callee))
2868 tree result = ccp_fold_builtin (stmt);
2871 return update_call_from_tree (gsi, result);
2875 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2876 here are when we've propagated the address of a decl into the
2877 object slot. */
2878 /* ??? Should perhaps do this in fold proper. However, doing it
2879 there requires that we create a new CALL_EXPR, and that requires
2880 copying EH region info to the new node. Easier to just do it
2881 here where we can just smash the call operand. */
2882 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2883 callee = gimple_call_fn (stmt);
2884 if (TREE_CODE (callee) == OBJ_TYPE_REF
2885 && lang_hooks.fold_obj_type_ref
2886 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2887 && DECL_P (TREE_OPERAND
2888 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2892 /* ??? Caution: Broken ADDR_EXPR semantics means that
2893 looking at the type of the operand of the addr_expr
2894 can yield an array type. See silly exception in
2895 check_pointer_types_r. */
2896 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2897 t = lang_hooks.fold_obj_type_ref (callee, t);
2900 gimple_call_set_fn (stmt, t);
2909 /* Fold the statement pointed to by GSI. In some cases, this function may
2910 replace the whole statement with a new one. Returns true iff folding
2911 makes any changes. */
2914 fold_stmt (gimple_stmt_iterator *gsi)
2917 struct fold_stmt_r_data fold_stmt_r_data;
2918 struct walk_stmt_info wi;
2920 bool changed = false;
2921 bool inside_addr_expr = false;
2923 gimple stmt = gsi_stmt (*gsi);
2925 fold_stmt_r_data.stmt = stmt;
2926 fold_stmt_r_data.changed_p = &changed;
2927 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2929 memset (&wi, 0, sizeof (wi));
2930 wi.info = &fold_stmt_r_data;
2932 /* Fold the individual operands.
2933 For example, fold instances of *&VAR into VAR, etc. */
2934 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2937 /* Fold the main computation performed by the statement. */
2938 switch (gimple_code (stmt))
2942 tree new_rhs = fold_gimple_assign (gsi);
2943 if (new_rhs != NULL_TREE)
2945 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
2948 stmt = gsi_stmt (*gsi);
2952 changed |= fold_gimple_cond (stmt);
2955 /* The entire statement may be replaced in this case. */
2956 changed |= fold_gimple_call (gsi);
2967 /* Perform the minimal folding on statement STMT. Only operations like
2968 *&x created by constant propagation are handled. The statement cannot
2969 be replaced with a new one. Return true if the statement was
2970 changed, false otherwise. */
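/* E.g. (editor's sketch): after p_1 = &x has been propagated into
   *p_1 = 0, folding the dereference in place yields x = 0 without
   allocating a replacement statement.  */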
2973 fold_stmt_inplace (gimple stmt)
2976 struct fold_stmt_r_data fold_stmt_r_data;
2977 struct walk_stmt_info wi;
2978 gimple_stmt_iterator si;
2980 bool changed = false;
2981 bool inside_addr_expr = false;
2983 fold_stmt_r_data.stmt = stmt;
2984 fold_stmt_r_data.changed_p = &changed;
2985 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2987 memset (&wi, 0, sizeof (wi));
2988 wi.info = &fold_stmt_r_data;
2990 /* Fold the individual operands.
2991 For example, fold instances of *&VAR into VAR, etc.
2993 It appears that, at one time, maybe_fold_stmt_indirect
2994 would cause the walk to return non-null in order to
2995 signal that the entire statement should be replaced with
2996 a call to __builtin_trap. This functionality is currently
2997 disabled, as noted in a FIXME, and cannot be supported here. */
2998 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
3001 /* Fold the main computation performed by the statement. */
3002 switch (gimple_code (stmt))
3006 unsigned old_num_ops;
3008 old_num_ops = gimple_num_ops (stmt);
3009 si = gsi_for_stmt (stmt);
3010 new_rhs = fold_gimple_assign (&si);
3011 if (new_rhs != NULL_TREE
3012 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
3014 gimple_assign_set_rhs_from_tree (&si, new_rhs);
3017 gcc_assert (gsi_stmt (si) == stmt);
3021 changed |= fold_gimple_cond (stmt);
3031 /* Try to optimize out __builtin_stack_restore. Optimize it out
3032 if there is another __builtin_stack_restore in the same basic
3033 block and no calls or ASM_EXPRs are in between, or if this block's
3034 only outgoing edge is to EXIT_BLOCK and there are no calls or
3035 ASM_EXPRs after this __builtin_stack_restore. */
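/* E.g. (editor's sketch):

     p_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (p_1);   <-- redundant, removable
     ...no calls or asms in between...
     __builtin_stack_restore (p_2);

   The later restore makes the earlier one unnecessary.  */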
3038 optimize_stack_restore (gimple_stmt_iterator i)
3041 gimple stmt, stack_save;
3042 gimple_stmt_iterator stack_save_gsi;
3044 basic_block bb = gsi_bb (i);
3045 gimple call = gsi_stmt (i);
3047 if (gimple_code (call) != GIMPLE_CALL
3048 || gimple_call_num_args (call) != 1
3049 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
3050 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
3053 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
3055 stmt = gsi_stmt (i);
3056 if (gimple_code (stmt) == GIMPLE_ASM)
3058 if (gimple_code (stmt) != GIMPLE_CALL)
3061 callee = gimple_call_fndecl (stmt);
3062 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3065 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
3070 && (! single_succ_p (bb)
3071 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
3074 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3075 if (gimple_code (stack_save) != GIMPLE_CALL
3076 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3077 || stmt_could_throw_p (stack_save)
3078 || !has_single_use (gimple_call_arg (call, 0)))
3081 callee = gimple_call_fndecl (stack_save);
3083 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3084 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3085 || gimple_call_num_args (stack_save) != 0)
3088 stack_save_gsi = gsi_for_stmt (stack_save);
3089 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3090 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3091 if (!update_call_from_tree (&stack_save_gsi, rhs))
3093 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3096 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3098 /* No effect, so the statement will be deleted. */
3099 return integer_zero_node;
3102 /* If the va_list type is a simple pointer and nothing special is needed,
3103 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3104 optimize __builtin_va_end (&ap) away as a no-op, and turn
3105 __builtin_va_copy into a simple pointer assignment. */
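/* E.g. (editor's sketch, valid only when va_list is a plain pointer):

     __builtin_va_copy (&dst, src)   becomes   dst = src;

   while __builtin_va_end (&ap) simply disappears.  */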
3108 optimize_stdarg_builtin (gimple call)
3110 tree callee, lhs, rhs, cfun_va_list;
3111 bool va_list_simple_ptr;
3113 if (gimple_code (call) != GIMPLE_CALL)
3116 callee = gimple_call_fndecl (call);
3118 cfun_va_list = targetm.fn_abi_va_list (callee);
3119 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3120 && (TREE_TYPE (cfun_va_list) == void_type_node
3121 || TREE_TYPE (cfun_va_list) == char_type_node);
3123 switch (DECL_FUNCTION_CODE (callee))
3125 case BUILT_IN_VA_START:
3126 if (!va_list_simple_ptr
3127 || targetm.expand_builtin_va_start != NULL
3128 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3131 if (gimple_call_num_args (call) != 2)
3134 lhs = gimple_call_arg (call, 0);
3135 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3136 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3137 != TYPE_MAIN_VARIANT (cfun_va_list))
3140 lhs = build_fold_indirect_ref (lhs);
3141 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3142 1, integer_zero_node);
3143 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3144 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3146 case BUILT_IN_VA_COPY:
3147 if (!va_list_simple_ptr)
3150 if (gimple_call_num_args (call) != 2)
3153 lhs = gimple_call_arg (call, 0);
3154 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3155 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3156 != TYPE_MAIN_VARIANT (cfun_va_list))
3159 lhs = build_fold_indirect_ref (lhs);
3160 rhs = gimple_call_arg (call, 1);
3161 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3162 != TYPE_MAIN_VARIANT (cfun_va_list))
3165 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3166 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3168 case BUILT_IN_VA_END:
3169 /* No effect, so the statement will be deleted. */
3170 return integer_zero_node;
3177 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3178 RHS of an assignment. Insert the necessary statements before
3179 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
3180 is replaced. If the call is expected to produce a result, then it
3181 is replaced by an assignment of the new RHS to the result variable.
3182 If the result is to be ignored, then the call is replaced by a
3183 GIMPLE_NOP. */
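/* For example (editor's sketch): if EXPR is a tree too complex to be a
   valid GIMPLE RHS, it is gimplified into a temporary

     tmp_1 = ...;         <-- inserted before *SI_P
     lhs_2 = tmp_1;       <-- replaces the original call

   preserving the call's location and virtual operands.  */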
3186 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3189 tree tmp = NULL_TREE; /* Silence warning. */
3190 gimple stmt, new_stmt;
3191 gimple_stmt_iterator i;
3192 gimple_seq stmts = gimple_seq_alloc ();
3193 struct gimplify_ctx gctx;
3195 stmt = gsi_stmt (*si_p);
3197 gcc_assert (is_gimple_call (stmt));
3199 lhs = gimple_call_lhs (stmt);
3201 push_gimplify_context (&gctx);
3203 if (lhs == NULL_TREE)
3204 gimplify_and_add (expr, &stmts);
3206 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3208 pop_gimplify_context (NULL);
3210 if (gimple_has_location (stmt))
3211 annotate_all_with_location (stmts, gimple_location (stmt));
3213 /* The replacement can expose previously unreferenced variables. */
3214 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3216 new_stmt = gsi_stmt (i);
3217 find_new_referenced_vars (new_stmt);
3218 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3219 mark_symbols_for_renaming (new_stmt);
3223 if (lhs == NULL_TREE)
3224 new_stmt = gimple_build_nop ();
3227 new_stmt = gimple_build_assign (lhs, tmp);
3228 copy_virtual_operands (new_stmt, stmt);
3229 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3232 gimple_set_location (new_stmt, gimple_location (stmt));
3233 gsi_replace (si_p, new_stmt, false);
3236 /* A simple pass that attempts to fold all builtin functions. This pass
3237 is run after we've propagated as many constants as we can. */
3240 execute_fold_all_builtins (void)
3242 bool cfg_changed = false;
3244 unsigned int todoflags = 0;
3248 gimple_stmt_iterator i;
3249 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3251 gimple stmt, old_stmt;
3252 tree callee, result;
3253 enum built_in_function fcode;
3255 stmt = gsi_stmt (i);
3257 if (gimple_code (stmt) != GIMPLE_CALL)
3262 callee = gimple_call_fndecl (stmt);
3263 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3268 fcode = DECL_FUNCTION_CODE (callee);
3270 result = ccp_fold_builtin (stmt);
3273 gimple_remove_stmt_histograms (cfun, stmt);
3276 switch (DECL_FUNCTION_CODE (callee))
3278 case BUILT_IN_CONSTANT_P:
3279 /* Resolve __builtin_constant_p. If it hasn't been
3280 folded to integer_one_node by now, it's fairly
3281 certain that the value simply isn't constant. */
3282 result = integer_zero_node;
3285 case BUILT_IN_STACK_RESTORE:
3286 result = optimize_stack_restore (i);
3292 case BUILT_IN_VA_START:
3293 case BUILT_IN_VA_END:
3294 case BUILT_IN_VA_COPY:
3295 /* These shouldn't be folded before pass_stdarg. */
3296 result = optimize_stdarg_builtin (stmt);
3306 if (dump_file && (dump_flags & TDF_DETAILS))
3308 fprintf (dump_file, "Simplified\n ");
3309 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3313 push_stmt_changes (gsi_stmt_ptr (&i));
3315 if (!update_call_from_tree (&i, result))
3317 gimplify_and_update_call_from_tree (&i, result);
3318 todoflags |= TODO_rebuild_alias;
3321 stmt = gsi_stmt (i);
3322 pop_stmt_changes (gsi_stmt_ptr (&i));
3324 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3325 && gimple_purge_dead_eh_edges (bb))
3328 if (dump_file && (dump_flags & TDF_DETAILS))
3330 fprintf (dump_file, "to\n ");
3331 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3332 fprintf (dump_file, "\n");
3335 /* Retry the same statement if it changed into another
3336 builtin; there might be new opportunities now. */
3337 if (gimple_code (stmt) != GIMPLE_CALL)
3342 callee = gimple_call_fndecl (stmt);
3344 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3345 || DECL_FUNCTION_CODE (callee) == fcode)
3350 /* Delete unreachable blocks. */
3352 todoflags |= TODO_cleanup_cfg;
3358 struct gimple_opt_pass pass_fold_builtins =
3364 execute_fold_all_builtins, /* execute */
3367 0, /* static_pass_number */
3369 PROP_cfg | PROP_ssa, /* properties_required */
3370 0, /* properties_provided */
3371 0, /* properties_destroyed */
3372 0, /* todo_flags_start */
3375 | TODO_update_ssa /* todo_flags_finish */