1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
3 2010 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
44 CONSTANT -> V_i has been found to hold a constant
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of consider it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
104 Constant propagation with conditional branches,
105 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
107 Building an Optimizing Compiler,
108 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
110 Advanced Compiler Design and Implementation,
111 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
115 #include "coretypes.h"
120 #include "basic-block.h"
122 #include "function.h"
123 #include "tree-pretty-print.h"
124 #include "gimple-pretty-print.h"
126 #include "tree-dump.h"
127 #include "tree-flow.h"
128 #include "tree-pass.h"
129 #include "tree-ssa-propagate.h"
130 #include "value-prof.h"
131 #include "langhooks.h"
133 #include "diagnostic-core.h"
138 /* Possible lattice values. */
147 struct prop_value_d {
/* Lattice state of the SSA name: one of the four levels described in
   the overview comment at the top of the file.  */
149 ccp_lattice_t lattice_val;
151 /* Propagated value. */
155 typedef struct prop_value_d prop_value_t;
157 /* Array of propagated constant values. After propagation,
158 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
159 the constant is held in an SSA name representing a memory store
160 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
161 memory reference used to store (i.e., the LHS of the assignment
163 static prop_value_t *const_val;
/* Forward declarations for helpers defined later in this file.  */
165 static void canonicalize_float_value (prop_value_t *);
166 static bool ccp_fold_stmt (gimple_stmt_iterator *);
168 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
171 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
/* One branch per lattice state; only CONSTANT carries an associated
   tree value, which is printed after the state name.  */
173 switch (val.lattice_val)
176 fprintf (outf, "%sUNINITIALIZED", prefix);
179 fprintf (outf, "%sUNDEFINED", prefix);
182 fprintf (outf, "%sVARYING", prefix);
185 fprintf (outf, "%sCONSTANT ", prefix);
186 print_generic_expr (outf, val.value, dump_flags);
194 /* Print lattice value VAL to stderr. */
/* Convenience wrapper intended to be called from a debugger; simply
   forwards to dump_lattice_value with stderr and an empty prefix.  */
196 void debug_lattice_value (prop_value_t val);
199 debug_lattice_value (prop_value_t val)
201 dump_lattice_value (stderr, "", val);
202 fprintf (stderr, "\n");
206 /* Compute a default value for variable VAR and store it in the
207 CONST_VAL array. The following rules are used to get default
210 1- Global and static variables that are declared constant are
213 2- Any other value is considered UNDEFINED. This is useful when
214 considering PHI nodes. PHI arguments that are undefined do not
215 change the constant value of the PHI node, which allows for more
216 constants to be propagated.
218 3- Variables defined by statements other than assignments and PHI
219 nodes are considered VARYING.
221 4- Initial values of variables that are not GIMPLE registers are
222 considered VARYING. */
225 get_default_value (tree var)
227 tree sym = SSA_NAME_VAR (var);
/* Start from UNINITIALIZED; the branches below upgrade it based on
   VAR's defining statement.  */
228 prop_value_t val = { UNINITIALIZED, NULL_TREE };
231 stmt = SSA_NAME_DEF_STMT (var);
233 if (gimple_nop_p (stmt))
235 /* Variables defined by an empty statement are those used
236 before being initialized. If VAR is a local variable, we
237 can assume initially that it is UNDEFINED, otherwise we must
238 consider it VARYING. */
239 if (is_gimple_reg (sym)
240 && TREE_CODE (sym) == VAR_DECL)
241 val.lattice_val = UNDEFINED;
243 val.lattice_val = VARYING;
245 else if (is_gimple_assign (stmt)
246 /* Value-returning GIMPLE_CALL statements assign to
247 a variable, and are treated similarly to GIMPLE_ASSIGN. */
248 || (is_gimple_call (stmt)
249 && gimple_call_lhs (stmt) != NULL_TREE)
250 || gimple_code (stmt) == GIMPLE_PHI)
/* A single-RHS copy from a DECL whose constant initializer is known
   can start out as CONSTANT right away.  */
253 if (gimple_assign_single_p (stmt)
254 && DECL_P (gimple_assign_rhs1 (stmt))
255 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
257 val.lattice_val = CONSTANT;
261 /* Any other variable defined by an assignment or a PHI node
262 is considered UNDEFINED. */
263 val.lattice_val = UNDEFINED;
267 /* Otherwise, VAR will never take on a constant value. */
268 val.lattice_val = VARYING;
275 /* Get the constant value associated with variable VAR. */
277 static inline prop_value_t *
/* Returns a pointer into CONST_VAL, lazily replacing an UNINITIALIZED
   entry with the variable's default value on first access.  */
282 if (const_val == NULL)
285 val = &const_val[SSA_NAME_VERSION (var)];
286 if (val->lattice_val == UNINITIALIZED)
287 *val = get_default_value (var);
/* Canonicalize before handing the value out so -0.0/NaN handling is
   uniform across all readers (see canonicalize_float_value).  */
289 canonicalize_float_value (val);
294 /* Return the constant tree value associated with VAR. */
/* Only a CONSTANT lattice entry yields a tree; any other state is
   treated as "no constant known".  */
297 get_constant_value (tree var)
299 prop_value_t *val = get_value (var);
300 if (val && val->lattice_val == CONSTANT)
305 /* Sets the value associated with VAR to VARYING. */
308 set_value_varying (tree var)
310 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
/* VARYING never carries a value, so clear it as well.  */
312 val->lattice_val = VARYING;
313 val->value = NULL_TREE;
316 /* For float types, modify the value of VAL to make ccp work correctly
317 for non-standard values (-0, NaN):
319 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
320 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
321 This is to fix the following problem (see PR 29921): Suppose we have
325 and we set value of y to NaN. This causes value of x to be set to NaN.
326 When we later determine that y is in fact VARYING, fold uses the fact
327 that HONOR_NANS is false, and we try to change the value of x to 0,
328 causing an ICE. With HONOR_NANS being false, the real appearance of
329 NaN would cause undefined behavior, though, so claiming that y (and x)
330 are UNDEFINED initially is correct. */
333 canonicalize_float_value (prop_value_t *val)
335 enum machine_mode mode;
/* Only CONSTANT real-valued lattice entries need canonicalization;
   everything else is left untouched.  */
339 if (val->lattice_val != CONSTANT
340 || TREE_CODE (val->value) != REAL_CST)
343 d = TREE_REAL_CST (val->value);
344 type = TREE_TYPE (val->value);
345 mode = TYPE_MODE (type);
/* -0.0 -> +0.0 when signed zeros are not honored for this mode.  */
347 if (!HONOR_SIGNED_ZEROS (mode)
348 && REAL_VALUE_MINUS_ZERO (d))
350 val->value = build_real (type, dconst0);
/* NaN -> UNDEFINED when NaNs are not honored (see PR 29921 above).  */
354 if (!HONOR_NANS (mode)
355 && REAL_VALUE_ISNAN (d))
357 val->lattice_val = UNDEFINED;
363 /* Set the value for variable VAR to NEW_VAL. Return true if the new
364 value is different from VAR's previous value. */
367 set_lattice_value (tree var, prop_value_t new_val)
369 /* We can deal with old UNINITIALIZED values just fine here. */
370 prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];
372 canonicalize_float_value (&new_val);
374 /* Lattice transitions must always be monotonically increasing in
375 value. If *OLD_VAL and NEW_VAL are the same, return false to
376 inform the caller that this was a non-transition. */
378 gcc_assert (old_val->lattice_val < new_val.lattice_val
379 || (old_val->lattice_val == new_val.lattice_val
380 && ((!old_val->value && !new_val.value)
381 || operand_equal_p (old_val->value, new_val.value, 0))));
/* A state change means the SSA edges out of VAR's definition must be
   re-simulated; the propagation engine does that when we return true.  */
383 if (old_val->lattice_val != new_val.lattice_val)
385 if (dump_file && (dump_flags & TDF_DETAILS))
387 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
388 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
393 gcc_assert (new_val.lattice_val != UNINITIALIZED);
400 /* Return the value for the tree operand EXPR. */
403 get_value_for_expr (tree expr)
/* SSA names use their lattice entry; invariants become CONSTANT;
   anything else is VARYING.  */
407 if (TREE_CODE (expr) == SSA_NAME)
408 val = *(get_value (expr));
409 else if (is_gimple_min_invariant (expr))
411 val.lattice_val = CONSTANT;
413 canonicalize_float_value (&val);
417 val.lattice_val = VARYING;
418 val.value = NULL_TREE;
425 /* Return the likely CCP lattice value for STMT.
427 If STMT has no operands, then return CONSTANT.
429 Else if undefinedness of operands of STMT cause its value to be
430 undefined, then return UNDEFINED.
432 Else if any operands of STMT are constants, then return CONSTANT.
434 Else return VARYING. */
437 likely_value (gimple stmt)
439 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
444 enum gimple_code code = gimple_code (stmt);
446 /* This function appears to be called only for assignments, calls,
447 conditionals, and switches, due to the logic in visit_stmt. */
448 gcc_assert (code == GIMPLE_ASSIGN
449 || code == GIMPLE_CALL
450 || code == GIMPLE_COND
451 || code == GIMPLE_SWITCH);
453 /* If the statement has volatile operands, it won't fold to a
455 if (gimple_has_volatile_ops (stmt))
458 /* Arrive here for more complex cases. */
459 has_constant_operand = false;
460 has_undefined_operand = false;
461 all_undefined_operands = true;
/* Scan the SSA use operands, classifying each against the lattice.  */
462 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
464 prop_value_t *val = get_value (use);
466 if (val->lattice_val == UNDEFINED)
467 has_undefined_operand = true;
469 all_undefined_operands = false;
471 if (val->lattice_val == CONSTANT)
472 has_constant_operand = true;
475 /* There may be constants in regular rhs operands. For calls we
476 have to ignore lhs, fndecl and static chain, otherwise only
478 for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
479 i < gimple_num_ops (stmt); ++i)
481 tree op = gimple_op (stmt, i);
482 if (!op || TREE_CODE (op) == SSA_NAME)
484 if (is_gimple_min_invariant (op))
485 has_constant_operand = true;
/* A literal constant operand means not everything feeding the
   statement is UNDEFINED.  */
488 if (has_constant_operand)
489 all_undefined_operands = false;
491 /* If the operation combines operands like COMPLEX_EXPR make sure to
492 not mark the result UNDEFINED if only one part of the result is
494 if (has_undefined_operand && all_undefined_operands)
496 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
498 switch (gimple_assign_rhs_code (stmt))
500 /* Unary operators are handled with all_undefined_operands. */
503 case POINTER_PLUS_EXPR:
504 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
505 Not bitwise operators, one VARYING operand may specify the
506 result completely. Not logical operators for the same reason.
507 Not COMPLEX_EXPR as one VARYING operand makes the result partly
508 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
509 the undefined operand may be promoted. */
516 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
517 fall back to VARYING even if there were CONSTANT operands. */
518 if (has_undefined_operand)
521 /* We do not consider virtual operands here -- load from read-only
522 memory may have only VARYING virtual operands, but still be
524 if (has_constant_operand
525 || gimple_references_memory_p (stmt))
531 /* Returns true if STMT cannot be constant. */
534 surely_varying_stmt_p (gimple stmt)
536 /* If the statement has operands that we cannot handle, it cannot be
538 if (gimple_has_volatile_ops (stmt))
541 /* If it is a call and does not return a value or is not a
542 builtin and not an indirect call, it is varying. */
543 if (is_gimple_call (stmt))
/* Non-builtin direct calls and calls without an LHS cannot produce
   a trackable constant.  */
546 if (!gimple_call_lhs (stmt)
547 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
548 && !DECL_BUILT_IN (fndecl)))
552 /* Any other store operation is not interesting. */
553 else if (gimple_vdef (stmt))
556 /* Anything other than assignments and conditional jumps are not
557 interesting for CCP. */
558 if (gimple_code (stmt) != GIMPLE_ASSIGN
559 && gimple_code (stmt) != GIMPLE_COND
560 && gimple_code (stmt) != GIMPLE_SWITCH
561 && gimple_code (stmt) != GIMPLE_CALL)
567 /* Initialize local data structures for CCP. */
570 ccp_initialize (void)
/* One lattice slot per SSA name, zero-initialized (UNINITIALIZED).  */
574 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
576 /* Initialize simulation flags for PHI nodes and statements. */
579 gimple_stmt_iterator i;
581 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
583 gimple stmt = gsi_stmt (i);
586 /* If the statement is a control insn, then we do not
587 want to avoid simulating the statement once. Failure
588 to do so means that those edges will never get added. */
589 if (stmt_ends_bb_p (stmt))
592 is_varying = surely_varying_stmt_p (stmt);
599 /* If the statement will not produce a constant, mark
600 all its outputs VARYING. */
601 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
602 set_value_varying (def);
604 prop_set_simulate_again (stmt, !is_varying);
608 /* Now process PHI nodes. We never clear the simulate_again flag on
609 phi nodes, since we do not know which edges are executable yet,
610 except for phi nodes for virtual operands when we do not do store ccp. */
613 gimple_stmt_iterator i;
615 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
617 gimple phi = gsi_stmt (i);
619 if (!is_gimple_reg (gimple_phi_result (phi)))
620 prop_set_simulate_again (phi, false);
622 prop_set_simulate_again (phi, true);
627 /* Debug count support. Reset the values of ssa names
628 VARYING when the total number ssa names analyzed is
629 beyond the debug count specified. */
635 for (i = 0; i < num_ssa_names; i++)
639 const_val[i].lattice_val = VARYING;
640 const_val[i].value = NULL_TREE;
646 /* Do final substitution of propagated values, cleanup the flowgraph and
647 free allocated storage.
649 Return TRUE when something was optimized. */
654 bool something_changed;
657 /* Perform substitutions based on the known constant values. */
/* substitute_and_fold walks the IL, replaces uses with
   get_constant_value results and lets ccp_fold_stmt do CCP-specific
   folding.  */
658 something_changed = substitute_and_fold (get_constant_value,
659 ccp_fold_stmt, true);
663 return something_changed;;
667 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
670 any M UNDEFINED = any
671 any M VARYING = VARYING
672 Ci M Cj = Ci if (i == j)
673 Ci M Cj = VARYING if (i != j)
677 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
679 if (val1->lattice_val == UNDEFINED)
681 /* UNDEFINED M any = any */
684 else if (val2->lattice_val == UNDEFINED)
686 /* any M UNDEFINED = any
687 Nothing to do. VAL1 already contains the value we want. */
690 else if (val1->lattice_val == VARYING
691 || val2->lattice_val == VARYING)
693 /* any M VARYING = VARYING. */
694 val1->lattice_val = VARYING;
695 val1->value = NULL_TREE;
697 else if (val1->lattice_val == CONSTANT
698 && val2->lattice_val == CONSTANT
699 && simple_cst_equal (val1->value, val2->value) == 1)
701 /* Ci M Cj = Ci if (i == j)
702 Ci M Cj = VARYING if (i != j)
704 If these two values come from memory stores, make sure that
705 they come from the same memory reference.
706 Nothing to do. VAL1 already contains the value we want. */
710 /* Any other combination is VARYING. */
711 val1->lattice_val = VARYING;
712 val1->value = NULL_TREE;
717 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
718 lattice values to determine PHI_NODE's lattice value. The value of a
719 PHI node is determined calling ccp_lattice_meet with all the arguments
720 of the PHI node that are incoming via executable edges. */
722 static enum ssa_prop_result
723 ccp_visit_phi_node (gimple phi)
726 prop_value_t *old_val, new_val;
728 if (dump_file && (dump_flags & TDF_DETAILS))
730 fprintf (dump_file, "\nVisiting PHI node: ");
731 print_gimple_stmt (dump_file, phi, 0, dump_flags);
734 old_val = get_value (gimple_phi_result (phi));
735 switch (old_val->lattice_val)
738 return SSA_PROP_VARYING;
/* Start the meet from UNDEFINED (the lattice top), so arguments on
   non-executable edges or with UNDEFINED values do not lower it.  */
745 new_val.lattice_val = UNDEFINED;
746 new_val.value = NULL_TREE;
753 for (i = 0; i < gimple_phi_num_args (phi); i++)
755 /* Compute the meet operator over all the PHI arguments flowing
756 through executable edges. */
757 edge e = gimple_phi_arg_edge (phi, i);
759 if (dump_file && (dump_flags & TDF_DETAILS))
762 "\n Argument #%d (%d -> %d %sexecutable)\n",
763 i, e->src->index, e->dest->index,
764 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
767 /* If the incoming edge is executable, Compute the meet operator for
768 the existing value of the PHI node and the current PHI argument. */
769 if (e->flags & EDGE_EXECUTABLE)
771 tree arg = gimple_phi_arg (phi, i)->def;
772 prop_value_t arg_val = get_value_for_expr (arg);
774 ccp_lattice_meet (&new_val, &arg_val);
776 if (dump_file && (dump_flags & TDF_DETAILS))
778 fprintf (dump_file, "\t");
779 print_generic_expr (dump_file, arg, dump_flags);
780 dump_lattice_value (dump_file, "\tValue: ", arg_val);
781 fprintf (dump_file, "\n");
/* Once VARYING, the meet can only stay VARYING; stop scanning.  */
784 if (new_val.lattice_val == VARYING)
789 if (dump_file && (dump_flags & TDF_DETAILS))
791 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
792 fprintf (dump_file, "\n\n");
795 /* Make the transition to the new value. */
796 if (set_lattice_value (gimple_phi_result (phi), new_val))
798 if (new_val.lattice_val == VARYING)
799 return SSA_PROP_VARYING;
801 return SSA_PROP_INTERESTING;
804 return SSA_PROP_NOT_INTERESTING;
807 /* Return the constant value for OP or OP otherwise. */
/* Substitute an SSA name by its known constant lattice value, if any;
   non-SSA operands pass through unchanged.  */
810 valueize_op (tree op)
812 if (TREE_CODE (op) == SSA_NAME)
814 tree tem = get_constant_value (op);
821 /* CCP specific front-end to the non-destructive constant folding
824 Attempt to simplify the RHS of STMT knowing that one or more
825 operands are constants.
827 If simplification is possible, return the simplified RHS,
828 otherwise return the original RHS or NULL_TREE. */
831 ccp_fold (gimple stmt)
833 location_t loc = gimple_location (stmt);
/* Dispatch on the statement kind; each case valueizes operands via
   the lattice and then uses the generic fold_* routines.  */
834 switch (gimple_code (stmt))
838 enum tree_code subcode = gimple_assign_rhs_code (stmt);
840 switch (get_gimple_rhs_class (subcode))
842 case GIMPLE_SINGLE_RHS:
844 tree rhs = gimple_assign_rhs1 (stmt);
845 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
847 if (TREE_CODE (rhs) == SSA_NAME)
849 /* If the RHS is an SSA_NAME, return its known constant value,
851 return get_constant_value (rhs);
853 /* Handle propagating invariant addresses into address operations.
854 The folding we do here matches that in tree-ssa-forwprop.c. */
855 else if (TREE_CODE (rhs) == ADDR_EXPR)
858 base = &TREE_OPERAND (rhs, 0);
859 while (handled_component_p (*base))
860 base = &TREE_OPERAND (*base, 0);
861 if (TREE_CODE (*base) == MEM_REF
862 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
864 tree val = get_constant_value (TREE_OPERAND (*base, 0));
866 && TREE_CODE (val) == ADDR_EXPR)
868 tree ret, save = *base;
870 new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
872 TREE_OPERAND (*base, 1));
873 /* We need to return a new tree, not modify the IL
874 or share parts of it. So play some tricks to
875 avoid manually building it. */
877 ret = unshare_expr (rhs);
878 recompute_tree_invariant_for_addr_expr (ret);
/* A vector CONSTRUCTOR with one element per subpart may fold to a
   VECTOR_CST when all elements valueize to literal constants.  */
884 else if (TREE_CODE (rhs) == CONSTRUCTOR
885 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
886 && (CONSTRUCTOR_NELTS (rhs)
887 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
893 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
895 val = valueize_op (val);
896 if (TREE_CODE (val) == INTEGER_CST
897 || TREE_CODE (val) == REAL_CST
898 || TREE_CODE (val) == FIXED_CST)
899 list = tree_cons (NULL_TREE, val, list);
904 return build_vector (TREE_TYPE (rhs), nreverse (list));
907 if (kind == tcc_reference)
909 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
910 || TREE_CODE (rhs) == REALPART_EXPR
911 || TREE_CODE (rhs) == IMAGPART_EXPR)
912 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
914 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
916 return fold_unary_loc (EXPR_LOCATION (rhs),
918 TREE_TYPE (rhs), val);
920 else if (TREE_CODE (rhs) == MEM_REF
921 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
923 tree val = get_constant_value (TREE_OPERAND (rhs, 0));
925 && TREE_CODE (val) == ADDR_EXPR)
927 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
929 TREE_OPERAND (rhs, 1));
934 return fold_const_aggregate_ref (rhs);
936 else if (kind == tcc_declaration)
937 return get_symbol_constant_value (rhs);
941 case GIMPLE_UNARY_RHS:
943 /* Handle unary operators that can appear in GIMPLE form.
944 Note that we know the single operand must be a constant,
945 so this should almost always return a simplified RHS. */
946 tree lhs = gimple_assign_lhs (stmt);
947 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
949 /* Conversions are useless for CCP purposes if they are
950 value-preserving. Thus the restrictions that
951 useless_type_conversion_p places for pointer type conversions
952 do not apply here. Substitution later will only substitute to
954 if (CONVERT_EXPR_CODE_P (subcode)
955 && POINTER_TYPE_P (TREE_TYPE (lhs))
956 && POINTER_TYPE_P (TREE_TYPE (op0)))
959 /* Try to re-construct array references on-the-fly. */
960 if (!useless_type_conversion_p (TREE_TYPE (lhs),
962 && ((tem = maybe_fold_offset_to_address
964 op0, integer_zero_node, TREE_TYPE (lhs)))
971 fold_unary_ignore_overflow_loc (loc, subcode,
972 gimple_expr_type (stmt), op0);
975 case GIMPLE_BINARY_RHS:
977 /* Handle binary operators that can appear in GIMPLE form. */
978 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
979 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
981 /* Translate &x + CST into an invariant form suitable for
982 further propagation. */
983 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
984 && TREE_CODE (op0) == ADDR_EXPR
985 && TREE_CODE (op1) == INTEGER_CST)
987 tree off = fold_convert (ptr_type_node, op1);
988 return build_fold_addr_expr
989 (fold_build2 (MEM_REF,
990 TREE_TYPE (TREE_TYPE (op0)),
991 unshare_expr (op0), off));
994 return fold_binary_loc (loc, subcode,
995 gimple_expr_type (stmt), op0, op1);
998 case GIMPLE_TERNARY_RHS:
1000 /* Handle ternary operators that can appear in GIMPLE form. */
1001 tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
1002 tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
1003 tree op2 = valueize_op (gimple_assign_rhs3 (stmt));
1005 return fold_ternary_loc (loc, subcode,
1006 gimple_expr_type (stmt), op0, op1, op2);
/* Calls: only builtins with a valueized FUNCTION_DECL address are
   folded, via a temporary CALL_EXPR handed to fold_call_expr.  */
1017 tree fn = valueize_op (gimple_call_fn (stmt));
1018 if (TREE_CODE (fn) == ADDR_EXPR
1019 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1020 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1022 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1025 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1026 args[i] = valueize_op (gimple_call_arg (stmt, i));
1027 call = build_call_array_loc (loc,
1028 gimple_call_return_type (stmt),
1029 fn, gimple_call_num_args (stmt), args);
1030 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
1032 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1033 STRIP_NOPS (retval);
1041 /* Handle comparison operators that can appear in GIMPLE form. */
1042 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1043 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1044 enum tree_code code = gimple_cond_code (stmt);
1045 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1050 /* Return the constant switch index. */
1051 return valueize_op (gimple_switch_index (stmt));
1059 /* Return the tree representing the element referenced by T if T is an
1060 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1061 NULL_TREE otherwise. */
1064 fold_const_aggregate_ref (tree t)
1066 tree base, ctor, idx, field;
1067 unsigned HOST_WIDE_INT cnt;
1071 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1072 return get_symbol_constant_value (t);
/* Dispatch on the reference kind (ARRAY_REF, COMPONENT_REF,
   REALPART/IMAGPART, MEM_REF, ...).  */
1074 switch (TREE_CODE (t))
1077 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1078 DECL_INITIAL. If BASE is a nested reference into another
1079 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1080 the inner reference. */
1081 base = TREE_OPERAND (t, 0);
1082 switch (TREE_CODE (base))
1085 /* ??? We could handle this case. */
1086 if (!integer_zerop (TREE_OPERAND (base, 1)))
1088 base = get_base_address (base);
1090 || TREE_CODE (base) != VAR_DECL)
/* Only read-only, locally-bound array variables have a usable
   DECL_INITIAL here.  */
1095 if (!TREE_READONLY (base)
1096 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1097 || !targetm.binds_local_p (base))
1100 ctor = DECL_INITIAL (base);
1105 ctor = fold_const_aggregate_ref (base);
1117 if (ctor == NULL_TREE
1118 || (TREE_CODE (ctor) != CONSTRUCTOR
1119 && TREE_CODE (ctor) != STRING_CST)
1120 || !TREE_STATIC (ctor))
1123 /* Get the index. If we have an SSA_NAME, try to resolve it
1124 with the current lattice value for the SSA_NAME. */
1125 idx = TREE_OPERAND (t, 1);
1126 switch (TREE_CODE (idx))
1129 if ((tem = get_constant_value (idx))
1130 && TREE_CODE (tem) == INTEGER_CST)
1143 /* Fold read from constant string. */
1144 if (TREE_CODE (ctor) == STRING_CST)
1146 if ((TYPE_MODE (TREE_TYPE (t))
1147 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1148 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1150 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1151 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1152 return build_int_cst_type (TREE_TYPE (t),
1153 (TREE_STRING_POINTER (ctor)
1154 [TREE_INT_CST_LOW (idx)]));
1158 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1159 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1160 if (tree_int_cst_equal (cfield, idx))
1163 if (TREE_CODE (cval) == ADDR_EXPR)
1165 tree base = get_base_address (TREE_OPERAND (cval, 0));
1166 if (base && TREE_CODE (base) == VAR_DECL)
1167 add_referenced_var (base);
1174 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1175 DECL_INITIAL. If BASE is a nested reference into another
1176 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1177 the inner reference. */
1178 base = TREE_OPERAND (t, 0);
1179 switch (TREE_CODE (base))
1182 if (!TREE_READONLY (base)
1183 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1184 || !targetm.binds_local_p (base))
1187 ctor = DECL_INITIAL (base);
1192 ctor = fold_const_aggregate_ref (base);
1199 if (ctor == NULL_TREE
1200 || TREE_CODE (ctor) != CONSTRUCTOR
1201 || !TREE_STATIC (ctor))
1204 field = TREE_OPERAND (t, 1);
1206 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1208 /* FIXME: Handle bit-fields. */
1209 && ! DECL_BIT_FIELD (cfield))
1212 if (TREE_CODE (cval) == ADDR_EXPR)
1214 tree base = get_base_address (TREE_OPERAND (cval, 0));
1215 if (base && TREE_CODE (base) == VAR_DECL)
1216 add_referenced_var (base);
/* REALPART/IMAGPART of a folded COMPLEX_CST operand.  */
1225 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1226 if (c && TREE_CODE (c) == COMPLEX_CST)
1227 return fold_build1_loc (EXPR_LOCATION (t),
1228 TREE_CODE (t), TREE_TYPE (t), c);
1233 /* Get the base object we are accessing. */
1234 base = TREE_OPERAND (t, 0);
1235 if (TREE_CODE (base) == SSA_NAME
1236 && (tem = get_constant_value (base)))
1238 if (TREE_CODE (base) != ADDR_EXPR)
1240 base = TREE_OPERAND (base, 0);
1241 switch (TREE_CODE (base))
1245 && !AGGREGATE_TYPE_P (TREE_TYPE (base))
1246 && integer_zerop (TREE_OPERAND (t, 1)))
1248 tree res = get_symbol_constant_value (base);
1250 && !useless_type_conversion_p
1251 (TREE_TYPE (t), TREE_TYPE (res)))
1252 res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
1256 if (!TREE_READONLY (base)
1257 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1258 || !targetm.binds_local_p (base))
1261 ctor = DECL_INITIAL (base);
1273 if (ctor == NULL_TREE
1274 || (TREE_CODE (ctor) != CONSTRUCTOR
1275 && TREE_CODE (ctor) != STRING_CST)
1276 || !TREE_STATIC (ctor))
1279 /* Get the byte offset. */
1280 idx = TREE_OPERAND (t, 1);
1282 /* Fold read from constant string. */
1283 if (TREE_CODE (ctor) == STRING_CST)
1285 if ((TYPE_MODE (TREE_TYPE (t))
1286 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1287 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1289 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1290 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1291 return build_int_cst_type (TREE_TYPE (t),
1292 (TREE_STRING_POINTER (ctor)
1293 [TREE_INT_CST_LOW (idx)]));
1297 /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR. */
/* Convert the byte offset into an element index when it is an exact
   multiple of the element size, then search the CONSTRUCTOR.  */
1298 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
1299 && (TYPE_MODE (TREE_TYPE (t))
1300 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1301 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
1304 (TRUNC_MOD_EXPR, idx,
1305 size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
1307 idx = int_const_binop (TRUNC_DIV_EXPR, idx,
1308 size_int (GET_MODE_SIZE
1309 (TYPE_MODE (TREE_TYPE (t)))), 0);
1310 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1311 if (tree_int_cst_equal (cfield, idx))
1314 if (TREE_CODE (cval) == ADDR_EXPR)
1316 tree base = get_base_address (TREE_OPERAND (cval, 0));
1317 if (base && TREE_CODE (base) == VAR_DECL)
1318 add_referenced_var (base);
1320 if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
1322 else if (CONSTANT_CLASS_P (cval))
1323 return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
1337 /* Evaluate statement STMT.
1338 Valid only for assignments, calls, conditionals, and switches. */
1341 evaluate_stmt (gimple stmt)
1344 tree simplified = NULL_TREE;
1345 ccp_lattice_t likelyvalue = likely_value (stmt);
/* Defer overflow warnings: they are only emitted if the folded result
   is actually used (see fold_undefer_overflow_warnings below).  */
1348 fold_defer_overflow_warnings ();
1350 /* If the statement is likely to have a CONSTANT result, then try
1351 to fold the statement to determine the constant value. */
1352 /* FIXME. This is the only place that we call ccp_fold.
1353 Since likely_value never returns CONSTANT for calls, we will
1354 not attempt to fold them, including builtins that may profit. */
1355 if (likelyvalue == CONSTANT)
1356 simplified = ccp_fold (stmt);
1357 /* If the statement is likely to have a VARYING result, then do not
1358 bother folding the statement. */
1359 else if (likelyvalue == VARYING)
1361 enum gimple_code code = gimple_code (stmt);
1362 if (code == GIMPLE_ASSIGN)
1364 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1366 /* Other cases cannot satisfy is_gimple_min_invariant
1368 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1369 simplified = gimple_assign_rhs1 (stmt);
1371 else if (code == GIMPLE_SWITCH)
1372 simplified = gimple_switch_index (stmt);
1374 /* These cannot satisfy is_gimple_min_invariant without folding. */
1375 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1378 is_constant = simplified && is_gimple_min_invariant (simplified);
1380 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1382 if (dump_file && (dump_flags & TDF_DETAILS))
1384 fprintf (dump_file, "which is likely ");
1385 switch (likelyvalue)
1388 fprintf (dump_file, "CONSTANT");
1391 fprintf (dump_file, "UNDEFINED");
1394 fprintf (dump_file, "VARYING");
1398 fprintf (dump_file, "\n");
1403 /* The statement produced a constant value. */
1404 val.lattice_val = CONSTANT;
1405 val.value = simplified;
1409 /* The statement produced a nonconstant value. If the statement
1410 had UNDEFINED operands, then the result of the statement
1411 should be UNDEFINED. Otherwise, the statement is VARYING. */
1412 if (likelyvalue == UNDEFINED)
1413 val.lattice_val = likelyvalue;
1415 val.lattice_val = VARYING;
1417 val.value = NULL_TREE;
1423 /* Fold the stmt at *GSI with CCP specific information that propagating
1424 and regular folding does not catch. */
/* NOTE(review): this excerpt elides the return type, the switch case
   labels, the declaration of VAL and most braces; the arm comments
   below are inferred from the visible calls and should be confirmed
   against the full source.  */
1427 ccp_fold_stmt (gimple_stmt_iterator *gsi)
1429 gimple stmt = gsi_stmt (*gsi);
1431 switch (gimple_code (stmt))
/* Conditional arm (case label elided): re-evaluate the predicate and,
   when it is a known INTEGER_CST, rewrite the GIMPLE_COND to a
   constant true/false condition.  */
1436 /* Statement evaluation will handle type mismatches in constants
1437 more gracefully than the final propagation. This allows us to
1438 fold more conditionals here. */
1439 val = evaluate_stmt (stmt);
1440 if (val.lattice_val != CONSTANT
1441 || TREE_CODE (val.value) != INTEGER_CST)
1444 if (integer_zerop (val.value))
1445 gimple_cond_make_false (stmt);
1447 gimple_cond_make_true (stmt);
/* Call arm (case label elided): replace a call whose LHS folded to a
   constant, then propagate constants into the argument slots.  */
1454 tree lhs = gimple_call_lhs (stmt);
1457 bool changed = false;
1460 /* If the call was folded into a constant make sure it goes
1461 away even if we cannot propagate into all uses because of
1464 && TREE_CODE (lhs) == SSA_NAME
1465 && (val = get_constant_value (lhs)))
1467 tree new_rhs = unshare_expr (val);
1469 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1470 TREE_TYPE (new_rhs)))
1471 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1472 res = update_call_from_tree (gsi, new_rhs);
1477 /* Propagate into the call arguments. Compared to replace_uses_in
1478 this can use the argument slot types for type verification
1479 instead of the current argument type. We also can safely
1480 drop qualifiers here as we are dealing with constants anyway. */
1481 argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
1482 for (i = 0; i < gimple_call_num_args (stmt) && argt;
1483 ++i, argt = TREE_CHAIN (argt))
1485 tree arg = gimple_call_arg (stmt, i);
1486 if (TREE_CODE (arg) == SSA_NAME
1487 && (val = get_constant_value (arg))
1488 && useless_type_conversion_p
1489 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
1490 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
1492 gimple_call_set_arg (stmt, i, unshare_expr (val));
/* Assignment arm (case label elided): a single-rhs load whose LHS is
   known constant is replaced in place, view-converting if the types
   do not match trivially.  */
1502 tree lhs = gimple_assign_lhs (stmt);
1505 /* If we have a load that turned out to be constant replace it
1506 as we cannot propagate into all uses in all cases. */
1507 if (gimple_assign_single_p (stmt)
1508 && TREE_CODE (lhs) == SSA_NAME
1509 && (val = get_constant_value (lhs)))
1511 tree rhs = unshare_expr (val);
1512 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
1513 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
1514 gimple_assign_set_rhs_from_tree (gsi, rhs);
1526 /* Visit the assignment statement STMT. Set the value of its LHS to the
1527 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1528 creates virtual definitions, set the value of each new name to that
1529 of the RHS (if we can derive a constant out of the RHS).
1530 Value-returning call statements also perform an assignment, and
1531 are handled here. */
/* NOTE(review): this excerpt elides the declaration of VAL, some
   braces, and the tail of the comment at original line 1558.  */
1533 static enum ssa_prop_result
1534 visit_assignment (gimple stmt, tree *output_p)
1537 enum ssa_prop_result retval;
1539 tree lhs = gimple_get_lhs (stmt);
/* Calls are only visited here when they produce a value.  */
1541 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1542 || gimple_call_lhs (stmt) != NULL_TREE);
1544 if (gimple_assign_single_p (stmt)
1545 && gimple_assign_rhs_code (stmt) == SSA_NAME)
1546 /* For a simple copy operation, we copy the lattice values. */
1547 val = *get_value (gimple_assign_rhs1 (stmt));
1549 /* Evaluate the statement, which could be
1550 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1551 val = evaluate_stmt (stmt);
1553 retval = SSA_PROP_NOT_INTERESTING;
1555 /* Set the lattice value of the statement's output. */
1556 if (TREE_CODE (lhs) == SSA_NAME)
1558 /* If STMT is an assignment to an SSA_NAME, we only have one
1560 if (set_lattice_value (lhs, val))
/* The lattice value changed: tell the propagator whether the new
   value is VARYING or still interesting to simulate.  */
1563 if (val.lattice_val == VARYING)
1564 retval = SSA_PROP_VARYING;
1566 retval = SSA_PROP_INTERESTING;
1574 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1575 if it can determine which edge will be taken. Otherwise, return
1576 SSA_PROP_VARYING. */
/* NOTE(review): this excerpt elides the declarations of VAL and BLOCK
   and the guard that selects between the two returns below (original
   line 1592); per the header comment, SSA_PROP_INTERESTING is the
   single-edge case.  */
1578 static enum ssa_prop_result
1579 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1584 block = gimple_bb (stmt);
1585 val = evaluate_stmt (stmt);
1587 /* Find which edge out of the conditional block will be taken and add it
1588 to the worklist. If no single edge can be determined statically,
1589 return SSA_PROP_VARYING to feed all the outgoing edges to the
1590 propagation engine. */
1591 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1593 return SSA_PROP_INTERESTING;
1595 return SSA_PROP_VARYING;
1599 /* Evaluate statement STMT. If the statement produces an output value and
1600 its evaluation changes the lattice value of its output, return
1601 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1604 If STMT is a conditional branch and we can determine its truth
1605 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1606 value, return SSA_PROP_VARYING. */
/* NOTE(review): the switch case labels, local declarations (DEF, ITER)
   and braces are elided from this excerpt; the arm comments below
   follow the visible dispatch targets.  */
1608 static enum ssa_prop_result
1609 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1614 if (dump_file && (dump_flags & TDF_DETAILS))
1616 fprintf (dump_file, "\nVisiting statement:\n");
1617 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
/* Dispatch on the statement kind to the specialized visitors.  */
1620 switch (gimple_code (stmt))
1623 /* If the statement is an assignment that produces a single
1624 output value, evaluate its RHS to see if the lattice value of
1625 its output has changed. */
1626 return visit_assignment (stmt, output_p);
1629 /* A value-returning call also performs an assignment. */
1630 if (gimple_call_lhs (stmt) != NULL_TREE)
1631 return visit_assignment (stmt, output_p);
1636 /* If STMT is a conditional branch, see if we can determine
1637 which branch will be taken. */
1638 /* FIXME. It appears that we should be able to optimize
1639 computed GOTOs here as well. */
1640 return visit_cond_stmt (stmt, taken_edge_p);
1646 /* Any other kind of statement is not interesting for constant
1647 propagation and, therefore, not worth simulating. */
1648 if (dump_file && (dump_flags & TDF_DETAILS))
1649 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1651 /* Definitions made by statements other than assignments to
1652 SSA_NAMEs represent unknown modifications to their outputs.
1653 Mark them VARYING. */
1654 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1656 prop_value_t v = { VARYING, NULL_TREE };
1657 set_lattice_value (def, v);
1660 return SSA_PROP_VARYING;
1664 /* Main entry point for SSA Conditional Constant Propagation. */
/* NOTE(review): the signatures of the entry point and of its gate
   function are elided from this excerpt; line 1681 below is the body
   of the separate gate function, not of the entry point.  */
1670 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
/* If finalization substituted any constants, request the follow-up
   cleanups; otherwise the elided fallthrough presumably returns 0.  */
1671 if (ccp_finalize ())
1672 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
/* Gate: run the pass only when -ftree-ccp is enabled.  */
1681 return flag_tree_ccp != 0;
/* Pass descriptor registering CCP with the pass manager.  Several
   initializer lines (pass type, name, sub/next links, closing brace)
   are elided from this excerpt.  */
1685 struct gimple_opt_pass pass_ccp =
1690 gate_ccp, /* gate */
1691 do_ssa_ccp, /* execute */
1694 0, /* static_pass_number */
1695 TV_TREE_CCP, /* tv_id */
1696 PROP_cfg | PROP_ssa, /* properties_required */
1697 0, /* properties_provided */
1698 0, /* properties_destroyed */
1699 0, /* todo_flags_start */
1700 TODO_dump_func | TODO_verify_ssa
1701 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1707 /* Try to optimize out __builtin_stack_restore. Optimize it out
1708 if there is another __builtin_stack_restore in the same basic
1709 block and no calls or ASM_EXPRs are in between, or if this block's
1710 only outgoing edge is to EXIT_BLOCK and there are no calls or
1711 ASM_EXPRs after this __builtin_stack_restore. */
/* NOTE(review): this excerpt elides the return type, some early
   returns, case labels of the switch, and several braces.  */
1714 optimize_stack_restore (gimple_stmt_iterator i)
1719 basic_block bb = gsi_bb (i);
1720 gimple call = gsi_stmt (i);
/* Bail out unless this is a one-argument call taking a pointer
   SSA name (the saved stack pointer).  */
1722 if (gimple_code (call) != GIMPLE_CALL
1723 || gimple_call_num_args (call) != 1
1724 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
1725 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
/* Scan forward through the rest of the block; any asm or non-builtin
   call (or alloca) blocks the optimization.  */
1728 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
1730 stmt = gsi_stmt (i);
1731 if (gimple_code (stmt) == GIMPLE_ASM)
1733 if (gimple_code (stmt) != GIMPLE_CALL)
1736 callee = gimple_call_fndecl (stmt);
1738 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1739 /* All regular builtins are ok, just obviously not alloca. */
1740 || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
1743 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
1744 goto second_stack_restore;
1750 /* Allow one successor of the exit block, or zero successors. */
1751 switch (EDGE_COUNT (bb->succs))
1756 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
1762 second_stack_restore:
1764 /* If there's exactly one use, then zap the call to __builtin_stack_save.
1765 If there are multiple uses, then the last one should remove the call.
1766 In any case, whether the call to __builtin_stack_save can be removed
1767 or not is irrelevant to removing the call to __builtin_stack_restore. */
1768 if (has_single_use (gimple_call_arg (call, 0)))
1770 gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
1771 if (is_gimple_call (stack_save))
1773 callee = gimple_call_fndecl (stack_save);
1775 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1776 && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
1778 gimple_stmt_iterator stack_save_gsi;
/* Replace the __builtin_stack_save result with a null pointer so the
   call itself becomes dead and removable.  */
1781 stack_save_gsi = gsi_for_stmt (stack_save);
1782 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
1783 update_call_from_tree (&stack_save_gsi, rhs);
1788 /* No effect, so the statement will be deleted. */
1789 return integer_zero_node;
1792 /* If va_list type is a simple pointer and nothing special is needed,
1793 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
1794 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
1795 pointer assignment. */
/* NOTE(review): this excerpt elides the return type, several early
   returns (presumably returning NULL_TREE on mismatch) and the
   switch's closing/default handling.  */
1798 optimize_stdarg_builtin (gimple call)
1800 tree callee, lhs, rhs, cfun_va_list;
1801 bool va_list_simple_ptr;
1802 location_t loc = gimple_location (call);
1804 if (gimple_code (call) != GIMPLE_CALL)
1807 callee = gimple_call_fndecl (call);
/* The lowering is only valid when the target's va_list is a plain
   char* / void* pointer.  */
1809 cfun_va_list = targetm.fn_abi_va_list (callee);
1810 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
1811 && (TREE_TYPE (cfun_va_list) == void_type_node
1812 || TREE_TYPE (cfun_va_list) == char_type_node);
1814 switch (DECL_FUNCTION_CODE (callee))
1816 case BUILT_IN_VA_START:
/* Lower va_start to "ap = __builtin_next_arg (0)" when the target
   has no special va_start expander.  */
1817 if (!va_list_simple_ptr
1818 || targetm.expand_builtin_va_start != NULL
1819 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
1822 if (gimple_call_num_args (call) != 2)
1825 lhs = gimple_call_arg (call, 0);
1826 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
1827 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
1828 != TYPE_MAIN_VARIANT (cfun_va_list))
1831 lhs = build_fold_indirect_ref_loc (loc, lhs);
1832 rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
1833 1, integer_zero_node);
1834 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
1835 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1837 case BUILT_IN_VA_COPY:
/* Lower va_copy to a plain pointer assignment *dst = src.  */
1838 if (!va_list_simple_ptr)
1841 if (gimple_call_num_args (call) != 2)
1844 lhs = gimple_call_arg (call, 0);
1845 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
1846 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
1847 != TYPE_MAIN_VARIANT (cfun_va_list))
1850 lhs = build_fold_indirect_ref_loc (loc, lhs);
1851 rhs = gimple_call_arg (call, 1);
1852 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
1853 != TYPE_MAIN_VARIANT (cfun_va_list))
1856 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
1857 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
1859 case BUILT_IN_VA_END:
1860 /* No effect, so the statement will be deleted. */
1861 return integer_zero_node;
1868 /* A simple pass that attempts to fold all builtin functions. This pass
1869 is run after we've propagated as many constants as we can. */
/* NOTE(review): this excerpt elides the return type, the FOR_EACH_BB
   loop header, several gsi_next/continue lines, case labels, braces
   and the final return; the comments below cover only visible code.  */
1872 execute_fold_all_builtins (void)
1874 bool cfg_changed = false;
1876 unsigned int todoflags = 0;
1880 gimple_stmt_iterator i;
/* Walk each statement of the block; the iterator is advanced manually
   so a simplified statement can be re-examined in place.  */
1881 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1883 gimple stmt, old_stmt;
1884 tree callee, result;
1885 enum built_in_function fcode;
1887 stmt = gsi_stmt (i);
1889 if (gimple_code (stmt) != GIMPLE_CALL)
/* Only normal builtins are candidates for folding here.  */
1894 callee = gimple_call_fndecl (stmt);
1895 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
1900 fcode = DECL_FUNCTION_CODE (callee);
1902 result = gimple_fold_builtin (stmt);
1905 gimple_remove_stmt_histograms (cfun, stmt);
/* Generic folding failed (presumably); handle a few builtins with
   dedicated logic.  */
1908 switch (DECL_FUNCTION_CODE (callee))
1910 case BUILT_IN_CONSTANT_P:
1911 /* Resolve __builtin_constant_p. If it hasn't been
1912 folded to integer_one_node by now, it's fairly
1913 certain that the value simply isn't constant. */
1914 result = integer_zero_node;
1917 case BUILT_IN_STACK_RESTORE:
1918 result = optimize_stack_restore (i);
1924 case BUILT_IN_VA_START:
1925 case BUILT_IN_VA_END:
1926 case BUILT_IN_VA_COPY:
1927 /* These shouldn't be folded before pass_stdarg. */
1928 result = optimize_stdarg_builtin (stmt);
1938 if (dump_file && (dump_flags & TDF_DETAILS))
1940 fprintf (dump_file, "Simplified\n ");
1941 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
/* Replace the call with RESULT, gimplifying if the plain update is
   not possible; that may expose taken addresses.  */
1945 if (!update_call_from_tree (&i, result))
1947 gimplify_and_update_call_from_tree (&i, result);
1948 todoflags |= TODO_update_address_taken;
1951 stmt = gsi_stmt (i);
/* Replacing a statement can invalidate EH info; purging dead EH
   edges changes the CFG.  */
1954 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
1955 && gimple_purge_dead_eh_edges (bb))
1958 if (dump_file && (dump_flags & TDF_DETAILS))
1960 fprintf (dump_file, "to\n ");
1961 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1962 fprintf (dump_file, "\n");
1965 /* Retry the same statement if it changed into another
1966 builtin, there might be new opportunities now. */
1967 if (gimple_code (stmt) != GIMPLE_CALL)
1972 callee = gimple_call_fndecl (stmt);
1974 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
1975 || DECL_FUNCTION_CODE (callee) == fcode)
1980 /* Delete unreachable blocks. */
1982 todoflags |= TODO_cleanup_cfg;
1988 struct gimple_opt_pass pass_fold_builtins =
1994 execute_fold_all_builtins, /* execute */
1997 0, /* static_pass_number */
1998 TV_NONE, /* tv_id */
1999 PROP_cfg | PROP_ssa, /* properties_required */
2000 0, /* properties_provided */
2001 0, /* properties_destroyed */
2002 0, /* todo_flags_start */
2005 | TODO_update_ssa /* todo_flags_finish */