/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
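
/* A minimal before/after sketch (illustrative only, not part of the
   pass) of what the machinery described above achieves.  Given the
   GIMPLE-level sequence

	x_1 = 4;
	y_2 = x_1 + 5;
	if (y_2 > 10)
	  goto <true>;
	else
	  goto <false>;

   the simulation finds x_1 CONSTANT 4 and then y_2 CONSTANT 9, folds
   the predicate to false, marks the true edge not executable, and the
   final substitute_and_fold walk rewrites the branch accordingly.  */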
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    /* Otherwise, VAR will never take on a constant value.  */
    val.lattice_val = VARYING;

  return val;
}
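
/* For example, an SSA name for an uninitialized local 'int i' has an
   empty defining statement and so starts out UNDEFINED, while an SSA
   name defined by a load from a constant global such as
   'static const int c = 5' starts out CONSTANT 5 via
   get_symbol_constant_value.  */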
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val = get_value (var);
  if (val && val->lattice_val == CONSTANT)
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have
   an assignment such as

     x = y * 0.0

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL_TREE;
      return;
    }
}
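
/* A minimal sketch (compiled out, illustrative only) of the NaN rule
   above: a CONSTANT lattice cell holding a NaN is demoted to
   UNDEFINED when the mode does not honor NaNs.  Assumes real_nan from
   real.h.  */
#if 0
static void
canonicalize_float_value_example (void)
{
  prop_value_t v = { CONSTANT, NULL_TREE };
  REAL_VALUE_TYPE d;

  real_nan (&d, "", 1, TYPE_MODE (float_type_node));
  v.value = build_real (float_type_node, d);

  /* With !HONOR_NANS (mode), V becomes { UNDEFINED, NULL_TREE }.  */
  canonicalize_float_value (&v);
}
#endif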
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
/* Return the value for the tree operand EXPR.  */

static prop_value_t
get_value_for_expr (tree expr)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    val = *(get_value (expr));
  else if (is_gimple_min_invariant (expr))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      canonicalize_float_value (&val);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
    }

  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);
  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;
  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }

  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
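
/* For instance, for 'z_3 = x_1 + y_2' with y_2 UNDEFINED, the
   PLUS_EXPR case above optimistically returns UNDEFINED, since the
   undefined operand may still be assumed to take any convenient
   value; for 'z_3 = x_1 / y_2' the same situation yields VARYING,
   because division is not on the safe list.  */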
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, we always want to
	     simulate it at least once; failing to do so means that
	     its outgoing edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of SSA names to VARYING when
   the total number of SSA names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)  */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.
	 Nothing to do.  VAL1 already contains the value we want.  */
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
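
/* A minimal usage sketch for ccp_lattice_meet (compiled out,
   illustrative only), walking two rows of the table above.  */
#if 0
static void
ccp_lattice_meet_example (void)
{
  prop_value_t a = { CONSTANT, NULL_TREE };
  prop_value_t b = { CONSTANT, NULL_TREE };

  a.value = build_int_cst (integer_type_node, 42);
  b.value = build_int_cst (integer_type_node, 42);

  /* Ci M Ci = Ci: A keeps CONSTANT 42.  */
  ccp_lattice_meet (&a, &b);

  b.lattice_val = VARYING;
  b.value = NULL_TREE;

  /* any M VARYING = VARYING: A drops to VARYING.  */
  ccp_lattice_meet (&a, &b);
}
#endif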
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of
   a PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP, or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_constant_value (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  The folding we do here matches that in
		 tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}
	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;
    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case MEM_REF:
	  /* ??? We could handle this case.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  base = get_base_address (base);
	  if (!base
	      || TREE_CODE (base) != VAR_DECL)
	    return NULL_TREE;

	  /* Fallthru.  */
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;
      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((tem = get_constant_value (idx))
	      && TREE_CODE (tem) == INTEGER_CST)
	    idx = tem;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;
    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }
    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
	  && (tem = get_constant_value (base)))
	base = tem;
      if (TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (DECL_P (base)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (base))
	      && integer_zerop (TREE_OPERAND (t, 1)))
	    {
	      tree res = get_symbol_constant_value (base);
	      if (res
		  && !useless_type_conversion_p
			(TREE_TYPE (t), TREE_TYPE (res)))
		res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
	      return res;
	    }

	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;
      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		STRIP_NOPS (cval);
		if (TREE_CODE (cval) == ADDR_EXPR)
		  {
		    tree base = get_base_address (TREE_OPERAND (cval, 0));
		    if (base && TREE_CODE (base) == VAR_DECL)
		      add_referenced_var (base);
		  }
		if (useless_type_conversion_p (TREE_TYPE (t),
					       TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR,
				      TREE_TYPE (t), cval);
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || TREE_CODE (val.value) != INTEGER_CST)
	  return false;

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }
    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }
    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant, replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  /* Allow zero successors, or a single successor leading to the
     exit block.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
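
/* A minimal sketch (compiled out, illustrative only) of source code
   that produces the pattern above; 'use' is a hypothetical external
   function.  The gimplifier brackets the VLA block with
   __builtin_stack_save and __builtin_stack_restore; since only the
   function exit follows the block and no call or asm intervenes, the
   restore is deleted.  */
#if 0
extern void use (char *);

void
stack_restore_example (int n)
{
  {
    char vla[n];	/* Forces a stack_save/stack_restore pair.  */
    use (vla);
  }
}			/* Restore is adjacent to the exit: removed.  */
#endif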
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
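
/* A minimal sketch (compiled out, illustrative only): on a target
   whose va_list is a plain character pointer, the stdarg builtins in
   a function like the one below are lowered by the folder above.  */
#if 0
int
stdarg_example (int first, ...)
{
  __builtin_va_list ap, aq;
  int arg;

  __builtin_va_start (ap, first);  /* -> ap = __builtin_next_arg (0)  */
  __builtin_va_copy (aq, ap);	   /* -> aq = ap (pointer assignment) */
  arg = __builtin_va_arg (aq, int);
  __builtin_va_end (ap);	   /* -> deleted (integer_zero_node)  */
  __builtin_va_end (aq);
  return arg;
}
#endif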
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};