/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

        UNINITIALIZED   ->  the initial state of the value.  This value
                            is replaced with a correct initial value
                            the first time the value is used, so the
                            rest of the pass does not need to care about
                            it.  Using this value simplifies initialization
                            of the pass, and prevents us from needlessly
                            scanning statements that are never reached.

        UNDEFINED       ->  V_i is a local variable whose definition
                            has not been processed yet.  Therefore we
                            don't yet know if its value is a constant
                            or not.

        CONSTANT        ->  V_i has been found to hold a constant
                            value C.

        VARYING         ->  V_i cannot take a constant value, or if it
                            does, it is not possible to determine it
                            at compile time.
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
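
/* As a worked example (illustrative GIMPLE only; the SSA names below
   are made up), CCP turns

        x_1 = 4;
        y_2 = x_1 + 6;
        if (y_2 > 20)
          ...

   into

        x_1 = 4;
        y_2 = 10;
        if (0)
          ...

   by propagating CONSTANT 4 into the use of x_1, folding y_2 to
   CONSTANT 10, and evaluating the predicate to a constant false, which
   marks the true edge not executable.  */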
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);
  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (is_gimple_reg (sym)
          && TREE_CODE (sym) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
           /* Value-returning GIMPLE_CALL statements assign to
              a variable, and are treated similarly to GIMPLE_ASSIGN.  */
           || (is_gimple_call (stmt)
               && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        /* Any other variable defined by an assignment or a PHI node
           is considered UNDEFINED.  */
        val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
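
/* For example (hypothetical GIMPLE, for illustration only), given

        static const int limit = 42;
        ...
        x_3 = limit;

   rule 1 applies: the load from the constant symbol 'limit' gives x_3
   the default value CONSTANT 42, while an SSA name defined by an
   ordinary copy whose definition has not been simulated yet starts
   out as UNDEFINED per rule 2.  */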
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
              || (old_val->lattice_val == new_val.lattice_val
                  && ((!old_val->value && !new_val.value)
                      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
/* Return the value for the tree operand EXPR.  */

static prop_value_t
get_value_for_expr (tree expr)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    val = *(get_value (expr));
  else if (is_gimple_min_invariant (expr))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      canonicalize_float_value (&val);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
    }

  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
        has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;
  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.  Not logical operators for the same reason.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
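
/* For instance (illustrative only): for y_2 = x_1 + 1 where x_1 is
   CONSTANT 4, likely_value returns CONSTANT and evaluate_stmt will try
   to fold the statement; for y_2 = x_1 + z_3 where x_1 is UNDEFINED,
   PLUS_EXPR is in the whitelist above, so the result is still
   considered UNDEFINED rather than VARYING.  */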
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !DECL_BUILT_IN (fndecl)))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, then we do not
             want to avoid simulating the statement once.  Failure
             to do so means that those edges will never get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;

  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].value = NULL_TREE;
        }
    }
}


/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

                any  M UNDEFINED   = any
                any  M VARYING     = VARYING
                Ci   M Cj          = Ci         if (i == j)
                Ci   M Cj          = VARYING    if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any.  */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any.
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci           if (i == j)
         Ci M Cj = VARYING      if (i != j)

         If these two values come from memory stores, make sure that
         they come from the same memory reference.
         Nothing to do.  VAL1 already contains the value we want.  */
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
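
/* For illustration only (not part of the pass), meeting the lattice
   values of a PHI's executable arguments works out as:

        UNDEFINED   M  CONSTANT 100  =  CONSTANT 100
        CONSTANT 3  M  CONSTANT 3    =  CONSTANT 3
        CONSTANT 3  M  CONSTANT 4    =  VARYING
        CONSTANT 3  M  VARYING       =  VARYING

   The first line is what lets ccp_visit_phi_node stay optimistic about
   arguments whose definitions have not been simulated yet.  */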
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
              "\n    Argument #%d (%d -> %d %sexecutable)\n",
              i, e->src->index, e->dest->index,
              (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }

      /* If the incoming edge is executable, compute the meet operator for
         the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
        {
          tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val = get_value_for_expr (arg);

          ccp_lattice_meet (&new_val, &arg_val);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "\t");
              print_generic_expr (dump_file, arg, dump_flags);
              dump_lattice_value (dump_file, "\tValue: ", arg_val);
              fprintf (dump_file, "\n");
            }

          if (new_val.lattice_val == VARYING)
            break;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
      else
        return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Get operand number OPNR from the rhs of STMT.  Before returning it,
   simplify it to a constant if possible.  */

static tree
get_rhs_assign_op_for_ccp (gimple stmt, int opnr)
{
  tree op = gimple_op (stmt, opnr);

  if (TREE_CODE (op) == SSA_NAME)
    {
      prop_value_t *val = get_value (op);
      if (val->lattice_val == CONSTANT)
        op = get_value (op)->value;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return get_value (rhs)->value;
                }
              /* Handle propagating invariant addresses into address operations.
                 The folding we do here matches that in tree-ssa-forwprop.c.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR)
                {
                  tree *base;
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
                  if (TREE_CODE (*base) == MEM_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR)
                        {
                          tree ret, save = *base;
                          tree new_base;
                          new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
                                                  unshare_expr (val->value),
                                                  TREE_OPERAND (*base, 1));
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
                          *base = new_base;
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
                          return ret;
                        }
                    }
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, list;

                  list = NULL_TREE;
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      if (TREE_CODE (val) == SSA_NAME
                          && get_value (val)->lattice_val == CONSTANT)
                        val = get_value (val)->value;
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        list = tree_cons (NULL_TREE, val, list);
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), nreverse (list));
                }
              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT)
                        return fold_unary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val->value);
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT
                          && TREE_CODE (val->value) == ADDR_EXPR)
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val->value),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref (rhs);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }
          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0)))
                {
                  tree tem;
                  /* Try to re-construct array references on-the-fly.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                             (loc, op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }
          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
              tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr
                           (fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
              tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
              tree op2 = get_rhs_assign_op_for_ccp (stmt, 3);

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;
    case GIMPLE_CALL:
      {
        tree fn = gimple_call_fn (stmt);
        prop_value_t *val;

        if (TREE_CODE (fn) == SSA_NAME)
          {
            val = get_value (fn);
            if (val->lattice_val == CONSTANT)
              fn = val->value;
          }
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              {
                args[i] = gimple_call_arg (stmt, i);
                if (TREE_CODE (args[i]) == SSA_NAME)
                  {
                    val = get_value (args[i]);
                    if (val->lattice_val == CONSTANT)
                      args[i] = val->value;
                  }
              }
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              /* fold_call_expr wraps the result inside a NOP_EXPR.  */
              STRIP_NOPS (retval);
            return retval;
          }
        return NULL_TREE;
      }
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = gimple_cond_lhs (stmt);
        tree op1 = gimple_cond_rhs (stmt);
        enum tree_code code = gimple_cond_code (stmt);

        /* Simplify the operands down to constants when appropriate.  */
        if (TREE_CODE (op0) == SSA_NAME)
          {
            prop_value_t *val = get_value (op0);
            if (val->lattice_val == CONSTANT)
              op0 = val->value;
          }

        if (TREE_CODE (op1) == SSA_NAME)
          {
            prop_value_t *val = get_value (op1);
            if (val->lattice_val == CONSTANT)
              op1 = val->value;
          }

        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        tree rhs = gimple_switch_index (stmt);

        if (TREE_CODE (rhs) == SSA_NAME)
          {
            /* If the RHS is an SSA_NAME, return its known constant value,
               if any.  */
            return get_value (rhs)->value;
          }

        return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
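
/* Example (illustrative only): given a_2 = &buf and the statement
   p_3 = a_2 + 4, the POINTER_PLUS_EXPR case above rewrites the RHS
   into an invariant of roughly the form &MEM[(char *)&buf + 4], which
   can then be propagated into further address arithmetic.  */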
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case MEM_REF:
          /* ??? We could handle this case.  */
          if (!integer_zerop (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          base = get_base_address (base);
          if (!base
              || TREE_CODE (base) != VAR_DECL)
            return NULL_TREE;
          /* Fallthru.  */
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;
          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }
      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
         with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
        {
        case SSA_NAME:
          if ((value = get_value (idx))
              && value->lattice_val == CONSTANT
              && TREE_CODE (value->value) == INTEGER_CST)
            idx = value->value;
          else
            return NULL_TREE;
          break;

        case INTEGER_CST:
          break;

        default:
          return NULL_TREE;
        }

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (tree_int_cst_equal (cfield, idx))
          {
            STRIP_NOPS (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;
    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
         DECL_INITIAL.  If BASE is a nested reference into another
         ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
         the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;
          ctor = DECL_INITIAL (base);
          break;

        case ARRAY_REF:
        case COMPONENT_REF:
          ctor = fold_const_aggregate_ref (base);
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || TREE_CODE (ctor) != CONSTRUCTOR
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
        if (cfield == field
            /* FIXME: Handle bit-fields.  */
            && ! DECL_BIT_FIELD (cfield))
          {
            STRIP_NOPS (cval);
            if (TREE_CODE (cval) == ADDR_EXPR)
              {
                tree base = get_base_address (TREE_OPERAND (cval, 0));
                if (base && TREE_CODE (base) == VAR_DECL)
                  add_referenced_var (base);
              }
            return cval;
          }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }
    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
          && (value = get_value (base))
          && value->lattice_val == CONSTANT)
        base = value->value;
      if (TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
        {
        case VAR_DECL:
          if (DECL_P (base)
              && !AGGREGATE_TYPE_P (TREE_TYPE (base))
              && integer_zerop (TREE_OPERAND (t, 1)))
            {
              tree res = get_symbol_constant_value (base);
              if (res
                  && !useless_type_conversion_p
                        (TREE_TYPE (t), TREE_TYPE (res)))
                res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
              return res;
            }

          if (!TREE_READONLY (base)
              || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
              || !targetm.binds_local_p (base))
            return NULL_TREE;
          ctor = DECL_INITIAL (base);
          break;

        case STRING_CST:
        case CONSTRUCTOR:
          ctor = base;
          break;

        default:
          return NULL_TREE;
        }

      if (ctor == NULL_TREE
          || (TREE_CODE (ctor) != CONSTRUCTOR
              && TREE_CODE (ctor) != STRING_CST)
          || !TREE_STATIC (ctor))
        return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
        {
          if ((TYPE_MODE (TREE_TYPE (t))
               == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
              && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
                  == MODE_INT)
              && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
              && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
            return build_int_cst_type (TREE_TYPE (t),
                                       (TREE_STRING_POINTER (ctor)
                                        [TREE_INT_CST_LOW (idx)]));
          return NULL_TREE;
        }

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          && (TYPE_MODE (TREE_TYPE (t))
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
          && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
          && integer_zerop
               (int_const_binop
                  (TRUNC_MOD_EXPR, idx,
                   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
        {
          idx = int_const_binop (TRUNC_DIV_EXPR, idx,
                                 size_int (GET_MODE_SIZE
                                             (TYPE_MODE (TREE_TYPE (t)))), 0);
          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
            if (tree_int_cst_equal (cfield, idx))
              {
                STRIP_NOPS (cval);
                if (TREE_CODE (cval) == ADDR_EXPR)
                  {
                    tree base = get_base_address (TREE_OPERAND (cval, 0));
                    if (base && TREE_CODE (base) == VAR_DECL)
                      add_referenced_var (base);
                  }
                if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
                  return cval;
                else if (CONSTANT_CLASS_P (cval))
                  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
              }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
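
/* Worked example (illustrative only): with

        static const int tbl[3] = { 10, 20, 30 };
        ...
        x_5 = tbl[2];

   the ARRAY_REF case walks DECL_INITIAL (tbl), finds the CONSTRUCTOR
   element whose index equals 2 and folds the load to the constant
   30.  */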
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);

          /* Other cases cannot satisfy is_gimple_min_invariant
             without folding.  */
          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
            simplified = gimple_assign_rhs1 (stmt);
        }
      else if (code == GIMPLE_SWITCH)
        simplified = gimple_switch_index (stmt);
      else
        /* These cannot satisfy is_gimple_min_invariant without folding.  */
        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
        {
        case CONSTANT:
          fprintf (dump_file, "CONSTANT");
          break;
        case UNDEFINED:
          fprintf (dump_file, "UNDEFINED");
          break;
        case VARYING:
          fprintf (dump_file, "VARYING");
          break;
        default:;
        }
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        val.lattice_val = likelyvalue;
      else
        val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
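
/* For example (illustrative only), for the statement

        if (x_5 > 10)

   with x_5 known to be CONSTANT 3, likely_value returns CONSTANT,
   ccp_fold reduces the predicate to a constant boolean false, and the
   resulting lattice value is CONSTANT with value 0.  */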
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || TREE_CODE (val.value) != INTEGER_CST)
          return false;

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        prop_value_t *val;
        tree argt;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_value (lhs))
            && val->lattice_val == CONSTANT)
          {
            tree new_rhs = unshare_expr (val->value);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_value (arg))
                && val->lattice_val == CONSTANT
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val->value))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val->value));
                changed = true;
              }
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        prop_value_t *val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_value (lhs))
            && val->lattice_val == CONSTANT)
          {
            tree rhs = unshare_expr (val->value);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
        {
          /* For a simple copy operation, we copy the lattice values.  */
          prop_value_t *nval = get_value (rhs);
          val = *nval;
        }
      else
        val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
        /* If the statement is an assignment that produces a single
           output value, evaluate its RHS to see if the lattice value of
           its output has changed.  */
        return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
        /* A value-returning call also performs an assignment.  */
        if (gimple_call_lhs (stmt) != NULL_TREE)
          return visit_assignment (stmt, output_p);
        break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
        /* If STMT is a conditional branch, see if we can determine
           which branch will be taken.  */
        /* FIXME.  It appears that we should be able to optimize
           computed GOTOs here as well.  */
        return visit_cond_stmt (stmt, taken_edge_p);

      default:
        break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
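
/* Example of the transformation (illustrative only):

        p_1 = __builtin_stack_save ();
        ...                             (no calls or ASM_EXPRs in between)
        __builtin_stack_restore (p_1);  (last statement before exit)

   The restore has no observable effect here, so it is replaced by a
   constant and deleted; when p_1 has no other uses, the matching
   __builtin_stack_save call is zapped the same way.  */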
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
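
/* Example (illustrative only): on a target whose va_list is a plain
   pointer, a call

        __builtin_va_start (&ap, 0);

   is rewritten into the simple pointer assignment

        ap = __builtin_next_arg (0);

   and __builtin_va_end (&ap) folds away entirely.  */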
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = gimple_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
  | TODO_verify_ssa
  | TODO_update_ssa			/* todo_flags_finish */
 }
};