1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
3 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
4 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by the
10 Free Software Foundation; either version 2, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
23 /* Conditional constant propagation.
27 Constant propagation with conditional branches,
28 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
30 Building an Optimizing Compiler,
31 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
33 Advanced Compiler Design and Implementation,
34 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
38 #include "coretypes.h"
43 #include "langhooks.h"
45 /* These RTL headers are needed for basic-block.h. */
48 #include "hard-reg-set.h"
49 #include "basic-block.h"
51 #include "diagnostic.h"
52 #include "tree-inline.h"
53 #include "tree-flow.h"
54 #include "tree-gimple.h"
55 #include "tree-dump.h"
56 #include "tree-pass.h"
62 /* Possible lattice values. */
71 /* Use the TREE_VISITED bitflag to mark statements and PHI nodes that have
72 been deemed VARYING and shouldn't be simulated again. */
73 #define DONT_SIMULATE_AGAIN(T) TREE_VISITED (T)
75 /* Main structure for CCP. Contains the lattice value and, if it's a
76 constant, the constant value. */
79 latticevalue lattice_val;
83 /* A bitmap to keep track of executable blocks in the CFG. */
84 static sbitmap executable_blocks;
86 /* Worklist of basic blocks reached by executable control flow edges. The array is managed as a circular queue. */
87 static GTY(()) varray_type cfg_blocks = NULL;
89 static unsigned int cfg_blocks_num = 0;
90 static int cfg_blocks_tail;
91 static int cfg_blocks_head;
93 static sbitmap bb_in_list;
95 /* Array of lattice values used to track the current value of each SSA variable, indexed by SSA version number. */
96 static value *value_vector;
98 /* Worklist of SSA edges which will need reexamination as their definition
99 has changed. SSA edges are def-use edges in the SSA web. For each
100 edge, we store the definition statement or PHI node D. The destination
101 nodes that need to be visited are accessed using immediate_uses (D). */
103 static GTY(()) varray_type ssa_edges;
105 /* Identical to SSA_EDGES. For performance reasons, the list of SSA
106 edges is split into two. One contains all SSA edges that need to be
107 reexamined because their lattice value changed to varying (this
108 worklist), and the other contains all other SSA edges to be
109 reexamined (ssa_edges).
111 Since most values in the program are varying, the ideal situation
112 is to move them to that lattice value as quickly as possible.
113 Thus, it doesn't make sense to process any other type of lattice
114 value until all varying values are propagated fully, which is one
115 thing using the varying worklist achieves. In addition, if you
116 don't use a separate worklist for varying edges, you end up with
117 situations where lattice values move from
118 undefined->constant->varying instead of undefined->varying.
120 static GTY(()) varray_type varying_ssa_edges;
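/* A small illustrative fragment (the SSA names below are made up, not
   taken from any real testcase):

	a_2 = some_extern_fn ();	<-- a_2 becomes VARYING
	b_3 = a_2 + 1;
	c_4 = b_3 * 2;

   Draining VARYING_SSA_EDGES first lets the uses of a_2 jump directly
   from UNDEFINED to VARYING.  If they were interleaved with the regular
   SSA_EDGES worklist, b_3 might first be evaluated against a stale
   CONSTANT value and take the longer path
   UNDEFINED -> CONSTANT -> VARYING, requiring extra simulation.  */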
123 static void initialize (void);
124 static void finalize (void);
125 static void visit_phi_node (tree);
126 static tree ccp_fold (tree);
127 static value cp_lattice_meet (value, value);
128 static void visit_stmt (tree);
129 static void visit_cond_stmt (tree);
130 static void visit_assignment (tree);
131 static void add_var_to_ssa_edges_worklist (tree, value);
132 static void add_outgoing_control_edges (basic_block);
133 static void add_control_edge (edge);
134 static void def_to_varying (tree);
135 static void set_lattice_value (tree, value);
136 static void simulate_block (basic_block);
137 static void simulate_stmt (tree);
138 static void substitute_and_fold (void);
139 static value evaluate_stmt (tree);
140 static void dump_lattice_value (FILE *, const char *, value);
141 static bool replace_uses_in (tree, bool *);
142 static latticevalue likely_value (tree);
143 static tree get_rhs (tree);
144 static void set_rhs (tree *, tree);
145 static value *get_value (tree);
146 static value get_default_value (tree);
147 static tree ccp_fold_builtin (tree, tree);
148 static bool get_strlen (tree, tree *, bitmap);
149 static inline bool cfg_blocks_empty_p (void);
150 static void cfg_blocks_add (basic_block);
151 static basic_block cfg_blocks_get (void);
152 static bool need_imm_uses_for (tree var);
154 /* Process an SSA edge worklist. WORKLIST is the SSA edge worklist to
155 drain. This pops statements off the given WORKLIST and processes
156 them until there are no more statements on WORKLIST. */
159 process_ssa_edge_worklist (varray_type *worklist)
161 /* Drain the entire worklist. */
162 while (VARRAY_ACTIVE_SIZE (*worklist) > 0)
164 /* Pull the statement to simulate off the worklist. */
165 tree stmt = VARRAY_TOP_TREE (*worklist);
166 stmt_ann_t ann = stmt_ann (stmt);
167 VARRAY_POP (*worklist);
169 /* visit_stmt can "cancel" reevaluation of some statements.
170 If it does, then in_ccp_worklist will be zero. */
171 if (ann->in_ccp_worklist)
173 ann->in_ccp_worklist = 0;
174 simulate_stmt (stmt);
179 /* Main entry point for SSA Conditional Constant Propagation. FNDECL is
180 the declaration for the function to optimize.
182 On exit, VARS_TO_RENAME will contain the symbols that have been exposed by
183 the propagation of ADDR_EXPR expressions into pointer dereferences and need
184 to be renamed into SSA.
186 PHASE indicates which dump file from the DUMP_FILES array to use when
187 dumping debugging information. */
194 /* Iterate until the worklists are empty. */
195 while (!cfg_blocks_empty_p ()
196 || VARRAY_ACTIVE_SIZE (ssa_edges) > 0
197 || VARRAY_ACTIVE_SIZE (varying_ssa_edges) > 0)
199 if (!cfg_blocks_empty_p ())
201 /* Pull the next block to simulate off the worklist. */
202 basic_block dest_block = cfg_blocks_get ();
203 simulate_block (dest_block);
206 /* In order to move things to varying as quickly as
207 possible, process the VARYING_SSA_EDGES worklist first. */
208 process_ssa_edge_worklist (&varying_ssa_edges);
210 /* Now process the SSA_EDGES worklist. */
211 process_ssa_edge_worklist (&ssa_edges);
214 /* Now perform substitutions based on the known constant values. */
215 substitute_and_fold ();
217 /* Now clean up any unreachable code. */
220 /* Free allocated memory. */
223 /* Debugging dumps. */
224 if (dump_file && (dump_flags & TDF_DETAILS))
226 dump_referenced_vars (dump_file);
227 fprintf (dump_file, "\n\n");
234 return flag_tree_ccp != 0;
237 struct tree_opt_pass pass_ccp =
241 tree_ssa_ccp, /* execute */
244 0, /* static_pass_number */
245 TV_TREE_CCP, /* tv_id */
246 PROP_cfg | PROP_ssa, /* properties_required */
247 0, /* properties_provided */
248 0, /* properties_destroyed */
249 0, /* todo_flags_start */
250 TODO_dump_func | TODO_rename_vars
251 | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
255 /* Get the constant value associated with variable VAR. */
262 #if defined ENABLE_CHECKING
263 if (TREE_CODE (var) != SSA_NAME)
267 val = &value_vector[SSA_NAME_VERSION (var)];
268 if (val->lattice_val == UNINITIALIZED)
269 *val = get_default_value (var);
275 /* Simulate the execution of BLOCK. Evaluate the statement associated
276 with each variable reference inside the block. */
279 simulate_block (basic_block block)
283 /* There is nothing to do for the exit block. */
284 if (block == EXIT_BLOCK_PTR)
287 if (dump_file && (dump_flags & TDF_DETAILS))
288 fprintf (dump_file, "\nSimulating block %d\n", block->index);
290 /* Always simulate PHI nodes, even if we have simulated this block before. */
292 for (phi = phi_nodes (block); phi; phi = TREE_CHAIN (phi))
293 visit_phi_node (phi);
295 /* If this is the first time we've simulated this block, then we
296 must simulate each of its statements. */
297 if (!TEST_BIT (executable_blocks, block->index))
299 block_stmt_iterator j;
300 unsigned int normal_edge_count;
303 /* Note that we have simulated this block. */
304 SET_BIT (executable_blocks, block->index);
306 for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
307 visit_stmt (bsi_stmt (j));
309 /* We can not predict when abnormal edges will be executed, so
310 once a block is considered executable, we consider any
311 outgoing abnormal edges as executable.
313 At the same time, if this block has only one successor that is
314 reached by non-abnormal edges, then add that successor to the worklist. */
316 normal_edge_count = 0;
318 for (e = block->succ; e; e = e->succ_next)
320 if (e->flags & EDGE_ABNORMAL)
322 add_control_edge (e);
331 if (normal_edge_count == 1)
332 add_control_edge (normal_edge);
337 /* Simulate statement USE_STMT, which was reached by following a def-use
338 (SSA) edge from the statement that defines one of its operands. */
341 simulate_stmt (tree use_stmt)
343 basic_block use_bb = bb_for_stmt (use_stmt);
345 if (dump_file && (dump_flags & TDF_DETAILS))
347 fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
348 print_generic_stmt (dump_file, use_stmt, dump_flags);
351 if (TREE_CODE (use_stmt) == PHI_NODE)
353 /* PHI nodes are always visited, regardless of whether or not the
354 destination block is executable. */
355 visit_phi_node (use_stmt);
357 else if (TEST_BIT (executable_blocks, use_bb->index))
359 /* Otherwise, visit the statement containing the use reached by
360 DEF, only if the destination block is marked executable. */
361 visit_stmt (use_stmt);
366 /* Perform final substitution and folding. After this pass the program
367 should still be in SSA form. */
370 substitute_and_fold (void)
374 if (dump_file && (dump_flags & TDF_DETAILS))
376 "\nSubstituing constants and folding statements\n\n");
378 /* Substitute constants in every statement of every basic block. */
381 block_stmt_iterator i;
384 /* Propagate our known constants into PHI nodes. */
385 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
389 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
392 tree *orig_p = &PHI_ARG_DEF (phi, i);
394 if (! SSA_VAR_P (*orig_p))
397 new_val = get_value (*orig_p);
398 if (new_val->lattice_val == CONSTANT
399 && may_propagate_copy (*orig_p, new_val->const_val))
400 *orig_p = new_val->const_val;
404 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
406 bool replaced_address;
407 tree stmt = bsi_stmt (i);
409 /* Skip statements that have been folded already. */
410 if (stmt_modified_p (stmt) || !is_exec_stmt (stmt))
413 /* Replace the statement with its folded version and mark it folded. */
415 if (dump_file && (dump_flags & TDF_DETAILS))
417 fprintf (dump_file, "Line %d: replaced ", get_lineno (stmt));
418 print_generic_stmt (dump_file, stmt, TDF_SLIM);
421 if (replace_uses_in (stmt, &replaced_address))
423 bool changed = fold_stmt (bsi_stmt_ptr (i));
426 /* If we folded a builtin function, we'll likely
427 need to rename VDEFs. */
428 if (replaced_address || changed)
429 mark_new_vars_to_rename (stmt, vars_to_rename);
432 if (dump_file && (dump_flags & TDF_DETAILS))
434 fprintf (dump_file, " with ");
435 print_generic_stmt (dump_file, stmt, TDF_SLIM);
436 fprintf (dump_file, "\n");
443 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
444 lattice values to determine PHI_NODE's lattice value. The value of a
445 PHI node is determined by calling cp_lattice_meet() with all the arguments
446 of the PHI node that are incoming via executable edges. */
449 visit_phi_node (tree phi)
451 bool short_circuit = 0;
452 value phi_val, *curr_val;
455 /* If the PHI node has already been deemed to be VARYING, don't simulate it again. */
457 if (DONT_SIMULATE_AGAIN (phi))
460 if (dump_file && (dump_flags & TDF_DETAILS))
462 fprintf (dump_file, "\nVisiting PHI node: ");
463 print_generic_expr (dump_file, phi, dump_flags);
466 curr_val = get_value (PHI_RESULT (phi));
467 switch (curr_val->lattice_val)
470 if (dump_file && (dump_flags & TDF_DETAILS))
471 fprintf (dump_file, "\n Shortcircuit. Default of VARYING.");
481 phi_val.lattice_val = UNDEFINED;
482 phi_val.const_val = NULL_TREE;
489 /* If the variable is volatile or the variable is never referenced in a
490 real operand, then consider the PHI node VARYING. */
491 if (short_circuit || TREE_THIS_VOLATILE (SSA_NAME_VAR (PHI_RESULT (phi))))
493 phi_val.lattice_val = VARYING;
494 phi_val.const_val = NULL;
497 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
499 /* Compute the meet operator over all the PHI arguments. */
500 edge e = PHI_ARG_EDGE (phi, i);
502 if (dump_file && (dump_flags & TDF_DETAILS))
505 "\n Argument #%d (%d -> %d %sexecutable)\n",
506 i, e->src->index, e->dest->index,
507 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
510 /* If the incoming edge is executable, compute the meet operator for
511 the existing value of the PHI node and the current PHI argument. */
512 if (e->flags & EDGE_EXECUTABLE)
514 tree rdef = PHI_ARG_DEF (phi, i);
515 value *rdef_val, val;
517 if (is_gimple_min_invariant (rdef))
519 val.lattice_val = CONSTANT;
520 val.const_val = rdef;
524 rdef_val = get_value (rdef);
526 phi_val = cp_lattice_meet (phi_val, *rdef_val);
528 if (dump_file && (dump_flags & TDF_DETAILS))
530 fprintf (dump_file, "\t");
531 print_generic_expr (dump_file, rdef, dump_flags);
532 dump_lattice_value (dump_file, "\tValue: ", *rdef_val);
533 fprintf (dump_file, "\n");
536 if (phi_val.lattice_val == VARYING)
541 if (dump_file && (dump_flags & TDF_DETAILS))
543 dump_lattice_value (dump_file, "\n PHI node value: ", phi_val);
544 fprintf (dump_file, "\n\n");
547 set_lattice_value (PHI_RESULT (phi), phi_val);
548 if (phi_val.lattice_val == VARYING)
549 DONT_SIMULATE_AGAIN (phi) = 1;
553 /* Compute the meet operator between VAL1 and VAL2:
555 any M UNDEFINED = any
556 any M VARYING = VARYING
557 Ci M Cj = Ci if (i == j)
558 Ci M Cj = VARYING if (i != j) */
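/* A few concrete instances of the meet operator (illustrative only):

	UNDEFINED  M CONSTANT 4 = CONSTANT 4
	CONSTANT 4 M CONSTANT 4 = CONSTANT 4
	CONSTANT 4 M CONSTANT 5 = VARYING
	CONSTANT 4 M VARYING    = VARYING  */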
560 cp_lattice_meet (value val1, value val2)
564 /* any M UNDEFINED = any. */
565 if (val1.lattice_val == UNDEFINED)
567 else if (val2.lattice_val == UNDEFINED)
570 /* any M VARYING = VARYING. */
571 if (val1.lattice_val == VARYING || val2.lattice_val == VARYING)
573 result.lattice_val = VARYING;
574 result.const_val = NULL_TREE;
578 /* Ci M Cj = Ci if (i == j)
579 Ci M Cj = VARYING if (i != j) */
580 if (simple_cst_equal (val1.const_val, val2.const_val) == 1)
582 result.lattice_val = CONSTANT;
583 result.const_val = val1.const_val;
587 result.lattice_val = VARYING;
588 result.const_val = NULL_TREE;
595 /* Evaluate statement STMT. If the statement produces an output value and
596 its evaluation changes the lattice value of its output, do the following:
598 - If the statement is an assignment, add all the SSA edges starting at this definition.
601 - If the statement is a conditional branch:
602 . If the statement evaluates to non-constant, add all edges to worklist.
604 . If the statement is constant, add the edge executed as the
605 result of the branch. */
608 visit_stmt (tree stmt)
615 /* If the statement has already been deemed to be VARYING, don't simulate
617 if (DONT_SIMULATE_AGAIN (stmt))
620 if (dump_file && (dump_flags & TDF_DETAILS))
622 fprintf (dump_file, "\nVisiting statement: ");
623 print_generic_stmt (dump_file, stmt, TDF_SLIM);
624 fprintf (dump_file, "\n");
627 ann = stmt_ann (stmt);
629 /* If this statement is already in the worklist then "cancel" it. The
630 reevaluation implied by the worklist entry will produce the same
631 value we generate here and thus reevaluating it again from the
632 worklist is pointless. */
633 if (ann->in_ccp_worklist)
634 ann->in_ccp_worklist = 0;
636 /* Now examine the statement. If the statement is an assignment that
637 produces a single output value, evaluate its RHS to see if the lattice
638 value of its output has changed. */
639 if (TREE_CODE (stmt) == MODIFY_EXPR
640 && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME)
641 visit_assignment (stmt);
643 /* Definitions made by statements other than assignments to SSA_NAMEs
644 represent unknown modifications to their outputs. Mark them VARYING. */
645 else if (NUM_DEFS (defs = DEF_OPS (ann)) != 0)
647 DONT_SIMULATE_AGAIN (stmt) = 1;
648 for (i = 0; i < NUM_DEFS (defs); i++)
650 tree def = DEF_OP (defs, i);
651 def_to_varying (def);
655 /* If STMT is a conditional branch, see if we can determine which branch will be taken. */
657 else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
658 visit_cond_stmt (stmt);
660 /* Any other kind of statement is not interesting for constant
661 propagation and, therefore, not worth simulating. */
664 DONT_SIMULATE_AGAIN (stmt) = 1;
666 /* If STMT is a computed goto, then mark all the output edges executable. */
668 if (computed_goto_p (stmt))
669 add_outgoing_control_edges (bb_for_stmt (stmt));
672 /* Mark all VDEF operands VARYING. */
673 vdefs = VDEF_OPS (ann);
674 for (i = 0; i < NUM_VDEFS (vdefs); i++)
675 def_to_varying (VDEF_RESULT (vdefs, i));
679 /* Visit the assignment statement STMT. Set the value of its LHS to the
680 value computed by the RHS. */
683 visit_assignment (tree stmt)
688 lhs = TREE_OPERAND (stmt, 0);
689 rhs = TREE_OPERAND (stmt, 1);
691 if (TREE_THIS_VOLATILE (SSA_NAME_VAR (lhs)))
693 /* Volatile variables are always VARYING. */
694 val.lattice_val = VARYING;
695 val.const_val = NULL_TREE;
697 else if (TREE_CODE (rhs) == SSA_NAME)
699 /* For a simple copy operation, we copy the lattice values. */
700 value *nval = get_value (rhs);
705 /* Evaluate the statement. */
706 val = evaluate_stmt (stmt);
709 /* FIXME: Hack. If this was a definition of a bitfield, we need to widen
710 the constant value into the type of the destination variable. This
711 should not be necessary if GCC represented bitfields properly. */
713 tree lhs = TREE_OPERAND (stmt, 0);
714 if (val.lattice_val == CONSTANT
715 && TREE_CODE (lhs) == COMPONENT_REF
716 && DECL_BIT_FIELD (TREE_OPERAND (lhs, 1)))
718 tree w = widen_bitfield (val.const_val, TREE_OPERAND (lhs, 1), lhs);
720 if (w && is_gimple_min_invariant (w))
724 val.lattice_val = VARYING;
725 val.const_val = NULL;
730 /* Set the lattice value of the statement's output. */
731 set_lattice_value (lhs, val);
732 if (val.lattice_val == VARYING)
733 DONT_SIMULATE_AGAIN (stmt) = 1;
737 /* Visit the conditional statement STMT. If it evaluates to a constant value,
738 mark outgoing edges appropriately. */
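/* For example (an illustrative fragment, not from any testcase), given

	if (0)
	  goto L1;
	else
	  goto L2;

   the predicate evaluates to the constant 0, so only the edge leading to
   L2 is added to the control flow worklist; the block holding L1 is not
   simulated unless it is reachable some other way.  */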
741 visit_cond_stmt (tree stmt)
747 block = bb_for_stmt (stmt);
748 val = evaluate_stmt (stmt);
750 /* Find which edge out of the conditional block will be taken and add it
751 to the worklist. If no single edge can be determined statically, add
752 all outgoing edges from BLOCK. */
753 e = find_taken_edge (block, val.const_val);
755 add_control_edge (e);
758 DONT_SIMULATE_AGAIN (stmt) = 1;
759 add_outgoing_control_edges (block);
764 /* Add all the edges coming out of BB to the control flow worklist. */
767 add_outgoing_control_edges (basic_block bb)
771 for (e = bb->succ; e; e = e->succ_next)
772 add_control_edge (e);
776 /* Add edge E to the control flow worklist. */
779 add_control_edge (edge e)
781 basic_block bb = e->dest;
782 if (bb == EXIT_BLOCK_PTR)
785 /* If the edge had already been executed, skip it. */
786 if (e->flags & EDGE_EXECUTABLE)
789 e->flags |= EDGE_EXECUTABLE;
791 /* If the block is already in the list, we're done. */
792 if (TEST_BIT (bb_in_list, bb->index))
797 if (dump_file && (dump_flags & TDF_DETAILS))
798 fprintf (dump_file, "Adding Destination of edge (%d -> %d) to worklist\n\n",
799 e->src->index, e->dest->index);
803 /* CCP specific front-end to the non-destructive constant folding routines.
805 Attempt to simplify the RHS of STMT knowing that one or more
806 operands are constants.
808 If simplification is possible, return the simplified RHS,
809 otherwise return the original RHS. */
814 tree rhs = get_rhs (stmt);
815 enum tree_code code = TREE_CODE (rhs);
816 int kind = TREE_CODE_CLASS (code);
817 tree retval = NULL_TREE;
819 /* If the RHS is just a variable, then that variable must now have
820 a constant value that we can return directly. */
821 if (TREE_CODE (rhs) == SSA_NAME)
822 return get_value (rhs)->const_val;
824 /* Unary operators. Note that we know the single operand must
825 be a constant. So this should almost always return a simplified RHS. */
829 /* Handle unary operators which can appear in GIMPLE form. */
830 tree op0 = TREE_OPERAND (rhs, 0);
832 /* Simplify the operand down to a constant. */
833 if (TREE_CODE (op0) == SSA_NAME)
835 value *val = get_value (op0);
836 if (val->lattice_val == CONSTANT)
837 op0 = get_value (op0)->const_val;
840 retval = nondestructive_fold_unary_to_constant (code,
844 /* If we folded, but did not create an invariant, then we can not
845 use this expression. */
846 if (retval && ! is_gimple_min_invariant (retval))
849 /* If we could not fold the expression, but the arguments are all
850 constants and gimple values, then build and return the new expression.
853 In some cases the new expression is still something we can
854 use as a replacement for an argument. This happens with
855 NOP conversions of types for example.
857 In other cases the new expression can not be used as a
858 replacement for an argument (as it would create non-gimple
859 code). But the new expression can still be used to derive a constant. */
861 if (! retval && is_gimple_min_invariant (op0))
862 return build1 (code, TREE_TYPE (rhs), op0);
865 /* Binary and comparison operators. We know one or both of the
866 operands are constants. */
869 || code == TRUTH_AND_EXPR
870 || code == TRUTH_OR_EXPR
871 || code == TRUTH_XOR_EXPR)
873 /* Handle binary and comparison operators that can appear in GIMPLE form. */
875 tree op0 = TREE_OPERAND (rhs, 0);
876 tree op1 = TREE_OPERAND (rhs, 1);
878 /* Simplify the operands down to constants when appropriate. */
879 if (TREE_CODE (op0) == SSA_NAME)
881 value *val = get_value (op0);
882 if (val->lattice_val == CONSTANT)
883 op0 = val->const_val;
886 if (TREE_CODE (op1) == SSA_NAME)
888 value *val = get_value (op1);
889 if (val->lattice_val == CONSTANT)
890 op1 = val->const_val;
893 retval = nondestructive_fold_binary_to_constant (code,
897 /* If we folded, but did not create an invariant, then we can not
898 use this expression. */
899 if (retval && ! is_gimple_min_invariant (retval))
902 /* If we could not fold the expression, but the arguments are all
903 constants and gimple values, then build and return the new expression.
906 In some cases the new expression is still something we can
907 use as a replacement for an argument. This happens with
908 NOP conversions of types for example.
910 In other cases the new expression can not be used as a
911 replacement for an argument (as it would create non-gimple
912 code). But the new expression can still be used to derive
913 a constant. */
914 if (! retval
915 && is_gimple_min_invariant (op0)
916 && is_gimple_min_invariant (op1))
917 return build (code, TREE_TYPE (rhs), op0, op1);
920 /* We may be able to fold away calls to builtin functions if their
921 arguments are constants. */
922 else if (code == CALL_EXPR
923 && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
924 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
926 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
928 use_optype uses = STMT_USE_OPS (stmt);
929 if (NUM_USES (uses) != 0)
934 /* Preserve the original values of every operand. */
935 orig = xmalloc (sizeof (tree) * NUM_USES (uses));
936 for (i = 0; i < NUM_USES (uses); i++)
937 orig[i] = USE_OP (uses, i);
939 /* Substitute operands with their values and try to fold. */
940 replace_uses_in (stmt, NULL);
941 retval = fold_builtin (rhs);
943 /* Restore operands to their original form. */
944 for (i = 0; i < NUM_USES (uses); i++)
945 *(USE_OP_PTR (uses, i)) = orig[i];
952 /* If we got a simplified form, see if we need to convert its type. */
955 if (TREE_TYPE (retval) != TREE_TYPE (rhs))
956 retval = convert (TREE_TYPE (rhs), retval);
958 if (TREE_TYPE (retval) == TREE_TYPE (rhs))
962 /* No simplification was possible. */
967 /* Evaluate statement STMT. */
970 evaluate_stmt (tree stmt)
974 latticevalue likelyvalue = likely_value (stmt);
976 /* If the statement is likely to have a CONSTANT result, then try
977 to fold the statement to determine the constant value. */
978 if (likelyvalue == CONSTANT)
979 simplified = ccp_fold (stmt);
980 /* If the statement is likely to have a VARYING result, then do not
981 bother folding the statement. */
982 else if (likelyvalue == VARYING)
983 simplified = get_rhs (stmt);
984 /* Otherwise the statement is likely to have an UNDEFINED value and
985 there will be nothing to do. */
987 simplified = NULL_TREE;
989 if (simplified && is_gimple_min_invariant (simplified))
991 /* The statement produced a constant value. */
992 val.lattice_val = CONSTANT;
993 val.const_val = simplified;
997 /* The statement produced a nonconstant value. If the statement
998 had undefined operands, then the result of the statement should
999 be undefined. Else the result of the statement is VARYING. */
1000 val.lattice_val = (likelyvalue == UNDEFINED ? UNDEFINED : VARYING);
1001 val.const_val = NULL_TREE;
1008 /* Debugging dumps. */
1011 dump_lattice_value (FILE *outf, const char *prefix, value val)
1013 switch (val.lattice_val)
1016 fprintf (outf, "%sUNDEFINED", prefix);
1019 fprintf (outf, "%sVARYING", prefix);
1022 fprintf (outf, "%sCONSTANT ", prefix);
1023 print_generic_expr (outf, val.const_val, dump_flags);
1030 /* Given a constant value VAL for bitfield FIELD, and a destination
1031 variable VAR, return VAL appropriately widened to fit into VAR. If
1032 FIELD is wider than HOST_WIDE_INT, NULL is returned. */
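/* A worked example (illustrative only): for a 3-bit unsigned bitfield
   assigned the constant 5 with a 32-bit destination variable, the zero
   extension path below builds the mask 0x7 and returns 5 & 0x7 == 5.
   For a 3-bit signed bitfield whose stored bit pattern is 101 (the value
   -3), the sign extension path builds a mask with the upper 29 bits set
   and returns 5 | 0xfffffff8, i.e. -3 in the wider type.  */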
1035 widen_bitfield (tree val, tree field, tree var)
1037 unsigned var_size, field_size;
1039 unsigned HOST_WIDE_INT mask;
1042 var_size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE ((var))));
1043 field_size = TREE_INT_CST_LOW (DECL_SIZE (field));
1045 /* Give up if either the bitfield or the variable is too wide. */
1046 if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
1049 #if defined ENABLE_CHECKING
1050 if (var_size < field_size)
1054 /* If VAL is not an integer constant, then give up. */
1055 if (TREE_CODE (val) != INTEGER_CST)
1058 /* If the sign bit of the value is not set, or the field's type is
1059 unsigned, then just mask off the high order bits of the value. */
1060 if ((TREE_INT_CST_LOW (val) & ((unsigned HOST_WIDE_INT) 1 << (field_size - 1))) == 0
1061 || DECL_UNSIGNED (field))
1063 /* Zero extension. Build a mask with the lower 'field_size' bits
1064 set and a BIT_AND_EXPR node to clear the high order bits of
1066 for (i = 0, mask = 0; i < field_size; i++)
1067 mask |= (unsigned HOST_WIDE_INT) 1 << i;
1069 wide_val = build (BIT_AND_EXPR, TREE_TYPE (var), val,
1070 build_int_2 (mask, 0));
1074 /* Sign extension. Create a mask with the upper 'var_size - field_size'
1075 bits set and a BIT_IOR_EXPR to set the high order bits of the
1076 value. */
1077 for (i = 0, mask = 0; i < (var_size - field_size); i++)
1078 mask |= (unsigned HOST_WIDE_INT) 1 << (var_size - i - 1);
1080 wide_val = build (BIT_IOR_EXPR, TREE_TYPE (var), val,
1081 build_int_2 (mask, 0));
1084 return fold (wide_val);
1088 /* Function indicating whether we ought to include information for 'var'
1089 when calculating immediate uses. */
1092 need_imm_uses_for (tree var)
1094 return get_value (var)->lattice_val != VARYING;
1098 /* Initialize local data structures and worklists for CCP. */
1105 sbitmap virtual_var;
1107 /* Worklists of SSA edges. */
1108 VARRAY_TREE_INIT (ssa_edges, 20, "ssa_edges");
1109 VARRAY_TREE_INIT (varying_ssa_edges, 20, "varying_ssa_edges");
1111 executable_blocks = sbitmap_alloc (last_basic_block);
1112 sbitmap_zero (executable_blocks);
1114 bb_in_list = sbitmap_alloc (last_basic_block);
1115 sbitmap_zero (bb_in_list);
1117 value_vector = (value *) xmalloc (highest_ssa_version * sizeof (value));
1118 memset (value_vector, 0, highest_ssa_version * sizeof (value));
1120 /* Bit N is 1 if the SSA variable with version N is used in a virtual variable context. */
1121 virtual_var = sbitmap_alloc (highest_ssa_version);
1122 sbitmap_zero (virtual_var);
1124 /* Initialize default values and simulation flags for PHI nodes, statements and edges. */
1128 block_stmt_iterator i;
1136 /* Get the default value for each definition. */
1137 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
1140 stmt = bsi_stmt (i);
1141 get_stmt_operands (stmt);
1142 ann = stmt_ann (stmt);
1143 defs = DEF_OPS (ann);
1144 for (x = 0; x < NUM_DEFS (defs); x++)
1146 tree def = DEF_OP (defs, x);
1147 if (get_value (def)->lattice_val == VARYING)
1148 vary = 1;
1150 DONT_SIMULATE_AGAIN (stmt) = vary;
1152 /* Mark all VDEF operands VARYING. */
1153 vdefs = VDEF_OPS (ann);
1154 for (x = 0; x < NUM_VDEFS (vdefs); x++)
1156 tree res = VDEF_RESULT (vdefs, x);
1157 get_value (res)->lattice_val = VARYING;
1158 SET_BIT (virtual_var, SSA_NAME_VERSION (res));
1162 for (e = bb->succ; e; e = e->succ_next)
1163 e->flags &= ~EDGE_EXECUTABLE;
1166 /* Now process PHI nodes. */
1171 for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
1174 val = get_value (PHI_RESULT (phi));
1175 if (val->lattice_val != VARYING)
1177 for (x = 0; x < PHI_NUM_ARGS (phi); x++)
1179 var = PHI_ARG_DEF (phi, x);
1180 /* If one argument is virtual, the result is virtual, and
1181 therefore varying. */
1182 if (TREE_CODE (var) == SSA_NAME)
1184 if (TEST_BIT (virtual_var, SSA_NAME_VERSION (var)))
1186 val->lattice_val = VARYING;
1187 SET_BIT (virtual_var,
1188 SSA_NAME_VERSION (PHI_RESULT (phi)));
1194 DONT_SIMULATE_AGAIN (phi) = ((val->lattice_val == VARYING) ? 1 : 0);
1198 sbitmap_free (virtual_var);
1199 /* Compute immediate uses for variables we care about. */
1200 compute_immediate_uses (TDFA_USE_OPS, need_imm_uses_for);
1202 if (dump_file && (dump_flags & TDF_DETAILS))
1203 dump_immediate_uses (dump_file);
1205 VARRAY_BB_INIT (cfg_blocks, 20, "cfg_blocks");
1207 /* Seed the algorithm by adding the successors of the entry block to the edge worklist. */
1209 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1211 if (e->dest != EXIT_BLOCK_PTR)
1213 e->flags |= EDGE_EXECUTABLE;
1214 cfg_blocks_add (e->dest);
1220 /* Free allocated storage. */
1226 varying_ssa_edges = NULL;
1228 free (value_vector);
1229 sbitmap_free (bb_in_list);
1230 sbitmap_free (executable_blocks);
1234 /* Return true if the block worklist is empty. */
1237 cfg_blocks_empty_p (void)
1239 return (cfg_blocks_num == 0);
1242 /* Add a basic block to the worklist. */
1245 cfg_blocks_add (basic_block bb)
1247 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
1250 if (TEST_BIT (bb_in_list, bb->index))
1253 if (cfg_blocks_empty_p ())
1255 cfg_blocks_tail = cfg_blocks_head = 0;
1261 if (cfg_blocks_num > VARRAY_SIZE (cfg_blocks))
1263 /* We have to grow the array now. Adjust the queue to occupy the
1264 full space of the original array. */
1265 cfg_blocks_tail = VARRAY_SIZE (cfg_blocks);
1266 cfg_blocks_head = 0;
1267 VARRAY_GROW (cfg_blocks, 2 * VARRAY_SIZE (cfg_blocks));
1270 cfg_blocks_tail = (cfg_blocks_tail + 1) % VARRAY_SIZE (cfg_blocks);
1272 VARRAY_BB (cfg_blocks, cfg_blocks_tail) = bb;
1273 SET_BIT (bb_in_list, bb->index);
1276 /* Remove a block from the worklist. */
1279 cfg_blocks_get (void)
1283 bb = VARRAY_BB (cfg_blocks, cfg_blocks_head);
1285 #ifdef ENABLE_CHECKING
1286 if (cfg_blocks_empty_p () || !bb)
1290 cfg_blocks_head = (cfg_blocks_head + 1) % VARRAY_SIZE (cfg_blocks);
1292 RESET_BIT (bb_in_list, bb->index);
1297 /* We have just defined a new value for VAR. Add all immediate uses
1298 of VAR to the ssa_edges or varying_ssa_edges worklist. */
1300 add_var_to_ssa_edges_worklist (tree var, value val)
1302 tree stmt = SSA_NAME_DEF_STMT (var);
1303 dataflow_t df = get_immediate_uses (stmt);
1304 int num_uses = num_immediate_uses (df);
1307 for (i = 0; i < num_uses; i++)
1309 tree use = immediate_use (df, i);
1311 if (!DONT_SIMULATE_AGAIN (use))
1313 stmt_ann_t ann = stmt_ann (use);
1314 if (ann->in_ccp_worklist == 0)
1316 ann->in_ccp_worklist = 1;
1317 if (val.lattice_val == VARYING)
1318 VARRAY_PUSH_TREE (varying_ssa_edges, use);
1320 VARRAY_PUSH_TREE (ssa_edges, use);
1326 /* Set the lattice value for the variable VAR to VARYING. */
1329 def_to_varying (tree var)
1332 val.lattice_val = VARYING;
1333 val.const_val = NULL_TREE;
1334 set_lattice_value (var, val);
1337 /* Set the lattice value for variable VAR to VAL. */
1340 set_lattice_value (tree var, value val)
1342 value *old = get_value (var);
1344 #ifdef ENABLE_CHECKING
1345 if (val.lattice_val == UNDEFINED)
1347 /* CONSTANT->UNDEFINED is never a valid state transition. */
1348 if (old->lattice_val == CONSTANT)
1351 /* VARYING->UNDEFINED is generally not a valid state transition,
1352 except for values which are initialized to VARYING. */
1353 if (old->lattice_val == VARYING
1354 && get_default_value (var).lattice_val != VARYING)
1357 else if (val.lattice_val == CONSTANT)
1359 /* VARYING -> CONSTANT is an invalid state transition, except
1360 for objects which start off in a VARYING state. */
1361 if (old->lattice_val == VARYING
1362 && get_default_value (var).lattice_val != VARYING)
1367 /* If the constant for VAR has changed, then this VAR is really varying. */
1368 if (old->lattice_val == CONSTANT && val.lattice_val == CONSTANT
1369 && !simple_cst_equal (old->const_val, val.const_val))
1371 val.lattice_val = VARYING;
1372 val.const_val = NULL_TREE;
1375 if (old->lattice_val != val.lattice_val)
1377 if (dump_file && (dump_flags & TDF_DETAILS))
1379 dump_lattice_value (dump_file,
1380 "Lattice value changed to ", val);
1381 fprintf (dump_file, ". Adding definition to SSA edges.\n");
1384 add_var_to_ssa_edges_worklist (var, val);
1389 /* Replace USE references in statement STMT with their immediate reaching
1390 definition. Return true if at least one reference was replaced. If
1391 REPLACED_ADDRESSES_P is given, it will be set to true if an address
1392 constant was replaced. */
1395 replace_uses_in (tree stmt, bool *replaced_addresses_p)
1397 bool replaced = false;
1401 if (replaced_addresses_p)
1402 *replaced_addresses_p = false;
1404 get_stmt_operands (stmt);
1406 uses = STMT_USE_OPS (stmt);
1407 for (i = 0; i < NUM_USES (uses); i++)
1409 tree *use = USE_OP_PTR (uses, i);
1410 value *val = get_value (*use);
1412 if (val->lattice_val == CONSTANT)
1414 *use = val->const_val;
1416 if (POINTER_TYPE_P (TREE_TYPE (*use)) && replaced_addresses_p)
1417 *replaced_addresses_p = true;
1424 /* Return the likely latticevalue for STMT.
1426 If STMT has no operands, then return CONSTANT.
1428 Else if any operands of STMT are undefined, then return UNDEFINED.
1430 Else if any operands of STMT are constants, then return CONSTANT.
1432 Else return VARYING. */
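/* For instance (illustrative only), for the statement

	c_3 = a_1 + b_2;

   if a_1 is CONSTANT and b_2 is VARYING, the likely value is CONSTANT,
   so ccp_fold will be attempted; if either operand is still UNDEFINED,
   the likely value is UNDEFINED; and a statement with no USE operands,
   such as x_4 = 42, is likely CONSTANT.  */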
1435 likely_value (tree stmt)
1439 int found_constant = 0;
1442 /* If the statement makes aliased loads or has volatile operands, it
1443 won't fold to a constant value. */
1444 ann = stmt_ann (stmt);
1445 if (ann->makes_aliased_loads || ann->has_volatile_ops)
1448 /* A CALL_EXPR is assumed to be varying. This may be overly conservative,
1449 in the presence of const and pure calls. */
1450 if (get_call_expr_in (stmt) != NULL_TREE)
1453 get_stmt_operands (stmt);
1455 uses = USE_OPS (ann);
1456 for (i = 0; i < NUM_USES (uses); i++)
1458 tree use = USE_OP (uses, i);
1459 value *val = get_value (use);
1461 if (val->lattice_val == UNDEFINED)
1464 if (val->lattice_val == CONSTANT)
1468 return ((found_constant || !uses) ? CONSTANT : VARYING);
1471 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1472 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1473 is the desired result type. */
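/* For example (illustrative only), with

	int a[8];

   a dereference of the form *(&a + 8) with ORIG_TYPE 'int' has OFFSET 8
   and an element size of 4 bytes (assuming a 4-byte int), so the
   division is exact and the reference can be rewritten as a[2].  */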
1476 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
1478 unsigned HOST_WIDE_INT lquo, lrem;
1479 HOST_WIDE_INT hquo, hrem;
1480 tree elt_size, min_idx, idx;
1481 tree array_type, elt_type;
1483 /* Ignore stupid user tricks of indexing non-array variables. */
1484 array_type = TREE_TYPE (base);
1485 if (TREE_CODE (array_type) != ARRAY_TYPE)
1487 elt_type = TREE_TYPE (array_type);
1488 if (!lang_hooks.types_compatible_p (orig_type, elt_type))
1491 /* Whee. Ignore indexing of variable sized types. */
1492 elt_size = TYPE_SIZE_UNIT (elt_type);
1493 if (TREE_CODE (elt_size) != INTEGER_CST)
1496 /* If the division isn't exact, then don't do anything. Equally
1497 invalid as the above indexing of non-array variables. */
1498 if (div_and_round_double (TRUNC_DIV_EXPR, 1,
1499 TREE_INT_CST_LOW (offset),
1500 TREE_INT_CST_HIGH (offset),
1501 TREE_INT_CST_LOW (elt_size),
1502 TREE_INT_CST_HIGH (elt_size),
1503 &lquo, &hquo, &lrem, &hrem)
1506 idx = build_int_2_wide (lquo, hquo);
1508 /* Re-bias the index by the min index of the array type. */
1509 min_idx = TYPE_DOMAIN (TREE_TYPE (base));
1512 min_idx = TYPE_MIN_VALUE (min_idx);
1515 idx = convert (TREE_TYPE (min_idx), idx);
1516 if (!integer_zerop (min_idx))
1517 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 1);
1521 return build (ARRAY_REF, orig_type, base, idx);
1524 /* A subroutine of fold_stmt_r. Attempts to fold *(S+O) to S.X.
1525 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1526 is the desired result type. */
1527 /* ??? This doesn't handle class inheritance. */
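/* For example (illustrative only), with

	struct S { int x; int y; } s;

   a dereference *(&s + 4) with ORIG_TYPE 'int' can be rewritten as the
   COMPONENT_REF s.y, assuming a 4-byte int and no padding between the
   fields.  */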
1530 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1531 tree orig_type, bool base_is_ptr)
1533 tree f, t, field_type, tail_array_field;
1535 if (TREE_CODE (record_type) != RECORD_TYPE
1536 && TREE_CODE (record_type) != UNION_TYPE
1537 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1540 /* Short-circuit silly cases. */
1541 if (lang_hooks.types_compatible_p (record_type, orig_type))
1544 tail_array_field = NULL_TREE;
1545 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1549 if (TREE_CODE (f) != FIELD_DECL)
1551 if (DECL_BIT_FIELD (f))
1553 if (TREE_CODE (DECL_FIELD_OFFSET (f)) != INTEGER_CST)
1556 /* ??? Java creates "interesting" fields for representing base classes.
1557 They have no name, and have no context. With no context, we get into
1558 trouble with nonoverlapping_component_refs_p. Skip them. */
1559 if (!DECL_FIELD_CONTEXT (f))
1562 /* The previous array field isn't at the end. */
1563 tail_array_field = NULL_TREE;
1565 /* Check to see if this offset overlaps with the field. */
1566 cmp = tree_int_cst_compare (DECL_FIELD_OFFSET (f), offset);
1570 field_type = TREE_TYPE (f);
1573 /* Don't care about offsets into the middle of scalars. */
1574 if (!AGGREGATE_TYPE_P (field_type))
1577 /* Check for array at the end of the struct. This is often
1578 used for flexible array members. We should be able to
1579 turn this into an array access anyway. */
1580 if (TREE_CODE (field_type) == ARRAY_TYPE)
1581 tail_array_field = f;
1583 /* Check the end of the field against the offset. */
1584 if (!DECL_SIZE_UNIT (f)
1585 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1587 t = int_const_binop (MINUS_EXPR, offset, DECL_FIELD_OFFSET (f), 1);
1588 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1591 /* If we matched, then set offset to the displacement into this field. */
1596 /* Here we exactly match the offset being checked. If the types match,
1597 then we can return that field. */
1598 else if (lang_hooks.types_compatible_p (orig_type, field_type))
1601 base = build1 (INDIRECT_REF, record_type, base);
1602 t = build (COMPONENT_REF, field_type, base, f);
1606 /* Don't care about type-punning of scalars. */
1607 else if (!AGGREGATE_TYPE_P (field_type))
1613 if (!tail_array_field)
1616 f = tail_array_field;
1617 field_type = TREE_TYPE (f);
1620 /* If we get here, we've got an aggregate field, and a possibly
1621 nonzero offset into it. Recurse and hope for a valid match. */
1623 base = build1 (INDIRECT_REF, record_type, base);
1624 base = build (COMPONENT_REF, field_type, base, f);
1626 t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
1629 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1633 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1634 Return the simplified expression, or NULL if nothing could be done. */
1637 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1641 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1642 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1643 are sometimes added. */
1646 TREE_OPERAND (expr, 0) = base;
1648 /* One possibility is that the address reduces to a string constant. */
1649 t = fold_read_from_constant_string (expr);
1653 /* Add in any offset from a PLUS_EXPR. */
1654 if (TREE_CODE (base) == PLUS_EXPR)
1658 offset2 = TREE_OPERAND (base, 1);
1659 if (TREE_CODE (offset2) != INTEGER_CST)
1661 base = TREE_OPERAND (base, 0);
1663 offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
1666 if (TREE_CODE (base) == ADDR_EXPR)
1668 /* Strip the ADDR_EXPR. */
1669 base = TREE_OPERAND (base, 0);
1671 /* Try folding *(&B+O) to B[X]. */
1672 t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
1676 /* Try folding *(&B+O) to B.X. */
1677 t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
1678 TREE_TYPE (expr), false);
1682 /* Fold *&B to B. */
1683 if (integer_zerop (offset))
1688 /* We can get here for out-of-range string constant accesses,
1689 such as "_"[3]. Bail out of the entire substitution search
1690 and arrange for the entire statement to be replaced by a
1691 call to __builtin_trap. In all likelihood this will all be
1692 constant-folded away, but in the meantime we can't leave with
1693 something that get_expr_operands can't understand. */
1697 if (TREE_CODE (t) == ADDR_EXPR
1698 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
1700 /* FIXME: Except that this causes problems elsewhere with dead
1701 code not being deleted, and we abort in the rtl expanders
1702 because we failed to remove some ssa_name. In the meantime,
1703 just return zero. */
1704 /* FIXME2: This condition should be signaled by
1705 fold_read_from_constant_string directly, rather than
1706 re-checking for it here. */
1707 return integer_zero_node;
1710 /* Try folding *(B+O) to B->X. Still an improvement. */
1711 if (POINTER_TYPE_P (TREE_TYPE (base)))
1713 t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
1715 TREE_TYPE (expr), true);
1721 /* Otherwise we had an offset that we could not simplify. */
1725 /* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1727 A quaint feature extant in our address arithmetic is that there
1728 can be hidden type changes here. The type of the result need
1729 not be the same as the type of the input pointer.
1731 What we're after here is an expression of the form
1732 (T *)(&array + const)
1733 where the cast doesn't actually exist, but is implicit in the
1734 type of the PLUS_EXPR. We'd like to turn this into
1735 &array[x]
1736 which may be able to propagate further. */
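/* For instance (illustrative only), with

	int a[4];

   the expression (int *) &a + 4, represented as a PLUS_EXPR of &a and
   the byte offset 4, denotes the address of the second element and can
   be rewritten as &a[1] (assuming a 4-byte int), which later
   dereferences may be able to fold further.  */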
1739 maybe_fold_stmt_addition (tree expr)
1741 tree op0 = TREE_OPERAND (expr, 0);
1742 tree op1 = TREE_OPERAND (expr, 1);
1743 tree ptr_type = TREE_TYPE (expr);
1746 bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
1748 /* We're only interested in pointer arithmetic. */
1749 if (!POINTER_TYPE_P (ptr_type))
1751 /* Canonicalize the integral operand to op1. */
1752 if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
1756 t = op0, op0 = op1, op1 = t;
1758 /* It had better be a constant. */
1759 if (TREE_CODE (op1) != INTEGER_CST)
1761 /* The first operand should be an ADDR_EXPR. */
1762 if (TREE_CODE (op0) != ADDR_EXPR)
1764 op0 = TREE_OPERAND (op0, 0);
1766 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1767 the offset into it. */
1768 while (TREE_CODE (op0) == ARRAY_REF)
1770 tree array_obj = TREE_OPERAND (op0, 0);
1771 tree array_idx = TREE_OPERAND (op0, 1);
1772 tree elt_type = TREE_TYPE (op0);
1773 tree elt_size = TYPE_SIZE_UNIT (elt_type);
1776 if (TREE_CODE (array_idx) != INTEGER_CST)
1778 if (TREE_CODE (elt_size) != INTEGER_CST)
1781 /* Un-bias the index by the min index of the array type. */
1782 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
1785 min_idx = TYPE_MIN_VALUE (min_idx);
1788 array_idx = convert (TREE_TYPE (min_idx), array_idx);
1789 if (!integer_zerop (min_idx))
1790 array_idx = int_const_binop (MINUS_EXPR, array_idx,
1795 /* Convert the index to a byte offset. */
1796 array_idx = convert (sizetype, array_idx);
1797 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
1799 /* Update the operands for the next round, or for folding. */
1800 /* If we're manipulating unsigned types, then folding into negative
1801 values can produce incorrect results. Particularly if the type
1802 is smaller than the width of the pointer. */
1804 && TYPE_UNSIGNED (TREE_TYPE (op1))
1805 && tree_int_cst_lt (array_idx, op1))
1807 op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
1813 /* If we weren't able to fold the subtraction into another array reference,
1814 canonicalize the integer for passing to the array and component ref
1815 simplification functions. */
1818 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
1820 op1 = fold (build1 (NEGATE_EXPR, TREE_TYPE (op1), op1));
1821 /* ??? In theory fold should always produce another integer. */
1822 if (TREE_CODE (op1) != INTEGER_CST)
1826 ptd_type = TREE_TYPE (ptr_type);
1828 /* At which point we can try some of the same things as for indirects. */
1829 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
1831 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
1834 t = build1 (ADDR_EXPR, ptr_type, t);
1839 /* Subroutine of fold_stmt called via walk_tree. We perform several
1840 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
1843 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
1845 bool *changed_p = data;
1846 tree expr = *expr_p, t;
1848 /* ??? It'd be nice if walk_tree had a pre-order option. */
1849 switch (TREE_CODE (expr))
1852 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1857 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
1861 /* ??? Could handle ARRAY_REF here, as a variant of INDIRECT_REF.
1862 We'd only want to bother decomposing an existing ARRAY_REF if
1863 the base array is found to have another offset contained within.
1864 Otherwise we'd be wasting time. */
1867 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1872 /* Set TREE_INVARIANT properly so that the value is properly
1873 considered constant, and so gets propagated as expected. */
1875 recompute_tree_invarant_for_addr_expr (expr);
1880 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1883 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
1888 t = maybe_fold_stmt_addition (expr);
1892 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1897 /* Make sure the FIELD_DECL is actually a field in the type on
1898 the lhs. In cases with IMA it is possible that it came
1899 from another, equivalent type at this point. We have
1900 already checked the equivalence in this case.
1901 Match on type plus offset, to allow for unnamed fields.
1902 We won't necessarily get the corresponding field for
1903 unions; this is believed to be harmless. */
1905 if ((current_file_decl && TREE_CHAIN (current_file_decl))
1906 && (DECL_FIELD_CONTEXT (TREE_OPERAND (expr, 1)) !=
1907 TREE_TYPE (TREE_OPERAND (expr, 0))))
1910 tree orig_field = TREE_OPERAND (expr, 1);
1911 tree orig_type = TREE_TYPE (orig_field);
1912 for (f = TYPE_FIELDS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1913 f; f = TREE_CHAIN (f))
1915 if (lang_hooks.types_compatible_p (TREE_TYPE (f), orig_type)
1916 && tree_int_cst_compare (DECL_FIELD_BIT_OFFSET (f),
1917 DECL_FIELD_BIT_OFFSET (orig_field))
1919 && tree_int_cst_compare (DECL_FIELD_OFFSET (f),
1920 DECL_FIELD_OFFSET (orig_field))
1923 TREE_OPERAND (expr, 1) = f;
1927 /* Fall through is an error; it will be detected in tree-sra. */
1944 /* Fold the statement pointed by STMT_P. In some cases, this function may
1945 replace the whole statement with a new one. Returns true iff folding
1946 makes any changes. */
1949 fold_stmt (tree *stmt_p)
1951 tree rhs, result, stmt;
1952 bool changed = false;
1956 /* If we replaced constants and the statement makes pointer dereferences,
1957 then we may need to fold instances of *&VAR into VAR, etc. */
1958 if (walk_tree (stmt_p, fold_stmt_r, &changed, NULL))
1961 = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
1966 rhs = get_rhs (stmt);
1971 /* Check for builtins that CCP can handle using information not
1972 available in the generic fold routines. */
1973 if (TREE_CODE (rhs) == CALL_EXPR)
1975 tree callee = get_callee_fndecl (rhs);
1976 if (callee && DECL_BUILT_IN (callee))
1977 result = ccp_fold_builtin (stmt, rhs);
1980 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
1981 if (result == NULL_TREE)
1982 result = fold (rhs);
1984 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
1985 may have been added by fold, and "useless" type conversions that might
1986 now be apparent due to propagation. */
1987 STRIP_MAIN_TYPE_NOPS (result);
1988 STRIP_USELESS_TYPE_CONVERSION (result);
1993 set_rhs (stmt_p, result);
1999 /* Get the main expression from statement STMT. */
2004 enum tree_code code = TREE_CODE (stmt);
2006 if (code == MODIFY_EXPR)
2007 return TREE_OPERAND (stmt, 1);
2008 if (code == COND_EXPR)
2009 return COND_EXPR_COND (stmt);
2010 else if (code == SWITCH_EXPR)
2011 return SWITCH_COND (stmt);
2012 else if (code == RETURN_EXPR)
2014 if (!TREE_OPERAND (stmt, 0))
2016 if (TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
2017 return TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
2019 return TREE_OPERAND (stmt, 0);
2021 else if (code == GOTO_EXPR)
2022 return GOTO_DESTINATION (stmt);
2023 else if (code == LABEL_EXPR)
2024 return LABEL_EXPR_LABEL (stmt);
2030 /* Set the main expression of *STMT_P to EXPR. */
2033 set_rhs (tree *stmt_p, tree expr)
2035 tree stmt = *stmt_p;
2036 enum tree_code code = TREE_CODE (stmt);
2038 if (code == MODIFY_EXPR)
2039 TREE_OPERAND (stmt, 1) = expr;
2040 else if (code == COND_EXPR)
2041 COND_EXPR_COND (stmt) = expr;
2042 else if (code == SWITCH_EXPR)
2043 SWITCH_COND (stmt) = expr;
2044 else if (code == RETURN_EXPR)
2046 if (TREE_OPERAND (stmt, 0)
2047 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
2048 TREE_OPERAND (TREE_OPERAND (stmt, 0), 1) = expr;
2050 TREE_OPERAND (stmt, 0) = expr;
2052 else if (code == GOTO_EXPR)
2053 GOTO_DESTINATION (stmt) = expr;
2054 else if (code == LABEL_EXPR)
2055 LABEL_EXPR_LABEL (stmt) = expr;
2058 /* Replace the whole statement with EXPR. If EXPR has no side
2059 effects, then replace *STMT_P with an empty statement. */
2060 stmt_ann_t ann = stmt_ann (stmt);
2061 *stmt_p = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
2062 (*stmt_p)->common.ann = (tree_ann) ann;
2064 if (TREE_SIDE_EFFECTS (expr))
2070 /* Fix all the SSA_NAMEs created by *STMT_P to point to its new replacement. */
2072 defs = DEF_OPS (ann);
2073 for (i = 0; i < NUM_DEFS (defs); i++)
2075 tree var = DEF_OP (defs, i);
2076 if (TREE_CODE (var) == SSA_NAME)
2077 SSA_NAME_DEF_STMT (var) = *stmt_p;
2080 vdefs = VDEF_OPS (ann);
2081 for (i = 0; i < NUM_VDEFS (vdefs); i++)
2083 tree var = VDEF_RESULT (vdefs, i);
2084 if (TREE_CODE (var) == SSA_NAME)
2085 SSA_NAME_DEF_STMT (var) = *stmt_p;
2092 /* Return a default value for variable VAR using the following rules:
2094 1- Global and static variables are considered VARYING, unless they are declared 'const'.
2097 2- Function arguments are considered VARYING.
2099 3- Any other value is considered UNDEFINED. This is useful when
2100 considering PHI nodes. PHI arguments that are undefined do not
2101 change the constant value of the PHI node, which allows for more
2102 constants to be propagated. */
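/* For example (illustrative only):

	const int limit = 10;	<-- CONSTANT 10 (read-only with a
				    constant initializer)
	int f (int n)		<-- n_1 is VARYING (function argument)
	{
	  int i;		<-- i_2 starts out UNDEFINED
	  ...
	}  */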
2105 get_default_value (tree var)
2110 if (TREE_CODE (var) == SSA_NAME)
2111 sym = SSA_NAME_VAR (var);
2114 #ifdef ENABLE_CHECKING
2121 val.lattice_val = UNDEFINED;
2122 val.const_val = NULL_TREE;
2124 if (TREE_CODE (sym) == PARM_DECL || TREE_THIS_VOLATILE (sym))
2126 /* Function arguments and volatile variables are considered VARYING. */
2127 val.lattice_val = VARYING;
2129 else if (decl_function_context (sym) != current_function_decl
2130 || TREE_STATIC (sym))
2132 /* Globals and static variables are considered VARYING, unless they
2133 are declared 'const'. */
2134 val.lattice_val = VARYING;
2136 if (TREE_READONLY (sym)
2137 && DECL_INITIAL (sym)
2138 && is_gimple_min_invariant (DECL_INITIAL (sym)))
2140 val.lattice_val = CONSTANT;
2141 val.const_val = DECL_INITIAL (sym);
2146 enum tree_code code;
2147 tree stmt = SSA_NAME_DEF_STMT (var);
2149 if (!IS_EMPTY_STMT (stmt))
2151 code = TREE_CODE (stmt);
2152 if (code != MODIFY_EXPR && code != PHI_NODE)
2153 val.lattice_val = VARYING;
2161 /* Fold builtin call FN in statement STMT. If it cannot be folded into a
2162 constant, return NULL_TREE. Otherwise, return its constant value. */
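/* For example (illustrative only), a call such as

	n_2 = strlen ("hello");

   folds to the constant 5, either through the generic folder or through
   the string length information gathered below, while strlen (p_3) with
   an unknown p_3 cannot be simplified and NULL_TREE is returned.  */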
2165 ccp_fold_builtin (tree stmt, tree fn)
2167 tree result, strlen_val[2];
2168 tree arglist = TREE_OPERAND (fn, 1), a;
2169 tree callee = get_callee_fndecl (fn);
2173 /* Ignore MD builtins. */
2174 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2177 /* First try the generic builtin folder. If that succeeds, return the result. */
2179 result = fold_builtin (fn);
2183 /* If the builtin could not be folded, and it has no argument list, there is nothing else we can do. */
2188 /* Limit the work only for builtins we know how to simplify. */
2189 switch (DECL_FUNCTION_CODE (callee))
2191 case BUILT_IN_STRLEN:
2192 case BUILT_IN_FPUTS:
2193 case BUILT_IN_FPUTS_UNLOCKED:
2196 case BUILT_IN_STRCPY:
2197 case BUILT_IN_STRNCPY:
2204 /* Try to use the dataflow information gathered by the CCP process. */
2205 visited = BITMAP_XMALLOC ();
2207 memset (strlen_val, 0, sizeof (strlen_val));
2208 for (i = 0, a = arglist;
2210 i++, strlen_arg >>= 1, a = TREE_CHAIN (a))
2213 bitmap_clear (visited);
2214 if (!get_strlen (TREE_VALUE (a), &strlen_val[i], visited))
2215 strlen_val[i] = NULL_TREE;
2218 BITMAP_XFREE (visited);
2220 /* FIXME. All this code looks dangerous in the sense that it might
2221 create non-gimple expressions. */
2222 switch (DECL_FUNCTION_CODE (callee))
2224 case BUILT_IN_STRLEN:
2225 /* Convert from the internal "sizetype" type to "size_t". */
2229 tree new = convert (size_type_node, strlen_val[0]);
2231 /* If the result is not a valid gimple value, or not a cast
2232 of a valid gimple value, then we can not use the result. */
2233 if (is_gimple_val (new)
2234 || (is_gimple_cast (new)
2235 && is_gimple_val (TREE_OPERAND (new, 0))))
2240 return strlen_val[0];
2241 case BUILT_IN_STRCPY:
2242 if (strlen_val[1]
2243 && is_gimple_val (strlen_val[1]))
2244 return simplify_builtin_strcpy (arglist, strlen_val[1]);
2245 case BUILT_IN_STRNCPY:
2246 if (strlen_val[1]
2247 && is_gimple_val (strlen_val[1]))
2248 return simplify_builtin_strncpy (arglist, strlen_val[1]);
2249 case BUILT_IN_FPUTS:
2250 return simplify_builtin_fputs (arglist,
2251 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2253 case BUILT_IN_FPUTS_UNLOCKED:
2254 return simplify_builtin_fputs (arglist,
2255 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2266 /* Return the string length of ARG in LENGTH. If ARG is an SSA name variable,
2267 follow its use-def chains. If LENGTH is not NULL and its value is not
2268 equal to the length we determine, or if we are unable to determine the
2269 length, return false. VISITED is a bitmap of visited variables. */
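/* For example (an illustrative fragment, not from any testcase):

	p_1 = "foo";
	...
	p_2 = "bar";
	...
	# p_3 = PHI <p_1, p_2>;
	n_4 = strlen (p_3);

   Both PHI arguments have string length 3, so *LENGTH is set to 3 and
   true is returned, allowing the strlen call to be folded.  If the two
   arguments had different lengths, false would be returned instead.  */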
2272 get_strlen (tree arg, tree *length, bitmap visited)
2274 tree var, def_stmt, val;
2276 if (TREE_CODE (arg) != SSA_NAME)
2278 val = c_strlen (arg, 1);
2282 if (*length && simple_cst_equal (val, *length) != 1)
2289 /* If we were already here, break the infinite cycle. */
2290 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2292 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2295 def_stmt = SSA_NAME_DEF_STMT (var);
2297 switch (TREE_CODE (def_stmt))
2303 /* The RHS of the statement defining VAR must either have a
2304 constant length or come from another SSA_NAME with a constant length. */
2306 rhs = TREE_OPERAND (def_stmt, 1);
2308 if (TREE_CODE (rhs) == SSA_NAME)
2309 return get_strlen (rhs, length, visited);
2311 /* See if the RHS is a constant length. */
2312 len = c_strlen (rhs, 1);
2315 if (*length && simple_cst_equal (len, *length) != 1)
2327 /* All the arguments of the PHI node must have the same constant length. */
2331 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2333 tree arg = PHI_ARG_DEF (def_stmt, i);
2335 /* If this PHI has itself as an argument, we cannot
2336 determine the string length of this argument. However,
2337 if we can find a constant string length for the other
2338 PHI args then we can still be sure that this is a
2339 constant string length. So be optimistic and just
2340 continue with the next argument. */
2341 if (arg == PHI_RESULT (def_stmt))
2344 if (!get_strlen (arg, length, visited))
2360 /* A simple pass that attempts to fold all builtin functions. This pass
2361 is run after we've propagated as many constants as we can. */
2364 execute_fold_all_builtins (void)
2369 block_stmt_iterator i;
2370 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
2372 tree *stmtp = bsi_stmt_ptr (i);
2373 tree call = get_rhs (*stmtp);
2374 tree callee, result;
2376 if (!call || TREE_CODE (call) != CALL_EXPR)
2378 callee = get_callee_fndecl (call);
2379 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2382 result = ccp_fold_builtin (*stmtp, call);
2384 switch (DECL_FUNCTION_CODE (callee))
2386 case BUILT_IN_CONSTANT_P:
2387 /* Resolve __builtin_constant_p. If it hasn't been
2388 folded to integer_one_node by now, it's fairly
2389 certain that the value simply isn't constant. */
2390 result = integer_zero_node;
2397 if (dump_file && (dump_flags & TDF_DETAILS))
2399 fprintf (dump_file, "Simplified\n ");
2400 print_generic_stmt (dump_file, *stmtp, dump_flags);
2403 set_rhs (stmtp, result);
2404 modify_stmt (*stmtp);
2406 if (dump_file && (dump_flags & TDF_DETAILS))
2408 fprintf (dump_file, "to\n ");
2409 print_generic_stmt (dump_file, *stmtp, dump_flags);
2410 fprintf (dump_file, "\n");
2416 struct tree_opt_pass pass_fold_builtins =
2420 execute_fold_all_builtins, /* execute */
2423 0, /* static_pass_number */
2425 PROP_cfg | PROP_ssa, /* properties_required */
2426 0, /* properties_provided */
2427 0, /* properties_destroyed */
2428 0, /* todo_flags_start */
2429 TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
2433 #include "gt-tree-ssa-ccp.h"