1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
24 /* Conditional constant propagation (CCP) is based on the SSA
25 propagation engine (tree-ssa-propagate.c). Constant assignments of
26 the form VAR = CST are propagated from the assignments into uses of
27 VAR, which in turn may generate new constants. The simulation uses
28 a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:
32 UNINITIALIZED -> the initial state of the value. This value
33 is replaced with a correct initial value
34 the first time the value is used, so the
35 rest of the pass does not need to care about
36 it. Using this value simplifies initialization
37 of the pass, and prevents us from needlessly
38 scanning statements that are never reached.
40 UNDEFINED -> V_i is a local variable whose definition
41 has not been processed yet. Therefore we
		     don't yet know if its value is a constant or not.
   CONSTANT	->  V_i has been found to hold a constant value C.
   VARYING	->  V_i cannot take a constant value, or if it
		    does, it is not possible to determine it at
		    compile time.
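
   As a small illustrative sketch (the SSA names x_1, x_2, y_3 and the
   literal values are made up for this example), the sequence

	x_1 = 4;
	x_2 = x_1 + y_3;

   leaves x_1 with lattice value CONSTANT 4, while x_2 is CONSTANT only
   if y_3 is also found to be CONSTANT; if y_3 is VARYING, x_2 becomes
   VARYING as well.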
52 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
54 1- In ccp_visit_stmt, we are interested in assignments whose RHS
55 evaluates into a constant and conditional jumps whose predicate
56 evaluates into a boolean true or false. When an assignment of
57 the form V_i = CONST is found, V_i's lattice value is set to
58 CONSTANT and CONST is associated with it. This causes the
59 propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      will be visited again.
63 If the statement is a conditional with a constant predicate, we
64 mark the outgoing edges as executable or not executable
65 depending on the predicate's value. This is then used when
66 visiting PHI nodes to know when a PHI argument can be ignored.
69 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
70 same constant C, then the LHS of the PHI is set to C. This
71 evaluation is known as the "meet operation". Since one of the
72 goals of this evaluation is to optimistically return constant
73 values as often as possible, it uses two main short cuts:
75 - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
89 - If an argument has an UNDEFINED value, then it does not affect
90 the outcome of the meet operation. If a variable V_i has an
91 UNDEFINED value, it means that either its defining statement
92 hasn't been visited yet or V_i has no defining statement, in
93 which case the original symbol 'V' is being used
94 uninitialized. Since 'V' is a local variable, the compiler
95 may assume any initial value for it.
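
	For example (with hypothetical SSA names), in

			a_11 = PHI (a_9, a_10)

	where a_9 is CONSTANT 5 and a_10 is UNDEFINED because 'a' was
	never assigned on that path, the meet operation may
	optimistically treat the PHI result as CONSTANT 5.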
98 After propagation, every variable V_i that ends up with a lattice
99 value of CONSTANT will have the associated constant value in the
100 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
101 final substitution and folding.
104 Constant propagation in stores and loads (STORE-CCP)
105 ----------------------------------------------------
107 While CCP has all the logic to propagate constants in GIMPLE
108 registers, it is missing the ability to associate constants with
109 stores and loads (i.e., pointer dereferences, structures and
110 global/aliased variables). We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).
   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }
133 We should be able to deduce that the predicate 'a.a != B' is always
134 false. To achieve this, we associate constant values to the SSA
135 names in the V_MAY_DEF and V_MUST_DEF operands for each store.
136 Additionally, since we also glob partial loads/stores with the base
137 symbol, we also keep track of the memory reference where the
138 constant value was stored (in the MEM_REF field of PROP_VALUE_T).
   For instance,

	    # a_5 = V_MAY_DEF <a_4>
	    a.a = 2;

	    # VUSE <a_5>
	    x_3 = a.b;
147 In the example above, CCP will associate value '2' with 'a_5', but
148 it would be wrong to replace the load from 'a.b' with '2', because
149 '2' had been stored into a.a.
151 Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;

	  # A_5 = PHI (A_4, A_2);
	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}
168 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
169 been defined outside of foo. If we were to assume it UNDEFINED, we
170 would erroneously optimize the above into 'return 3;'.
172 Though STORE-CCP is not too expensive, it does have to do more work
173 than regular CCP, so it is only enabled at -O2. Both regular CCP
174 and STORE-CCP use the exact same algorithm. The only distinction
175 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

181 Constant propagation with conditional branches,
182 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
184 Building an Optimizing Compiler,
185 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
187 Advanced Compiler Design and Implementation,
188 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
226 contain the actual memory reference used to store (i.e., the LHS of
227 the assignment doing the store). */
228 static prop_value_t *const_val;
230 /* True if we are also propagating constants in stores and loads. */
231 static bool do_store_ccp;
233 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
236 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
238 switch (val.lattice_val)
241 fprintf (outf, "%sUNINITIALIZED", prefix);
244 fprintf (outf, "%sUNDEFINED", prefix);
247 fprintf (outf, "%sVARYING", prefix);
250 fprintf (outf, "%sCONSTANT ", prefix);
251 print_generic_expr (outf, val.value, dump_flags);
259 /* Print lattice value VAL to stderr. */
261 void debug_lattice_value (prop_value_t val);
264 debug_lattice_value (prop_value_t val)
266 dump_lattice_value (stderr, "", val);
267 fprintf (stderr, "\n");
271 /* The regular is_gimple_min_invariant does a shallow test of the object.
272 It assumes that full gimplification has happened, or will happen on the
273 object. For a value coming from DECL_INITIAL, this is not true, so we
274 have to be more strict ourselves. */
277 ccp_decl_initial_min_invariant (tree t)
279 if (!is_gimple_min_invariant (t))
281 if (TREE_CODE (t) == ADDR_EXPR)
283 /* Inline and unroll is_gimple_addressable. */
286 t = TREE_OPERAND (t, 0);
287 if (is_gimple_id (t))
289 if (!handled_component_p (t))
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
319 get_default_value (tree var)
321 tree sym = SSA_NAME_VAR (var);
322 prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
324 if (!do_store_ccp && !is_gimple_reg (var))
326 /* Short circuit for regular CCP. We are not interested in any
327 non-register when DO_STORE_CCP is false. */
328 val.lattice_val = VARYING;
330 else if (SSA_NAME_VALUE (var)
331 && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
333 val.lattice_val = CONSTANT;
334 val.value = SSA_NAME_VALUE (var);
336 else if (TREE_STATIC (sym)
337 && TREE_READONLY (sym)
339 && DECL_INITIAL (sym)
340 && ccp_decl_initial_min_invariant (DECL_INITIAL (sym)))
      /* Globals and static variables declared 'const' take their
	 initial value.  */
344 val.lattice_val = CONSTANT;
345 val.value = DECL_INITIAL (sym);
350 tree stmt = SSA_NAME_DEF_STMT (var);
352 if (IS_EMPTY_STMT (stmt))
354 /* Variables defined by an empty statement are those used
355 before being initialized. If VAR is a local variable, we
356 can assume initially that it is UNDEFINED, otherwise we must
357 consider it VARYING. */
358 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
359 val.lattice_val = UNDEFINED;
361 val.lattice_val = VARYING;
363 else if (TREE_CODE (stmt) == MODIFY_EXPR
364 || TREE_CODE (stmt) == PHI_NODE)
366 /* Any other variable defined by an assignment or a PHI node
367 is considered UNDEFINED. */
368 val.lattice_val = UNDEFINED;
372 /* Otherwise, VAR will never take on a constant value. */
373 val.lattice_val = VARYING;
381 /* Get the constant value associated with variable VAR. */
383 static inline prop_value_t *
386 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
388 if (val->lattice_val == UNINITIALIZED)
389 *val = get_default_value (var);
394 /* Sets the value associated with VAR to VARYING. */
397 set_value_varying (tree var)
399 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
401 val->lattice_val = VARYING;
402 val->value = NULL_TREE;
403 val->mem_ref = NULL_TREE;
406 /* Set the value for variable VAR to NEW_VAL. Return true if the new
407 value is different from VAR's previous value. */
410 set_lattice_value (tree var, prop_value_t new_val)
412 prop_value_t *old_val = get_value (var);
414 /* Lattice transitions must always be monotonically increasing in
415 value. If *OLD_VAL and NEW_VAL are the same, return false to
416 inform the caller that this was a non-transition. */
418 gcc_assert (old_val->lattice_val <= new_val.lattice_val
419 || (old_val->lattice_val == new_val.lattice_val
420 && old_val->value == new_val.value
421 && old_val->mem_ref == new_val.mem_ref));
423 if (old_val->lattice_val != new_val.lattice_val)
425 if (dump_file && (dump_flags & TDF_DETAILS))
427 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
428 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
433 gcc_assert (new_val.lattice_val != UNDEFINED);
441 /* Return the likely CCP lattice value for STMT.
443 If STMT has no operands, then return CONSTANT.
445 Else if any operands of STMT are undefined, then return UNDEFINED.
447 Else if any operands of STMT are constants, then return CONSTANT.
449 Else return VARYING. */
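
/* For instance (a hypothetical example), for the statement 'z_4 = x_2 + 1'
   likely_value returns CONSTANT when x_2 is known CONSTANT, UNDEFINED when
   x_2 is UNDEFINED, and VARYING otherwise, following the rules above.  */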
452 likely_value (tree stmt)
454 bool has_constant_operand;
459 ann = stmt_ann (stmt);
  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (ann->has_volatile_ops)
    return VARYING;
  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;
473 /* A CALL_EXPR is assumed to be varying. NOTE: This may be overly
474 conservative, in the presence of const and pure calls. */
475 if (get_call_expr_in (stmt) != NULL_TREE)
478 /* Anything other than assignments and conditional jumps are not
479 interesting for CCP. */
480 if (TREE_CODE (stmt) != MODIFY_EXPR
481 && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
482 && TREE_CODE (stmt) != COND_EXPR
483 && TREE_CODE (stmt) != SWITCH_EXPR)
486 if (is_gimple_min_invariant (get_rhs (stmt)))
489 has_constant_operand = false;
490 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
492 prop_value_t *val = get_value (use);
494 if (val->lattice_val == UNDEFINED)
497 if (val->lattice_val == CONSTANT)
498 has_constant_operand = true;
501 if (has_constant_operand
502 /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
505 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
511 /* Returns true if STMT cannot be constant. */
514 surely_varying_stmt_p (tree stmt)
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (stmt_ann (stmt)->has_volatile_ops)
    return true;
521 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
526 /* We can only handle simple loads and stores. */
527 if (!stmt_makes_single_load (stmt)
528 && !stmt_makes_single_store (stmt))
532 /* If it contains a call, it is varying. */
533 if (get_call_expr_in (stmt) != NULL_TREE)
536 /* Anything other than assignments and conditional jumps are not
537 interesting for CCP. */
538 if (TREE_CODE (stmt) != MODIFY_EXPR
539 && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
540 && TREE_CODE (stmt) != COND_EXPR
541 && TREE_CODE (stmt) != SWITCH_EXPR)
547 /* Initialize local data structures for CCP. */
550 ccp_initialize (void)
554 const_val = XNEWVEC (prop_value_t, num_ssa_names);
555 memset (const_val, 0, num_ssa_names * sizeof (*const_val));
557 /* Initialize simulation flags for PHI nodes and statements. */
560 block_stmt_iterator i;
562 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
564 tree stmt = bsi_stmt (i);
565 bool is_varying = surely_varying_stmt_p (stmt);
572 /* If the statement will not produce a constant, mark
573 all its outputs VARYING. */
574 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
577 set_value_varying (def);
581 DONT_SIMULATE_AGAIN (stmt) = is_varying;
  /* Now process PHI nodes.  We never set DONT_SIMULATE_AGAIN on PHI nodes,
586 since we do not know which edges are executable yet, except for
587 phi nodes for virtual operands when we do not do store ccp. */
592 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
594 if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi)))
595 DONT_SIMULATE_AGAIN (phi) = true;
597 DONT_SIMULATE_AGAIN (phi) = false;
603 /* Do final substitution of propagated values, cleanup the flowgraph and
604 free allocated storage. */
609 /* Perform substitutions based on the known constant values. */
610 substitute_and_fold (const_val, false);
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.
619 any M UNDEFINED = any
620 any M VARYING = VARYING
621 Ci M Cj = Ci if (i == j)
622 Ci M Cj = VARYING if (i != j)
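
   For example, CONSTANT 4 M CONSTANT 4 yields CONSTANT 4, whereas
   CONSTANT 4 M CONSTANT 5 yields VARYING (the example values are
   arbitrary).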
626 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
628 if (val1->lattice_val == UNDEFINED)
630 /* UNDEFINED M any = any */
633 else if (val2->lattice_val == UNDEFINED)
635 /* any M UNDEFINED = any
636 Nothing to do. VAL1 already contains the value we want. */
639 else if (val1->lattice_val == VARYING
640 || val2->lattice_val == VARYING)
642 /* any M VARYING = VARYING. */
643 val1->lattice_val = VARYING;
644 val1->value = NULL_TREE;
645 val1->mem_ref = NULL_TREE;
647 else if (val1->lattice_val == CONSTANT
648 && val2->lattice_val == CONSTANT
649 && simple_cst_equal (val1->value, val2->value) == 1
651 || (val1->mem_ref && val2->mem_ref
652 && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
654 /* Ci M Cj = Ci if (i == j)
655 Ci M Cj = VARYING if (i != j)
657 If these two values come from memory stores, make sure that
658 they come from the same memory reference. */
659 val1->lattice_val = CONSTANT;
660 val1->value = val1->value;
661 val1->mem_ref = val1->mem_ref;
665 /* Any other combination is VARYING. */
666 val1->lattice_val = VARYING;
667 val1->value = NULL_TREE;
668 val1->mem_ref = NULL_TREE;
673 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
674 lattice values to determine PHI_NODE's lattice value. The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
676 of the PHI node that are incoming via executable edges. */
678 static enum ssa_prop_result
679 ccp_visit_phi_node (tree phi)
682 prop_value_t *old_val, new_val;
684 if (dump_file && (dump_flags & TDF_DETAILS))
686 fprintf (dump_file, "\nVisiting PHI node: ");
687 print_generic_expr (dump_file, phi, dump_flags);
690 old_val = get_value (PHI_RESULT (phi));
691 switch (old_val->lattice_val)
694 return SSA_PROP_VARYING;
701 new_val.lattice_val = UNDEFINED;
702 new_val.value = NULL_TREE;
703 new_val.mem_ref = NULL_TREE;
710 for (i = 0; i < PHI_NUM_ARGS (phi); i++)
712 /* Compute the meet operator over all the PHI arguments flowing
713 through executable edges. */
714 edge e = PHI_ARG_EDGE (phi, i);
716 if (dump_file && (dump_flags & TDF_DETAILS))
719 "\n Argument #%d (%d -> %d %sexecutable)\n",
720 i, e->src->index, e->dest->index,
721 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
      /* If the incoming edge is executable, compute the meet operator for
725 the existing value of the PHI node and the current PHI argument. */
726 if (e->flags & EDGE_EXECUTABLE)
728 tree arg = PHI_ARG_DEF (phi, i);
729 prop_value_t arg_val;
731 if (is_gimple_min_invariant (arg))
733 arg_val.lattice_val = CONSTANT;
735 arg_val.mem_ref = NULL_TREE;
738 arg_val = *(get_value (arg));
740 ccp_lattice_meet (&new_val, &arg_val);
742 if (dump_file && (dump_flags & TDF_DETAILS))
744 fprintf (dump_file, "\t");
745 print_generic_expr (dump_file, arg, dump_flags);
746 dump_lattice_value (dump_file, "\tValue: ", arg_val);
747 fprintf (dump_file, "\n");
750 if (new_val.lattice_val == VARYING)
755 if (dump_file && (dump_flags & TDF_DETAILS))
757 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
758 fprintf (dump_file, "\n\n");
761 /* Make the transition to the new value. */
762 if (set_lattice_value (PHI_RESULT (phi), new_val))
764 if (new_val.lattice_val == VARYING)
765 return SSA_PROP_VARYING;
767 return SSA_PROP_INTERESTING;
770 return SSA_PROP_NOT_INTERESTING;
/* CCP specific front-end to the non-destructive constant folding
   routines.
777 Attempt to simplify the RHS of STMT knowing that one or more
778 operands are constants.
780 If simplification is possible, return the simplified RHS,
781 otherwise return the original RHS. */
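
/* As a hypothetical example, if the lattice records x_2 as CONSTANT 4,
   then for a statement whose RHS is 'x_2 + 1' ccp_fold can return the
   constant 5; if nothing useful is known about the operands, the
   original RHS is returned unchanged.  */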
786 tree rhs = get_rhs (stmt);
787 enum tree_code code = TREE_CODE (rhs);
788 enum tree_code_class kind = TREE_CODE_CLASS (code);
789 tree retval = NULL_TREE;
791 if (TREE_CODE (rhs) == SSA_NAME)
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
795 return get_value (rhs)->value;
797 else if (do_store_ccp && stmt_makes_single_load (stmt))
799 /* If the RHS is a memory load, see if the VUSEs associated with
800 it are a valid constant for that memory load. */
801 prop_value_t *val = get_value_loaded_by (stmt, const_val);
802 if (val && val->mem_ref)
804 if (operand_equal_p (val->mem_ref, rhs, 0))
807 /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
808 complex type with a known constant value, return it. */
809 if ((TREE_CODE (rhs) == REALPART_EXPR
810 || TREE_CODE (rhs) == IMAGPART_EXPR)
811 && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
812 return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
817 /* Unary operators. Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
820 if (kind == tcc_unary)
822 /* Handle unary operators which can appear in GIMPLE form. */
823 tree op0 = TREE_OPERAND (rhs, 0);
825 /* Simplify the operand down to a constant. */
826 if (TREE_CODE (op0) == SSA_NAME)
828 prop_value_t *val = get_value (op0);
829 if (val->lattice_val == CONSTANT)
830 op0 = get_value (op0)->value;
833 if ((code == NOP_EXPR || code == CONVERT_EXPR)
834 && tree_ssa_useless_type_conversion_1 (TREE_TYPE (rhs),
837 return fold_unary (code, TREE_TYPE (rhs), op0);
840 /* Binary and comparison operators. We know one or both of the
841 operands are constants. */
842 else if (kind == tcc_binary
843 || kind == tcc_comparison
844 || code == TRUTH_AND_EXPR
845 || code == TRUTH_OR_EXPR
846 || code == TRUTH_XOR_EXPR)
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
850 tree op0 = TREE_OPERAND (rhs, 0);
851 tree op1 = TREE_OPERAND (rhs, 1);
853 /* Simplify the operands down to constants when appropriate. */
854 if (TREE_CODE (op0) == SSA_NAME)
856 prop_value_t *val = get_value (op0);
857 if (val->lattice_val == CONSTANT)
861 if (TREE_CODE (op1) == SSA_NAME)
863 prop_value_t *val = get_value (op1);
864 if (val->lattice_val == CONSTANT)
868 return fold_binary (code, TREE_TYPE (rhs), op0, op1);
871 /* We may be able to fold away calls to builtin functions if their
872 arguments are constants. */
873 else if (code == CALL_EXPR
874 && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
875 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
877 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)))
879 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
882 tree fndecl, arglist;
887 /* Preserve the original values of every operand. */
888 orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
889 FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
892 /* Substitute operands with their values and try to fold. */
893 replace_uses_in (stmt, NULL, const_val);
894 fndecl = get_callee_fndecl (rhs);
895 arglist = TREE_OPERAND (rhs, 1);
896 retval = fold_builtin (fndecl, arglist, false);
898 /* Restore operands to their original form. */
900 FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
901 SET_USE (var_p, orig[i++]);
908 /* If we got a simplified form, see if we need to convert its type. */
910 return fold_convert (TREE_TYPE (rhs), retval);
912 /* No simplification was possible. */
917 /* Return the tree representing the element referenced by T if T is an
918 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
919 NULL_TREE otherwise. */
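
/* A minimal sketch of the kind of source this handles (the names are
   made up for illustration):

	static const int tbl[3] = { 10, 20, 30 };
	... tbl[1] ...

   Here a read of tbl[1] can be folded to the constant 20, provided the
   index is (or has been propagated to) an INTEGER_CST.  */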
922 fold_const_aggregate_ref (tree t)
925 tree base, ctor, idx, field;
926 unsigned HOST_WIDE_INT cnt;
929 switch (TREE_CODE (t))
932 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
933 DECL_INITIAL. If BASE is a nested reference into another
934 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
935 the inner reference. */
936 base = TREE_OPERAND (t, 0);
937 switch (TREE_CODE (base))
940 if (!TREE_READONLY (base)
941 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
942 || !targetm.binds_local_p (base))
945 ctor = DECL_INITIAL (base);
950 ctor = fold_const_aggregate_ref (base);
957 if (ctor == NULL_TREE
958 || (TREE_CODE (ctor) != CONSTRUCTOR
959 && TREE_CODE (ctor) != STRING_CST)
960 || !TREE_STATIC (ctor))
963 /* Get the index. If we have an SSA_NAME, try to resolve it
964 with the current lattice value for the SSA_NAME. */
965 idx = TREE_OPERAND (t, 1);
966 switch (TREE_CODE (idx))
969 if ((value = get_value (idx))
970 && value->lattice_val == CONSTANT
971 && TREE_CODE (value->value) == INTEGER_CST)
984 /* Fold read from constant string. */
985 if (TREE_CODE (ctor) == STRING_CST)
987 if ((TYPE_MODE (TREE_TYPE (t))
988 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
989 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
991 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
992 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
993 return build_int_cst (TREE_TYPE (t), (TREE_STRING_POINTER (ctor)
994 [TREE_INT_CST_LOW (idx)]));
998 /* Whoo-hoo! I'll fold ya baby. Yeah! */
999 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1000 if (tree_int_cst_equal (cfield, idx))
1005 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1006 DECL_INITIAL. If BASE is a nested reference into another
1007 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1008 the inner reference. */
1009 base = TREE_OPERAND (t, 0);
1010 switch (TREE_CODE (base))
1013 if (!TREE_READONLY (base)
1014 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1015 || !targetm.binds_local_p (base))
1018 ctor = DECL_INITIAL (base);
1023 ctor = fold_const_aggregate_ref (base);
1030 if (ctor == NULL_TREE
1031 || TREE_CODE (ctor) != CONSTRUCTOR
1032 || !TREE_STATIC (ctor))
1035 field = TREE_OPERAND (t, 1);
1037 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1039 /* FIXME: Handle bit-fields. */
1040 && ! DECL_BIT_FIELD (cfield))
1047 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1048 if (c && TREE_CODE (c) == COMPLEX_CST)
1049 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1060 /* Evaluate statement STMT. */
1063 evaluate_stmt (tree stmt)
1066 tree simplified = NULL_TREE;
1067 ccp_lattice_t likelyvalue = likely_value (stmt);
1069 val.mem_ref = NULL_TREE;
1071 /* If the statement is likely to have a CONSTANT result, then try
1072 to fold the statement to determine the constant value. */
1073 if (likelyvalue == CONSTANT)
1074 simplified = ccp_fold (stmt);
1075 /* If the statement is likely to have a VARYING result, then do not
1076 bother folding the statement. */
  else if (likelyvalue == VARYING)
1078 simplified = get_rhs (stmt);
1079 /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
1080 aggregates, extract the referenced constant. Otherwise the
1081 statement is likely to have an UNDEFINED value, and there will be
1082 nothing to do. Note that fold_const_aggregate_ref returns
1083 NULL_TREE if the first case does not match. */
1084 else if (!simplified)
1085 simplified = fold_const_aggregate_ref (get_rhs (stmt));
1087 if (simplified && is_gimple_min_invariant (simplified))
1089 /* The statement produced a constant value. */
1090 val.lattice_val = CONSTANT;
1091 val.value = simplified;
1095 /* The statement produced a nonconstant value. If the statement
1096 had UNDEFINED operands, then the result of the statement
1097 should be UNDEFINED. Otherwise, the statement is VARYING. */
1098 if (likelyvalue == UNDEFINED)
1099 val.lattice_val = likelyvalue;
1101 val.lattice_val = VARYING;
1103 val.value = NULL_TREE;
1110 /* Visit the assignment statement STMT. Set the value of its LHS to the
1111 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1112 creates virtual definitions, set the value of each new name to that
1113 of the RHS (if we can derive a constant out of the RHS). */
1115 static enum ssa_prop_result
1116 visit_assignment (tree stmt, tree *output_p)
1120 enum ssa_prop_result retval;
1122 lhs = TREE_OPERAND (stmt, 0);
1123 rhs = TREE_OPERAND (stmt, 1);
1125 if (TREE_CODE (rhs) == SSA_NAME)
1127 /* For a simple copy operation, we copy the lattice values. */
1128 prop_value_t *nval = get_value (rhs);
1131 else if (do_store_ccp && stmt_makes_single_load (stmt))
1133 /* Same as above, but the RHS is not a gimple register and yet
1134 has a known VUSE. If STMT is loading from the same memory
1135 location that created the SSA_NAMEs for the virtual operands,
1136 we can propagate the value on the RHS. */
1137 prop_value_t *nval = get_value_loaded_by (stmt, const_val);
1141 && operand_equal_p (nval->mem_ref, rhs, 0))
1144 val = evaluate_stmt (stmt);
1147 /* Evaluate the statement. */
1148 val = evaluate_stmt (stmt);
1150 /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
1151 value to be a VIEW_CONVERT_EXPR of the old constant value.
1153 ??? Also, if this was a definition of a bitfield, we need to widen
1154 the constant value into the type of the destination variable. This
1155 should not be necessary if GCC represented bitfields properly. */
1157 tree orig_lhs = TREE_OPERAND (stmt, 0);
1159 if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
1160 && val.lattice_val == CONSTANT)
1162 tree w = fold_unary (VIEW_CONVERT_EXPR,
1163 TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
1166 orig_lhs = TREE_OPERAND (orig_lhs, 0);
1167 if (w && is_gimple_min_invariant (w))
1171 val.lattice_val = VARYING;
1176 if (val.lattice_val == CONSTANT
1177 && TREE_CODE (orig_lhs) == COMPONENT_REF
1178 && DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
1180 tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
1183 if (w && is_gimple_min_invariant (w))
1187 val.lattice_val = VARYING;
1188 val.value = NULL_TREE;
1189 val.mem_ref = NULL_TREE;
1194 retval = SSA_PROP_NOT_INTERESTING;
1196 /* Set the lattice value of the statement's output. */
1197 if (TREE_CODE (lhs) == SSA_NAME)
1199 /* If STMT is an assignment to an SSA_NAME, we only have one
1201 if (set_lattice_value (lhs, val))
1204 if (val.lattice_val == VARYING)
1205 retval = SSA_PROP_VARYING;
1207 retval = SSA_PROP_INTERESTING;
1210 else if (do_store_ccp && stmt_makes_single_store (stmt))
1212 /* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
1213 to the new constant value and mark the LHS as the memory
1214 reference associated with VAL. */
1219 /* Mark VAL as stored in the LHS of this assignment. */
1220 if (val.lattice_val == CONSTANT)
1223 /* Set the value of every VDEF to VAL. */
1225 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
1226 changed |= set_lattice_value (vdef, val);
1228 /* Note that for propagation purposes, we are only interested in
1229 visiting statements that load the exact same memory reference
1230 stored here. Those statements will have the exact same list
1231 of virtual uses, so it is enough to set the output of this
1232 statement to be its first virtual definition. */
1233 *output_p = first_vdef (stmt);
1236 if (val.lattice_val == VARYING)
1237 retval = SSA_PROP_VARYING;
1239 retval = SSA_PROP_INTERESTING;
1247 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1248 if it can determine which edge will be taken. Otherwise, return
1249 SSA_PROP_VARYING. */
1251 static enum ssa_prop_result
1252 visit_cond_stmt (tree stmt, edge *taken_edge_p)
1257 block = bb_for_stmt (stmt);
1258 val = evaluate_stmt (stmt);
1260 /* Find which edge out of the conditional block will be taken and add it
1261 to the worklist. If no single edge can be determined statically,
1262 return SSA_PROP_VARYING to feed all the outgoing edges to the
1263 propagation engine. */
1264 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1266 return SSA_PROP_INTERESTING;
1268 return SSA_PROP_VARYING;
1272 /* Evaluate statement STMT. If the statement produces an output value and
1273 its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   value.
1277 If STMT is a conditional branch and we can determine its truth
1278 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1279 value, return SSA_PROP_VARYING. */
1281 static enum ssa_prop_result
1282 ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
1287 if (dump_file && (dump_flags & TDF_DETAILS))
1289 fprintf (dump_file, "\nVisiting statement:\n");
1290 print_generic_stmt (dump_file, stmt, dump_flags);
1291 fprintf (dump_file, "\n");
1294 if (TREE_CODE (stmt) == MODIFY_EXPR)
1296 /* If the statement is an assignment that produces a single
1297 output value, evaluate its RHS to see if the lattice value of
1298 its output has changed. */
1299 return visit_assignment (stmt, output_p);
1301 else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
1303 /* If STMT is a conditional branch, see if we can determine
1304 which branch will be taken. */
1305 return visit_cond_stmt (stmt, taken_edge_p);
1308 /* Any other kind of statement is not interesting for constant
1309 propagation and, therefore, not worth simulating. */
1310 if (dump_file && (dump_flags & TDF_DETAILS))
1311 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1313 /* Definitions made by statements other than assignments to
1314 SSA_NAMEs represent unknown modifications to their outputs.
1315 Mark them VARYING. */
1316 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1318 prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
1319 set_lattice_value (def, v);
1322 return SSA_PROP_VARYING;
1326 /* Main entry point for SSA Conditional Constant Propagation. */
1329 execute_ssa_ccp (bool store_ccp)
1331 do_store_ccp = store_ccp;
1333 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1341 execute_ssa_ccp (false);
1349 return flag_tree_ccp != 0;
1353 struct tree_opt_pass pass_ccp =
1356 gate_ccp, /* gate */
1357 do_ssa_ccp, /* execute */
1360 0, /* static_pass_number */
1361 TV_TREE_CCP, /* tv_id */
1362 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1363 0, /* properties_provided */
1364 PROP_smt_usage, /* properties_destroyed */
1365 0, /* todo_flags_start */
1366 TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
1367 | TODO_ggc_collect | TODO_verify_ssa
1368 | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
1374 do_ssa_store_ccp (void)
1376 /* If STORE-CCP is not enabled, we just run regular CCP. */
1377 execute_ssa_ccp (flag_tree_store_ccp != 0);
1382 gate_store_ccp (void)
1384 /* STORE-CCP is enabled only with -ftree-store-ccp, but when
1385 -fno-tree-store-ccp is specified, we should run regular CCP.
1386 That's why the pass is enabled with either flag. */
1387 return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
1391 struct tree_opt_pass pass_store_ccp =
1393 "store_ccp", /* name */
1394 gate_store_ccp, /* gate */
1395 do_ssa_store_ccp, /* execute */
1398 0, /* static_pass_number */
1399 TV_TREE_STORE_CCP, /* tv_id */
1400 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1401 0, /* properties_provided */
1402 PROP_smt_usage, /* properties_destroyed */
1403 0, /* todo_flags_start */
1404 TODO_dump_func | TODO_update_ssa
1405 | TODO_ggc_collect | TODO_verify_ssa
1407 | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
1411 /* Given a constant value VAL for bitfield FIELD, and a destination
1412 variable VAR, return VAL appropriately widened to fit into VAR. If
1413 FIELD is wider than HOST_WIDE_INT, NULL is returned. */
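
/* Worked example (hypothetical sizes): for a 4-bit signed bitfield
   holding the value -3 (bit pattern 1101), widening into a 32-bit
   variable ORs in the mask 0xfffffff0, i.e. sign-extends the constant
   to 0xfffffffd; for an unsigned 4-bit field the value would instead
   be ANDed with the mask 0xf.  */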
1416 widen_bitfield (tree val, tree field, tree var)
1418 unsigned HOST_WIDE_INT var_size, field_size;
1420 unsigned HOST_WIDE_INT mask;
1423 /* We can only do this if the size of the type and field and VAL are
1424 all constants representable in HOST_WIDE_INT. */
1425 if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
1426 || !host_integerp (DECL_SIZE (field), 1)
1427 || !host_integerp (val, 0))
1430 var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
1431 field_size = tree_low_cst (DECL_SIZE (field), 1);
1433 /* Give up if either the bitfield or the variable are too wide. */
1434 if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
1437 gcc_assert (var_size >= field_size);
1439 /* If the sign bit of the value is not set or the field's type is unsigned,
1440 just mask off the high order bits of the value. */
1441 if (DECL_UNSIGNED (field)
1442 || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
1444 /* Zero extension. Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
1447 for (i = 0, mask = 0; i < field_size; i++)
1448 mask |= ((HOST_WIDE_INT) 1) << i;
1450 wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
1451 build_int_cst (TREE_TYPE (var), mask));
1455 /* Sign extension. Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
1458 for (i = 0, mask = 0; i < (var_size - field_size); i++)
1459 mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);
1461 wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
1462 build_int_cst (TREE_TYPE (var), mask));
1469 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1470 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1471 is the desired result type. */
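
/* For example (assuming a 4-byte 'int' purely for illustration), with
   'int a[10]' the expression *(&a[1] + 8) can be rewritten as a[3]:
   the byte offset 8 divides exactly by the element size 4, giving an
   index adjustment of 2 on top of the existing index 1.  */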
1474 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
1476 tree min_idx, idx, elt_offset = integer_zero_node;
1477 tree array_type, elt_type, elt_size;
1479 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that ARRAY_REF.
1481 We can't do anything if either is variable.
1483 The case we handle here is *(&A[N]+O). */
1484 if (TREE_CODE (base) == ARRAY_REF)
1486 tree low_bound = array_ref_low_bound (base);
1488 elt_offset = TREE_OPERAND (base, 1);
1489 if (TREE_CODE (low_bound) != INTEGER_CST
1490 || TREE_CODE (elt_offset) != INTEGER_CST)
1493 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1494 base = TREE_OPERAND (base, 0);
1497 /* Ignore stupid user tricks of indexing non-array variables. */
1498 array_type = TREE_TYPE (base);
1499 if (TREE_CODE (array_type) != ARRAY_TYPE)
1501 elt_type = TREE_TYPE (array_type);
1502 if (!lang_hooks.types_compatible_p (orig_type, elt_type))
1505 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1506 element type (so we can use the alignment if it's not constant).
1507 Otherwise, compute the offset as an index by using a division. If the
1508 division isn't exact, then don't do anything. */
1509 elt_size = TYPE_SIZE_UNIT (elt_type);
1510 if (integer_zerop (offset))
1512 if (TREE_CODE (elt_size) != INTEGER_CST)
1513 elt_size = size_int (TYPE_ALIGN (elt_type));
1515 idx = integer_zero_node;
1519 unsigned HOST_WIDE_INT lquo, lrem;
1520 HOST_WIDE_INT hquo, hrem;
1522 if (TREE_CODE (elt_size) != INTEGER_CST
1523 || div_and_round_double (TRUNC_DIV_EXPR, 1,
1524 TREE_INT_CST_LOW (offset),
1525 TREE_INT_CST_HIGH (offset),
1526 TREE_INT_CST_LOW (elt_size),
1527 TREE_INT_CST_HIGH (elt_size),
1528 &lquo, &hquo, &lrem, &hrem)
1532 idx = build_int_cst_wide (NULL_TREE, lquo, hquo);
1535 /* Assume the low bound is zero. If there is a domain type, get the
1536 low bound, if any, convert the index into that type, and add the
1538 min_idx = integer_zero_node;
1539 if (TYPE_DOMAIN (array_type))
1541 if (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
1542 min_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (array_type));
1544 min_idx = fold_convert (TYPE_DOMAIN (array_type), min_idx);
1546 if (TREE_CODE (min_idx) != INTEGER_CST)
1549 idx = fold_convert (TYPE_DOMAIN (array_type), idx);
1550 elt_offset = fold_convert (TYPE_DOMAIN (array_type), elt_offset);
1553 if (!integer_zerop (min_idx))
1554 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1555 if (!integer_zerop (elt_offset))
1556 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1558 return build4 (ARRAY_REF, orig_type, base, idx, min_idx,
1559 size_int (tree_low_cst (elt_size, 1)
1560 / (TYPE_ALIGN_UNIT (elt_type))));
1564 /* A subroutine of fold_stmt_r. Attempts to fold *(S+O) to S.X.
1565 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1566 is the desired result type. */
1567 /* ??? This doesn't handle class inheritance. */
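
/* For example (illustrative layout only), given

	struct S { int x; int y; } s;

   with a 4-byte 'int', folding *(&s + 4) with ORIG_TYPE 'int' walks the
   FIELD_DECLs of S, matches the byte offset 4 against the position of
   'y', and produces the COMPONENT_REF s.y.  */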
1570 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1571 tree orig_type, bool base_is_ptr)
1573 tree f, t, field_type, tail_array_field, field_offset;
1575 if (TREE_CODE (record_type) != RECORD_TYPE
1576 && TREE_CODE (record_type) != UNION_TYPE
1577 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1580 /* Short-circuit silly cases. */
1581 if (lang_hooks.types_compatible_p (record_type, orig_type))
1584 tail_array_field = NULL_TREE;
1585 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1589 if (TREE_CODE (f) != FIELD_DECL)
1591 if (DECL_BIT_FIELD (f))
1594 field_offset = byte_position (f);
1595 if (TREE_CODE (field_offset) != INTEGER_CST)
1598 /* ??? Java creates "interesting" fields for representing base classes.
1599 They have no name, and have no context. With no context, we get into
1600 trouble with nonoverlapping_component_refs_p. Skip them. */
1601 if (!DECL_FIELD_CONTEXT (f))
1604 /* The previous array field isn't at the end. */
1605 tail_array_field = NULL_TREE;
1607 /* Check to see if this offset overlaps with the field. */
1608 cmp = tree_int_cst_compare (field_offset, offset);
1612 field_type = TREE_TYPE (f);
1614 /* Here we exactly match the offset being checked. If the types match,
1615 then we can return that field. */
1617 && lang_hooks.types_compatible_p (orig_type, field_type))
1620 base = build1 (INDIRECT_REF, record_type, base);
1621 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1625 /* Don't care about offsets into the middle of scalars. */
1626 if (!AGGREGATE_TYPE_P (field_type))
1629 /* Check for array at the end of the struct. This is often
	 used for flexible array members.  We should be able to
1631 turn this into an array access anyway. */
1632 if (TREE_CODE (field_type) == ARRAY_TYPE)
1633 tail_array_field = f;
1635 /* Check the end of the field against the offset. */
1636 if (!DECL_SIZE_UNIT (f)
1637 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1639 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1640 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1643 /* If we matched, then set offset to the displacement into
1649 if (!tail_array_field)
1652 f = tail_array_field;
1653 field_type = TREE_TYPE (f);
1654 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1657 /* If we get here, we've got an aggregate field, and a possibly
1658 nonzero offset into them. Recurse and hope for a valid match. */
1660 base = build1 (INDIRECT_REF, record_type, base);
1661 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1663 t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
1666 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1671 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1672 Return the simplified expression, or NULL if nothing could be done. */
1675 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1679 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1680 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1681 are sometimes added. */
1683 STRIP_TYPE_NOPS (base);
1684 TREE_OPERAND (expr, 0) = base;
1686 /* One possibility is that the address reduces to a string constant. */
1687 t = fold_read_from_constant_string (expr);
1691 /* Add in any offset from a PLUS_EXPR. */
1692 if (TREE_CODE (base) == PLUS_EXPR)
1696 offset2 = TREE_OPERAND (base, 1);
1697 if (TREE_CODE (offset2) != INTEGER_CST)
1699 base = TREE_OPERAND (base, 0);
1701 offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
1704 if (TREE_CODE (base) == ADDR_EXPR)
1706 /* Strip the ADDR_EXPR. */
1707 base = TREE_OPERAND (base, 0);
1709 /* Fold away CONST_DECL to its value, if the type is scalar. */
1710 if (TREE_CODE (base) == CONST_DECL
1711 && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
1712 return DECL_INITIAL (base);
1714 /* Try folding *(&B+O) to B[X]. */
1715 t = maybe_fold_offset_to_array_ref (base, offset, TREE_TYPE (expr));
1719 /* Try folding *(&B+O) to B.X. */
1720 t = maybe_fold_offset_to_component_ref (TREE_TYPE (base), base, offset,
1721 TREE_TYPE (expr), false);
1725 /* Fold *&B to B. We can only do this if EXPR is the same type
1726 as BASE. We can't do this if EXPR is the element type of an array
1727 and BASE is the array. */
1728 if (integer_zerop (offset)
1729 && lang_hooks.types_compatible_p (TREE_TYPE (base),
1735 /* We can get here for out-of-range string constant accesses,
1736 such as "_"[3]. Bail out of the entire substitution search
1737 and arrange for the entire statement to be replaced by a
1738 call to __builtin_trap. In all likelihood this will all be
1739 constant-folded away, but in the meantime we can't leave with
1740 something that get_expr_operands can't understand. */
1744 if (TREE_CODE (t) == ADDR_EXPR
1745 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
1747 /* FIXME: Except that this causes problems elsewhere with dead
1748 code not being deleted, and we die in the rtl expanders
1749 because we failed to remove some ssa_name. In the meantime,
1750 just return zero. */
1751 /* FIXME2: This condition should be signaled by
1752 fold_read_from_constant_string directly, rather than
1753 re-checking for it here. */
1754 return integer_zero_node;
1757 /* Try folding *(B+O) to B->X. Still an improvement. */
1758 if (POINTER_TYPE_P (TREE_TYPE (base)))
1760 t = maybe_fold_offset_to_component_ref (TREE_TYPE (TREE_TYPE (base)),
1762 TREE_TYPE (expr), true);
1768 /* Otherwise we had an offset that we could not simplify. */
1773 /* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
1775 A quaint feature extant in our address arithmetic is that there
1776 can be hidden type changes here. The type of the result need
1777 not be the same as the type of the input pointer.
1779 What we're after here is an expression of the form
1780 (T *)(&array + const)
1781 where the cast doesn't actually exist, but is implicit in the
   type of the PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
1787 maybe_fold_stmt_addition (tree expr)
1789 tree op0 = TREE_OPERAND (expr, 0);
1790 tree op1 = TREE_OPERAND (expr, 1);
1791 tree ptr_type = TREE_TYPE (expr);
1794 bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
1796 /* We're only interested in pointer arithmetic. */
1797 if (!POINTER_TYPE_P (ptr_type))
1799 /* Canonicalize the integral operand to op1. */
1800 if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
1804 t = op0, op0 = op1, op1 = t;
1806 /* It had better be a constant. */
1807 if (TREE_CODE (op1) != INTEGER_CST)
1809 /* The first operand should be an ADDR_EXPR. */
1810 if (TREE_CODE (op0) != ADDR_EXPR)
1812 op0 = TREE_OPERAND (op0, 0);
1814 /* If the first operand is an ARRAY_REF, expand it so that we can fold
1815 the offset into it. */
1816 while (TREE_CODE (op0) == ARRAY_REF)
1818 tree array_obj = TREE_OPERAND (op0, 0);
1819 tree array_idx = TREE_OPERAND (op0, 1);
1820 tree elt_type = TREE_TYPE (op0);
1821 tree elt_size = TYPE_SIZE_UNIT (elt_type);
1824 if (TREE_CODE (array_idx) != INTEGER_CST)
1826 if (TREE_CODE (elt_size) != INTEGER_CST)
1829 /* Un-bias the index by the min index of the array type. */
1830 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
1833 min_idx = TYPE_MIN_VALUE (min_idx);
1836 if (TREE_CODE (min_idx) != INTEGER_CST)
1839 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
1840 if (!integer_zerop (min_idx))
1841 array_idx = int_const_binop (MINUS_EXPR, array_idx,
1846 /* Convert the index to a byte offset. */
1847 array_idx = fold_convert (sizetype, array_idx);
1848 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
1850 /* Update the operands for the next round, or for folding. */
1851 /* If we're manipulating unsigned types, then folding into negative
1852 values can produce incorrect results. Particularly if the type
1853 is smaller than the width of the pointer. */
1855 && TYPE_UNSIGNED (TREE_TYPE (op1))
1856 && tree_int_cst_lt (array_idx, op1))
1858 op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
1864 /* If we weren't able to fold the subtraction into another array reference,
1865 canonicalize the integer for passing to the array and component ref
1866 simplification functions. */
1869 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
1871 op1 = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1);
1872 /* ??? In theory fold should always produce another integer. */
1873 if (op1 == NULL || TREE_CODE (op1) != INTEGER_CST)
1877 ptd_type = TREE_TYPE (ptr_type);
1879 /* At which point we can try some of the same things as for indirects. */
1880 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
1882 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
1885 t = build1 (ADDR_EXPR, ptr_type, t);
/* For passing state through walk_tree into fold_stmt_r and its
   children.  */
1893 struct fold_stmt_r_data
1896 bool *inside_addr_expr_p;
1899 /* Subroutine of fold_stmt called via walk_tree. We perform several
1900 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
1903 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
1905 struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
1906 bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
1907 bool *changed_p = fold_stmt_r_data->changed_p;
1908 tree expr = *expr_p, t;
1910 /* ??? It'd be nice if walk_tree had a pre-order option. */
1911 switch (TREE_CODE (expr))
1914 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1919 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
1923 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
1924 We'd only want to bother decomposing an existing ARRAY_REF if
1925 the base array is found to have another offset contained within.
1926 Otherwise we'd be wasting time. */
1928 /* If we are not processing expressions found within an
1929 ADDR_EXPR, then we can fold constant array references. */
1930 if (!*inside_addr_expr_p)
1931 t = fold_read_from_constant_string (expr);
1937 *inside_addr_expr_p = true;
1938 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1939 *inside_addr_expr_p = false;
1944 /* Set TREE_INVARIANT properly so that the value is properly
1945 considered constant, and so gets propagated as expected. */
1947 recompute_tree_invariant_for_addr_expr (expr);
1952 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1955 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
1960 t = maybe_fold_stmt_addition (expr);
1964 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
1969 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
1970 We've already checked that the records are compatible, so we should
1971 come up with a set of compatible fields. */
1973 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
1974 tree expr_field = TREE_OPERAND (expr, 1);
1976 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
1978 expr_field = find_compatible_field (expr_record, expr_field);
1979 TREE_OPERAND (expr, 1) = expr_field;
1984 case TARGET_MEM_REF:
1985 t = maybe_fold_tmr (expr);
1989 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
1991 tree op0 = TREE_OPERAND (expr, 0);
1992 tree tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
1993 TREE_OPERAND (op0, 0),
1994 TREE_OPERAND (op0, 1));
1995 if (tem && set_rhs (expr_p, tem))
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
2019 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2020 is not NULL and, for TYPE == 0, its value is not equal to the length
2021 we determine or if we are unable to determine the length or value,
2022 return false. VISITED is a bitmap of visited variables.
2023 TYPE is 0 if string length should be returned, 1 for maximum string
2024 length and 2 for maximum value ARG can have. */
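
/* Hypothetical example: if ARG is p_1 defined by p_1 = PHI <"ab", "wxyz">,
   then asking for the maximum string length (TYPE == 1) yields 4, while
   asking for the exact string length (TYPE == 0) fails because the two
   possible lengths (2 and 4) differ.  */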
2027 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2029 tree var, def_stmt, val;
2031 if (TREE_CODE (arg) != SSA_NAME)
2036 if (TREE_CODE (val) != INTEGER_CST
2037 || tree_int_cst_sgn (val) < 0)
2041 val = c_strlen (arg, 1);
2049 if (TREE_CODE (*length) != INTEGER_CST
2050 || TREE_CODE (val) != INTEGER_CST)
2053 if (tree_int_cst_lt (*length, val))
2057 else if (simple_cst_equal (val, *length) != 1)
2065 /* If we were already here, break the infinite cycle. */
2066 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2068 bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
2071 def_stmt = SSA_NAME_DEF_STMT (var);
2073 switch (TREE_CODE (def_stmt))
2079 /* The RHS of the statement defining VAR must either have a
2080 constant length or come from another SSA_NAME with a constant
2082 rhs = TREE_OPERAND (def_stmt, 1);
2084 return get_maxval_strlen (rhs, length, visited, type);
2089 /* All the arguments of the PHI node must have the same constant
2093 for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
2095 tree arg = PHI_ARG_DEF (def_stmt, i);
2097 /* If this PHI has itself as an argument, we cannot
2098 determine the string length of this argument. However,
2099 if we can find a constant string length for the other
2100 PHI args then we can still be sure that this is a
2101 constant string length. So be optimistic and just
2102 continue with the next argument. */
2103 if (arg == PHI_RESULT (def_stmt))
2106 if (!get_maxval_strlen (arg, length, visited, type))
2122 /* Fold builtin call FN in statement STMT. If it cannot be folded into a
2123 constant, return NULL_TREE. Otherwise, return its constant value. */
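
/* Hypothetical example: for a statement such as 'n_2 = strlen (p_1)'
   where the use-def chain of p_1 shows that it always points to a
   string of length 3, the call can be folded to the constant 3, which
   is what the BUILT_IN_STRLEN case below does when the computed length
   is a valid GIMPLE value.  */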
2126 ccp_fold_builtin (tree stmt, tree fn)
2128 tree result, val[3];
2129 tree callee, arglist, a;
2130 int arg_mask, i, type;
2134 ignore = TREE_CODE (stmt) != MODIFY_EXPR;
2136 /* First try the generic builtin folder. If that succeeds, return the
2138 callee = get_callee_fndecl (fn);
2139 arglist = TREE_OPERAND (fn, 1);
2140 result = fold_builtin (callee, arglist, ignore);
2144 STRIP_NOPS (result);
2148 /* Ignore MD builtins. */
2149 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2152 /* If the builtin could not be folded, and it has no argument list,
2157 /* Limit the work only for builtins we know how to simplify. */
2158 switch (DECL_FUNCTION_CODE (callee))
2160 case BUILT_IN_STRLEN:
2161 case BUILT_IN_FPUTS:
2162 case BUILT_IN_FPUTS_UNLOCKED:
2166 case BUILT_IN_STRCPY:
2167 case BUILT_IN_STRNCPY:
2171 case BUILT_IN_MEMCPY_CHK:
2172 case BUILT_IN_MEMPCPY_CHK:
2173 case BUILT_IN_MEMMOVE_CHK:
2174 case BUILT_IN_MEMSET_CHK:
2175 case BUILT_IN_STRNCPY_CHK:
2179 case BUILT_IN_STRCPY_CHK:
2180 case BUILT_IN_STPCPY_CHK:
2184 case BUILT_IN_SNPRINTF_CHK:
2185 case BUILT_IN_VSNPRINTF_CHK:
2193 /* Try to use the dataflow information gathered by the CCP process. */
2194 visited = BITMAP_ALLOC (NULL);
2196 memset (val, 0, sizeof (val));
2197 for (i = 0, a = arglist;
2199 i++, arg_mask >>= 1, a = TREE_CHAIN (a))
2202 bitmap_clear (visited);
2203 if (!get_maxval_strlen (TREE_VALUE (a), &val[i], visited, type))
2207 BITMAP_FREE (visited);
2210 switch (DECL_FUNCTION_CODE (callee))
2212 case BUILT_IN_STRLEN:
2215 tree new = fold_convert (TREE_TYPE (fn), val[0]);
2217 /* If the result is not a valid gimple value, or not a cast
2218 of a valid gimple value, then we can not use the result. */
2219 if (is_gimple_val (new)
2220 || (is_gimple_cast (new)
2221 && is_gimple_val (TREE_OPERAND (new, 0))))
2226 case BUILT_IN_STRCPY:
2227 if (val[1] && is_gimple_val (val[1]))
2228 result = fold_builtin_strcpy (callee, arglist, val[1]);
2231 case BUILT_IN_STRNCPY:
2232 if (val[1] && is_gimple_val (val[1]))
2233 result = fold_builtin_strncpy (callee, arglist, val[1]);
2236 case BUILT_IN_FPUTS:
2237 result = fold_builtin_fputs (arglist,
2238 TREE_CODE (stmt) != MODIFY_EXPR, 0,
2242 case BUILT_IN_FPUTS_UNLOCKED:
2243 result = fold_builtin_fputs (arglist,
2244 TREE_CODE (stmt) != MODIFY_EXPR, 1,
2248 case BUILT_IN_MEMCPY_CHK:
2249 case BUILT_IN_MEMPCPY_CHK:
2250 case BUILT_IN_MEMMOVE_CHK:
2251 case BUILT_IN_MEMSET_CHK:
2252 if (val[2] && is_gimple_val (val[2]))
2253 result = fold_builtin_memory_chk (callee, arglist, val[2], ignore,
2254 DECL_FUNCTION_CODE (callee));
2257 case BUILT_IN_STRCPY_CHK:
2258 case BUILT_IN_STPCPY_CHK:
2259 if (val[1] && is_gimple_val (val[1]))
2260 result = fold_builtin_stxcpy_chk (callee, arglist, val[1], ignore,
2261 DECL_FUNCTION_CODE (callee));
2264 case BUILT_IN_STRNCPY_CHK:
2265 if (val[2] && is_gimple_val (val[2]))
2266 result = fold_builtin_strncpy_chk (arglist, val[2]);
2269 case BUILT_IN_SNPRINTF_CHK:
2270 case BUILT_IN_VSNPRINTF_CHK:
2271 if (val[1] && is_gimple_val (val[1]))
2272 result = fold_builtin_snprintf_chk (arglist, val[1],
2273 DECL_FUNCTION_CODE (callee));
2280 if (result && ignore)
2281 result = fold_ignored_result (result);
2286 /* Fold the statement pointed to by STMT_P. In some cases, this function may
2287 replace the whole statement with a new one. Returns true iff folding
2288 makes any changes. */
2291 fold_stmt (tree *stmt_p)
2293 tree rhs, result, stmt;
2294 struct fold_stmt_r_data fold_stmt_r_data;
2295 bool changed = false;
2296 bool inside_addr_expr = false;
2298 fold_stmt_r_data.changed_p = &changed;
2299 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2303 /* If we replaced constants and the statement makes pointer dereferences,
2304 then we may need to fold instances of *&VAR into VAR, etc. */
2305 if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
2308 = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
2313 rhs = get_rhs (stmt);
2318 if (TREE_CODE (rhs) == CALL_EXPR)
2322 /* Check for builtins that CCP can handle using information not
2323 available in the generic fold routines. */
2324 callee = get_callee_fndecl (rhs);
2325 if (callee && DECL_BUILT_IN (callee))
2326 result = ccp_fold_builtin (stmt, rhs);
2329 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
     here are when we've propagated the address of a decl into the
     object slot.  */
2332 /* ??? Should perhaps do this in fold proper. However, doing it
2333 there requires that we create a new CALL_EXPR, and that requires
2334 copying EH region info to the new node. Easier to just do it
2335 here where we can just smash the call operand. Also
2336 CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
     copied, fold_ternary does not have that information.  */
2338 callee = TREE_OPERAND (rhs, 0);
2339 if (TREE_CODE (callee) == OBJ_TYPE_REF
2340 && lang_hooks.fold_obj_type_ref
2341 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2342 && DECL_P (TREE_OPERAND
2343 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2347 /* ??? Caution: Broken ADDR_EXPR semantics means that
2348 looking at the type of the operand of the addr_expr
2349 can yield an array type. See silly exception in
2350 check_pointer_types_r. */
2352 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2353 t = lang_hooks.fold_obj_type_ref (callee, t);
2356 TREE_OPERAND (rhs, 0) = t;
2363 /* If we couldn't fold the RHS, hand over to the generic fold routines. */
2364 if (result == NULL_TREE)
2365 result = fold (rhs);
2367 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR that
2368 may have been added by fold, and "useless" type conversions that might
2369 now be apparent due to propagation. */
2370 STRIP_USELESS_TYPE_CONVERSION (result);
2373 changed |= set_rhs (stmt_p, result);
2378 /* Perform the minimal folding on statement STMT. Only operations like
2379 *&x created by constant propagation are handled. The statement cannot
2380 be replaced with a new one. */
2383 fold_stmt_inplace (tree stmt)
2385 tree old_stmt = stmt, rhs, new_rhs;
2386 struct fold_stmt_r_data fold_stmt_r_data;
2387 bool changed = false;
2388 bool inside_addr_expr = false;
2390 fold_stmt_r_data.changed_p = &changed;
2391 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2393 walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL);
2394 gcc_assert (stmt == old_stmt);
2396 rhs = get_rhs (stmt);
2397 if (!rhs || rhs == stmt)
2400 new_rhs = fold (rhs);
2401 STRIP_USELESS_TYPE_CONVERSION (new_rhs);
2405 changed |= set_rhs (&stmt, new_rhs);
2406 gcc_assert (stmt == old_stmt);
2411 /* Convert EXPR into a GIMPLE value suitable for substitution on the
2412 RHS of an assignment. Insert the necessary statements before
2414 When IGNORE is set, don't worry about the return value. */
2417 convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore)
2419 tree_stmt_iterator ti;
2420 tree stmt = bsi_stmt (*si_p);
2421 tree tmp, stmts = NULL;
2423 push_gimplify_context ();
2426 tmp = build_empty_stmt ();
2427 gimplify_and_add (expr, &stmts);
2430 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
2431 pop_gimplify_context (NULL);
2433 if (EXPR_HAS_LOCATION (stmt))
2434 annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));
2436 /* The replacement can expose previously unreferenced variables. */
2437 for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
2439 tree new_stmt = tsi_stmt (ti);
2440 find_new_referenced_vars (tsi_stmt_ptr (ti));
2441 bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
2442 mark_new_vars_to_rename (bsi_stmt (*si_p));
2450 /* A simple pass that attempts to fold all builtin functions. This pass
2451 is run after we've propagated as many constants as we can. */
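
/* Hypothetical example: a leftover call 'b_3 = __builtin_constant_p (x_2)'
   that was not already folded to integer_one_node is resolved to 0 here,
   on the grounds that if the argument had been constant it would have
   been proven so during propagation.  */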
2454 execute_fold_all_builtins (void)
2456 bool cfg_changed = false;
2460 block_stmt_iterator i;
2461 for (i = bsi_start (bb); !bsi_end_p (i); )
2463 tree *stmtp = bsi_stmt_ptr (i);
2464 tree old_stmt = *stmtp;
2465 tree call = get_rhs (*stmtp);
2466 tree callee, result;
2467 enum built_in_function fcode;
2469 if (!call || TREE_CODE (call) != CALL_EXPR)
2474 callee = get_callee_fndecl (call);
2475 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
2480 fcode = DECL_FUNCTION_CODE (callee);
2482 result = ccp_fold_builtin (*stmtp, call);
2484 switch (DECL_FUNCTION_CODE (callee))
2486 case BUILT_IN_CONSTANT_P:
2487 /* Resolve __builtin_constant_p. If it hasn't been
2488 folded to integer_one_node by now, it's fairly
2489 certain that the value simply isn't constant. */
2490 result = integer_zero_node;
2498 if (dump_file && (dump_flags & TDF_DETAILS))
2500 fprintf (dump_file, "Simplified\n ");
2501 print_generic_stmt (dump_file, *stmtp, dump_flags);
2504 if (!set_rhs (stmtp, result))
2506 result = convert_to_gimple_builtin (&i, result,
2507 TREE_CODE (old_stmt)
2511 bool ok = set_rhs (stmtp, result);
2516 mark_new_vars_to_rename (*stmtp);
2517 if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
2518 && tree_purge_dead_eh_edges (bb))
2521 if (dump_file && (dump_flags & TDF_DETAILS))
2523 fprintf (dump_file, "to\n ");
2524 print_generic_stmt (dump_file, *stmtp, dump_flags);
2525 fprintf (dump_file, "\n");
2528 /* Retry the same statement if it changed into another
2529 builtin, there might be new opportunities now. */
2530 call = get_rhs (*stmtp);
2531 if (!call || TREE_CODE (call) != CALL_EXPR)
2536 callee = get_callee_fndecl (call);
2538 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2539 || DECL_FUNCTION_CODE (callee) == fcode)
2544 /* Delete unreachable blocks. */
2546 cleanup_tree_cfg ();
2551 struct tree_opt_pass pass_fold_builtins =
2555 execute_fold_all_builtins, /* execute */
2558 0, /* static_pass_number */
2560 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
2561 0, /* properties_provided */
2562 0, /* properties_destroyed */
2563 0, /* todo_flags_start */
2566 | TODO_update_ssa, /* todo_flags_finish */