/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited and simulated again.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:
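
			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;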
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.


   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:
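
	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }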
   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,
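
	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;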
   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:
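
	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;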
	  # A_5 = PHI (A_4, A_2);
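	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}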
   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
/* Possible lattice values.  */
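typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;
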
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  tree cst_val;

  if (!is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have
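
     x = 0.0 * y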

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (gimple_assign_single_p (stmt)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return true;

  /* If it is a call that does not return a value, or calls a known
     function that is not a builtin, it is varying.  (Builtins and
     indirect calls may still fold to a constant.)  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      /* VAL1 already contains the (equal) constant value we want.  */
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p
			  (TREE_TYPE (TREE_OPERAND (*base, 0)),
			   TREE_TYPE (val->value))
			  && useless_type_conversion_p
			  (TREE_TYPE (*base),
			   TREE_TYPE (TREE_OPERAND (val->value, 0))))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (VIEW_CONVERT_EXPR,
					   TREE_TYPE (rhs), val->value);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);

	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree res;

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      res = fold_unary (subcode, gimple_expr_type (stmt), op0);

	      /* If the operation was a conversion do _not_ mark a
		 resulting constant with TREE_OVERFLOW if the original
		 constant was not.  These conversions have implementation
		 defined behavior and retaining the TREE_OVERFLOW flag
		 here would confuse later passes such as VRP.  */
	      if (res
		  && TREE_CODE (res) == INTEGER_CST
		  && TREE_CODE (op0) == INTEGER_CST
		  && CONVERT_EXPR_CODE_P (subcode))
		TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

	      return res;
	    }
	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  ;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into it.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF picks e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || (TREE_CODE (t) == COMPONENT_REF
		  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
2134 /* For passing state through walk_tree into fold_stmt_r and its
2137 struct fold_stmt_r_data
2141 bool *inside_addr_expr_p;
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            COND_EXPR_COND (expr) = tem;
        }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
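
/* For illustration (an added example, not from the original sources):
   given the GIMPLE fragment

       int a[8];
       p_1 = &a[0];
       x_2 = *p_1;

   the INDIRECT_REF case above can rewrite the load *p_1 into the
   array reference a[0], and the POINTER_PLUS_EXPR case can fold an
   expression such as &a[0] + 4 into &a[1], assuming a 4-byte
   element type.  */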
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited,
                                     type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;
              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
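
/* A minimal usage sketch for get_maxval_strlen (a hypothetical caller;
   this mirrors what ccp_fold_builtin does below):

       tree len = NULL_TREE;
       bitmap visited = BITMAP_ALLOC (NULL);
       if (get_maxval_strlen (arg, &len, visited, 0))
         ... LEN now holds the constant string length of ARG ...
       BITMAP_FREE (visited);
*/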
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;

    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;

    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0; i < nargs; i++)
    if ((arg_mask >> i) & 1)
      {
        a = gimple_call_arg (stmt, i);
        bitmap_clear (visited);
        if (!get_maxval_strlen (a, &val[i], visited, type))
          val[i] = NULL_TREE;
      }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      break;
    }

  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
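
/* Example of the effect of ccp_fold_builtin (illustrative only):
   once CCP has propagated a constant string into a call such as

       x_1 = __builtin_strlen ("hello");

   the call folds to the constant 5, and a call like

       __builtin_strcpy (dst_2, "hi");

   can be rewritten in terms of a fixed-size copy because the source
   length is known.  */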
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs),
                                    COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return (subcode == gimple_assign_rhs_code (stmt)
                  ? NULL_TREE : gimple_assign_rhs1 (stmt));
      }

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        result = maybe_fold_stmt_addition (
                   TREE_TYPE (gimple_assign_lhs (stmt)),
                   gimple_assign_rhs1 (stmt),
                   gimple_assign_rhs2 (stmt));

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
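
/* For instance (a hypothetical example), after CCP substitutes
   constants into an assignment, fold_gimple_assign can simplify

       x_3 = 3 + 4;    into    x_3 = 7;

   and a pointer conversion such as p_5 = (int *) &a may fold into an
   address like &a[0] via maybe_fold_offset_to_address.  */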
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
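
/* Example (illustrative): a GIMPLE_COND such as

       if (4 > 3) goto L1; else goto L2;

   folds to the constant predicate

       if (1 != 0) goto L1; else goto L2;

   Any resulting CFG simplification (removal of the dead edge) is left
   to later cleanup, as noted above.  */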
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
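
/* Illustration (a hypothetical C++ case): if earlier propagation has
   replaced the object of a virtual call with the address of a decl of
   known static type, e.g.

       struct B b;
       OBJ_TYPE_REF (...; &b) (&b);

   the language hook can resolve the OBJ_TYPE_REF to the concrete
   method, turning the indirect call into a direct one.  */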
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
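
/* A minimal, hypothetical use of fold_stmt from another pass:

       gimple_stmt_iterator gsi;
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         if (fold_stmt (&gsi))
           update_stmt (gsi_stmt (gsi));
*/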
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */
bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
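
/* Example (illustrative C source): in

       void f (int n)
       {
         {
           char buf[n];
           use (buf);
         }
       }

   the variable-length array forces gimplification to emit
   sp_1 = __builtin_stack_save () before BUF is created and
   __builtin_stack_restore (sp_1) when it goes out of scope.  If the
   restore is followed only by the function exit (or by another
   restore), the pair is removed as described above.  */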
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   optimize __builtin_va_end (&ap) out as a NOP and optimize
   __builtin_va_copy into a simple pointer assignment.  */
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
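
/* Illustration, assuming a target whose va_list is a simple char or
   void pointer (this does not hold for, e.g., x86-64):

       __builtin_va_start (&ap, 0)   ==>   ap = __builtin_next_arg (0);
       __builtin_va_copy (&d, s)     ==>   d = s;
       __builtin_va_end (&ap)        ==>   (statement deleted)
*/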
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
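
/* For example (hypothetical): if ccp_fold_builtin rewrites

       x_1 = __builtin_strcpy (dst_2, src_3);

   into an expression that is not a valid GIMPLE rhs, the expression is
   gimplified here into a short statement sequence inserted before the
   call, and the call itself becomes  x_1 = tmp  (or a GIMPLE_NOP when
   the result is unused).  */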
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin; there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};