1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the following four values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
44 CONSTANT -> V_i has been found to hold a constant
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
113 For instance, consider the following code fragment:
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
158 # A_5 = PHI (A_4, A_2);
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
190 #include "coretypes.h"
197 #include "basic-block.h"
200 #include "function.h"
201 #include "diagnostic.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
213 /* Possible lattice values. */
/* NOTE(review): the ccp_lattice_t enum this comment introduces appears to
   be elided from this listing; the lattice states used throughout are
   UNINITIALIZED, UNDEFINED, CONSTANT and VARYING.  */
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
226 memory reference used to store (i.e., the LHS of the assignment ... */
228 static prop_value_t *const_val;
230 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
/* NOTE(review): the function's braces and the switch's case labels (one
   per lattice state) appear elided from this listing.  Each fprintf below
   prints one lattice state; the CONSTANT case additionally prints the
   associated tree value via print_generic_expr.  */
233 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
235 switch (val.lattice_val)
238 fprintf (outf, "%sUNINITIALIZED", prefix);
241 fprintf (outf, "%sUNDEFINED", prefix);
244 fprintf (outf, "%sVARYING", prefix);
247 fprintf (outf, "%sCONSTANT ", prefix);
248 print_generic_expr (outf, val.value, dump_flags);
256 /* Print lattice value VAL to stderr. */
258 void debug_lattice_value (prop_value_t val);
/* Convenience entry point for use from a debugger: dumps VAL followed by
   a newline to stderr.  NOTE(review): the braces appear elided from this
   listing.  */
261 debug_lattice_value (prop_value_t val)
263 dump_lattice_value (stderr, "", val);
264 fprintf (stderr, "\n");
269 /* If SYM is a constant variable with known value, return the value.
270 NULL_TREE is returned otherwise. */
/* NOTE(review): several condition/brace/return lines appear elided.  The
   visible logic: for a static, read-only SYM, return DECL_INITIAL when it
   is a GIMPLE minimum invariant; otherwise, for a locally-bound 'const'
   of integral or scalar-float type with no initializer, return zero of
   SYM's type (such variables may not be overridden at link/run time).  */
273 get_symbol_constant_value (tree sym)
275 if (TREE_STATIC (sym)
276 && TREE_READONLY (sym)
279 tree val = DECL_INITIAL (sym);
282 STRIP_USELESS_TYPE_CONVERSION (val);
283 if (is_gimple_min_invariant (val))
286 /* Variables declared 'const' without an initializer
287 have zero as the initializer if they may not be
288 overridden at link or run time. */
290 && targetm.binds_local_p (sym)
291 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
292 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
293 return fold_convert (TREE_TYPE (sym), integer_zero_node);
299 /* Compute a default value for variable VAR and store it in the
300 CONST_VAL array. The following rules are used to get default
303 1- Global and static variables that are declared constant are
306 2- Any other value is considered UNDEFINED. This is useful when
307 considering PHI nodes. PHI arguments that are undefined do not
308 change the constant value of the PHI node, which allows for more
309 constants to be propagated.
311 3- Variables defined by statements other than assignments and PHI
312 nodes are considered VARYING.
314 4- Initial values of variables that are not GIMPLE registers are
315 considered VARYING. */
/* NOTE(review): braces, the cst_val declaration, the `else` keywords and
   the final `return val;` appear elided from this listing; the visible
   assignments implement the four rules described above.  */
318 get_default_value (tree var)
320 tree sym = SSA_NAME_VAR (var);
321 prop_value_t val = { UNINITIALIZED, NULL_TREE };
324 if (!is_gimple_reg (var))
326 /* Short circuit for regular CCP. We are not interested in any
327 non-register when DO_STORE_CCP is false. */
328 val.lattice_val = VARYING;
330 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
332 /* Globals and static variables declared 'const' take their
   initial value. ... */
334 val.lattice_val = CONSTANT;
339 gimple stmt = SSA_NAME_DEF_STMT (var);
341 if (gimple_nop_p (stmt))
343 /* Variables defined by an empty statement are those used
344 before being initialized. If VAR is a local variable, we
345 can assume initially that it is UNDEFINED, otherwise we must
346 consider it VARYING. */
347 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
348 val.lattice_val = UNDEFINED;
350 val.lattice_val = VARYING;
352 else if (is_gimple_assign (stmt)
353 /* Value-returning GIMPLE_CALL statements assign to
354 a variable, and are treated similarly to GIMPLE_ASSIGN. */
355 || (is_gimple_call (stmt)
356 && gimple_call_lhs (stmt) != NULL_TREE)
357 || gimple_code (stmt) == GIMPLE_PHI)
359 /* Any other variable defined by an assignment or a PHI node
360 is considered UNDEFINED. */
361 val.lattice_val = UNDEFINED;
365 /* Otherwise, VAR will never take on a constant value. */
366 val.lattice_val = VARYING;
374 /* Get the constant value associated with variable VAR. */
/* Lazily computes VAR's default lattice value on first access.
   NOTE(review): the function name line, braces, the `val` declaration
   and the final `return val;` appear elided from this listing.  */
376 static inline prop_value_t *
381 if (const_val == NULL)
384 val = &const_val[SSA_NAME_VERSION (var)];
385 if (val->lattice_val == UNINITIALIZED)
386 *val = get_default_value (var);
391 /* Sets the value associated with VAR to VARYING. */
/* Writes the lattice cell directly (bypassing set_lattice_value and its
   monotonicity assertion).  NOTE(review): braces appear elided from this
   listing.  */
394 set_value_varying (tree var)
396 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
398 val->lattice_val = VARYING;
399 val->value = NULL_TREE;
402 /* For float types, modify the value of VAL to make ccp work correctly
403 for non-standard values (-0, NaN):
405 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
406 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
407 This is to fix the following problem (see PR 29921): Suppose we have
411 and we set value of y to NaN. This causes value of x to be set to NaN.
412 When we later determine that y is in fact VARYING, fold uses the fact
413 that HONOR_NANS is false, and we try to change the value of x to 0,
414 causing an ICE. With HONOR_NANS being false, the real appearance of
415 NaN would cause undefined behavior, though, so claiming that y (and x)
416 are UNDEFINED initially is correct. */
/* NOTE(review): braces, early returns and the declarations of `d` and
   `type` appear elided.  Only CONSTANT REAL_CST values are touched.  */
419 canonicalize_float_value (prop_value_t *val)
421 enum machine_mode mode;
425 if (val->lattice_val != CONSTANT
426 || TREE_CODE (val->value) != REAL_CST)
429 d = TREE_REAL_CST (val->value);
430 type = TREE_TYPE (val->value);
431 mode = TYPE_MODE (type);
433 if (!HONOR_SIGNED_ZEROS (mode)
434 && REAL_VALUE_MINUS_ZERO (d))
436 val->value = build_real (type, dconst0);
440 if (!HONOR_NANS (mode)
441 && REAL_VALUE_ISNAN (d))
443 val->lattice_val = UNDEFINED;
449 /* Set the value for variable VAR to NEW_VAL. Return true if the new
450 value is different from VAR's previous value. */
/* NOTE(review): braces, the *OLD_VAL store and the return statements
   appear elided.  The gcc_assert enforces that lattice transitions are
   monotonically increasing (UNINITIALIZED < UNDEFINED < CONSTANT <
   VARYING, per the lattice ordering described at the top of the file).  */
453 set_lattice_value (tree var, prop_value_t new_val)
455 prop_value_t *old_val = get_value (var);
457 canonicalize_float_value (&new_val);
459 /* Lattice transitions must always be monotonically increasing in
460 value. If *OLD_VAL and NEW_VAL are the same, return false to
461 inform the caller that this was a non-transition. */
463 gcc_assert (old_val->lattice_val < new_val.lattice_val
464 || (old_val->lattice_val == new_val.lattice_val
465 && ((!old_val->value && !new_val.value)
466 || operand_equal_p (old_val->value, new_val.value, 0))));
468 if (old_val->lattice_val != new_val.lattice_val)
470 if (dump_file && (dump_flags & TDF_DETAILS))
472 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
473 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
/* A value that has changed can no longer be UNDEFINED (transitions only
   go upward in the lattice).  */
478 gcc_assert (new_val.lattice_val != UNDEFINED);
486 /* Return the likely CCP lattice value for STMT.
488 If STMT has no operands, then return CONSTANT.
490 Else if undefinedness of operands of STMT cause its value to be
491 undefined, then return UNDEFINED.
493 Else if any operands of STMT are constants, then return CONSTANT.
495 Else return VARYING. */
/* NOTE(review): many braces, `return` statements, declarations (use,
   iter) and case labels appear elided from this listing; the comments
   below describe only the visible logic.  */
498 likely_value (gimple stmt)
500 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
504 enum gimple_code code = gimple_code (stmt);
506 /* This function appears to be called only for assignments, calls,
507 conditionals, and switches, due to the logic in visit_stmt. */
508 gcc_assert (code == GIMPLE_ASSIGN
509 || code == GIMPLE_CALL
510 || code == GIMPLE_COND
511 || code == GIMPLE_SWITCH)
513 /* If the statement has volatile operands, it won't fold to a
   constant value. ... */
515 if (gimple_has_volatile_ops (stmt))
518 /* If we are not doing store-ccp, statements with loads
519 and/or stores will never fold into a constant. */
520 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
523 /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
524 is_gimple_min_invariant, so we do not consider calls or
525 other forms of assignment. */
526 if (gimple_assign_single_p (stmt)
527 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
530 if (code == GIMPLE_COND
531 && is_gimple_min_invariant (gimple_cond_lhs (stmt))
532 && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
535 if (code == GIMPLE_SWITCH
536 && is_gimple_min_invariant (gimple_switch_index (stmt)))
539 /* Arrive here for more complex cases. */
541 has_constant_operand = false;
542 has_undefined_operand = false;
543 all_undefined_operands = true;
544 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
546 prop_value_t *val = get_value (use);
548 if (val->lattice_val == UNDEFINED)
549 has_undefined_operand = true;
551 all_undefined_operands = false;
553 if (val->lattice_val == CONSTANT)
554 has_constant_operand = true;
557 /* If the operation combines operands like COMPLEX_EXPR make sure to
558 not mark the result UNDEFINED if only one part of the result is
   undefined. ... */
560 if (has_undefined_operand && all_undefined_operands)
562 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
564 switch (gimple_assign_rhs_code (stmt))
566 /* Unary operators are handled with all_undefined_operands. */
569 case POINTER_PLUS_EXPR:
570 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
571 Not bitwise operators, one VARYING operand may specify the
572 result completely. Not logical operators for the same reason.
573 Not COMPLEX_EXPR as one VARYING operand makes the result partly
574 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
575 the undefined operand may be promoted. */
582 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
583 fall back to VARYING even if there were CONSTANT operands. */
584 if (has_undefined_operand)
587 if (has_constant_operand
588 /* We do not consider virtual operands here -- load from read-only
589 memory may have only VARYING virtual operands, but still be
   constant. ... */
591 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
597 /* Returns true if STMT cannot be constant. */
/* NOTE(review): braces and the `return true/false` statements appear
   elided; each visible `if` corresponds to one reason STMT must be
   treated as VARYING.  */
600 surely_varying_stmt_p (gimple stmt)
602 /* If the statement has operands that we cannot handle, it cannot be
   constant. ... */
604 if (gimple_has_volatile_ops (stmt))
607 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
610 /* If it is a call and does not return a value or is not a
611 builtin and not an indirect call, it is varying. */
612 if (is_gimple_call (stmt))
615 if (!gimple_call_lhs (stmt)
616 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
617 && !DECL_BUILT_IN (fndecl)))
621 /* Anything other than assignments and conditional jumps are not
622 interesting for CCP. */
623 if (gimple_code (stmt) != GIMPLE_ASSIGN
624 && gimple_code (stmt) != GIMPLE_COND
625 && gimple_code (stmt) != GIMPLE_SWITCH
626 && gimple_code (stmt) != GIMPLE_CALL)
632 /* Initialize local data structures for CCP. */
/* Allocates the zero-initialized CONST_VAL lattice (one cell per SSA
   name; UNINITIALIZED == 0) and sets simulation flags on statements and
   PHIs for the propagation engine.  NOTE(review): braces, the
   FOR_EACH_BB loops and declarations (def, iter) appear elided.  */
635 ccp_initialize (void)
639 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
641 /* Initialize simulation flags for PHI nodes and statements. */
644 gimple_stmt_iterator i;
646 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
648 gimple stmt = gsi_stmt (i);
649 bool is_varying = surely_varying_stmt_p (stmt);
656 /* If the statement will not produce a constant, mark
657 all its outputs VARYING. */
658 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
661 set_value_varying (def);
664 prop_set_simulate_again (stmt, !is_varying);
668 /* Now process PHI nodes. We never clear the simulate_again flag on
669 phi nodes, since we do not know which edges are executable yet,
670 except for phi nodes for virtual operands when we do not do store ccp. */
673 gimple_stmt_iterator i;
675 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
677 gimple phi = gsi_stmt (i);
679 if (!is_gimple_reg (gimple_phi_result (phi)))
680 prop_set_simulate_again (phi, false);
682 prop_set_simulate_again (phi, true);
688 /* Do final substitution of propagated values, cleanup the flowgraph and
689 free allocated storage.
691 Return TRUE when something was optimized. */
/* NOTE(review): the function header and the code freeing CONST_VAL
   appear elided from this listing.  Also note the stray second ';' on
   the return line below: it is a harmless empty statement, but should
   be removed.  */
696 /* Perform substitutions based on the known constant values. */
697 bool something_changed = substitute_and_fold (const_val, false);
701 return something_changed;;
705 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
708 any M UNDEFINED = any
709 any M VARYING = VARYING
710 Ci M Cj = Ci if (i == j)
711 Ci M Cj = VARYING if (i != j)  */
/* NOTE(review): braces and the `*val1 = *val2;` copy in the first branch
   appear elided from this listing.  The result is stored in *VAL1.  */
715 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
717 if (val1->lattice_val == UNDEFINED)
719 /* UNDEFINED M any = any */
722 else if (val2->lattice_val == UNDEFINED)
724 /* any M UNDEFINED = any
725 Nothing to do. VAL1 already contains the value we want. */
728 else if (val1->lattice_val == VARYING
729 || val2->lattice_val == VARYING)
731 /* any M VARYING = VARYING. */
732 val1->lattice_val = VARYING;
733 val1->value = NULL_TREE;
735 else if (val1->lattice_val == CONSTANT
736 && val2->lattice_val == CONSTANT
737 && simple_cst_equal (val1->value, val2->value) == 1)
739 /* Ci M Cj = Ci if (i == j)
740 Ci M Cj = VARYING if (i != j)
742 If these two values come from memory stores, make sure that
743 they come from the same memory reference. */
744 val1->lattice_val = CONSTANT;
/* The self-assignment below is effectively a no-op: VAL1 already holds
   the (equal) constant; kept for symmetry with the other branches.  */
745 val1->value = val1->value;
749 /* Any other combination is VARYING. */
750 val1->lattice_val = VARYING;
751 val1->value = NULL_TREE;
756 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
757 lattice values to determine PHI_NODE's lattice value. The value of a
758 PHI node is determined calling ccp_lattice_meet with all the arguments
759 of the PHI node that are incoming via executable edges. */
/* NOTE(review): braces, several case labels of the switch on the old
   lattice value, the `i` declaration, and an early loop exit appear
   elided from this listing.  */
761 static enum ssa_prop_result
762 ccp_visit_phi_node (gimple phi)
765 prop_value_t *old_val, new_val;
767 if (dump_file && (dump_flags & TDF_DETAILS))
769 fprintf (dump_file, "\nVisiting PHI node: ");
770 print_gimple_stmt (dump_file, phi, 0, dump_flags);
/* Once a PHI has gone VARYING it can never come back (monotonicity), so
   return immediately in that case.  */
773 old_val = get_value (gimple_phi_result (phi));
774 switch (old_val->lattice_val)
777 return SSA_PROP_VARYING;
/* Start the meet optimistically at UNDEFINED: undefined arguments do not
   affect the result (see the file-header comment).  */
784 new_val.lattice_val = UNDEFINED;
785 new_val.value = NULL_TREE;
792 for (i = 0; i < gimple_phi_num_args (phi); i++)
794 /* Compute the meet operator over all the PHI arguments flowing
795 through executable edges. */
796 edge e = gimple_phi_arg_edge (phi, i);
798 if (dump_file && (dump_flags & TDF_DETAILS))
801 "\n Argument #%d (%d -> %d %sexecutable)\n",
802 i, e->src->index, e->dest->index,
803 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
806 /* If the incoming edge is executable, compute the meet operator for
807 the existing value of the PHI node and the current PHI argument. */
808 if (e->flags & EDGE_EXECUTABLE)
810 tree arg = gimple_phi_arg (phi, i)->def;
811 prop_value_t arg_val;
813 if (is_gimple_min_invariant (arg))
815 arg_val.lattice_val = CONSTANT;
819 arg_val = *(get_value (arg));
821 ccp_lattice_meet (&new_val, &arg_val);
823 if (dump_file && (dump_flags & TDF_DETAILS))
825 fprintf (dump_file, "\t");
826 print_generic_expr (dump_file, arg, dump_flags);
827 dump_lattice_value (dump_file, "\tValue: ", arg_val);
828 fprintf (dump_file, "\n");
/* VARYING is the lattice bottom; no further argument can change it, so
   the loop can stop early here.  */
831 if (new_val.lattice_val == VARYING)
836 if (dump_file && (dump_flags & TDF_DETAILS))
838 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
839 fprintf (dump_file, "\n\n");
842 /* Make the transition to the new value. */
843 if (set_lattice_value (gimple_phi_result (phi), new_val))
845 if (new_val.lattice_val == VARYING)
846 return SSA_PROP_VARYING;
848 return SSA_PROP_INTERESTING;
851 return SSA_PROP_NOT_INTERESTING;
854 /* Return true if we may propagate the address expression ADDR into the
855 dereference DEREF and cancel them. */
/* NOTE(review): the function header line, braces, and one argument of
   the first useless_type_conversion_p call in the final return appear
   elided from this listing.  */
858 may_propagate_address_into_dereference (tree addr, tree deref)
860 gcc_assert (INDIRECT_REF_P (deref)
861 && TREE_CODE (addr) == ADDR_EXPR);
863 /* If the address is invariant then we do not need to preserve restrict
864 qualifications. But we do need to preserve volatile qualifiers until
865 we can annotate the folded dereference itself properly. */
866 if (is_gimple_min_invariant (addr)
867 && (!TREE_THIS_VOLATILE (deref)
868 || TYPE_VOLATILE (TREE_TYPE (addr))))
869 return useless_type_conversion_p (TREE_TYPE (deref),
870 TREE_TYPE (TREE_OPERAND (addr, 0)));
872 /* Else both the address substitution and the folding must result in
873 a valid useless type conversion sequence. */
874 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
876 && useless_type_conversion_p (TREE_TYPE (deref),
877 TREE_TYPE (TREE_OPERAND (addr, 0))));
880 /* CCP specific front-end to the non-destructive constant folding
883 Attempt to simplify the RHS of STMT knowing that one or more
884 operands are constants.
886 If simplification is possible, return the simplified RHS,
887 otherwise return the original RHS or NULL_TREE. */
/* NOTE(review): this listing elides many braces, case labels (the outer
   switch's GIMPLE_ASSIGN/GIMPLE_CALL/GIMPLE_COND/GIMPLE_SWITCH labels),
   several declarations and return statements.  The comments added below
   describe only the visible logic.  */
890 ccp_fold (gimple stmt)
892 switch (gimple_code (stmt))
896 enum tree_code subcode = gimple_assign_rhs_code (stmt);
898 switch (get_gimple_rhs_class (subcode))
900 case GIMPLE_SINGLE_RHS:
902 tree rhs = gimple_assign_rhs1 (stmt);
903 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
905 if (TREE_CODE (rhs) == SSA_NAME)
907 /* If the RHS is an SSA_NAME, return its known constant value,
   if it exists. ... */
909 return get_value (rhs)->value;
911 /* Handle propagating invariant addresses into address operations.
912 The folding we do here matches that in tree-ssa-forwprop.c. */
913 else if (TREE_CODE (rhs) == ADDR_EXPR)
916 base = &TREE_OPERAND (rhs, 0);
917 while (handled_component_p (*base))
918 base = &TREE_OPERAND (*base, 0);
919 if (TREE_CODE (*base) == INDIRECT_REF
920 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
922 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
923 if (val->lattice_val == CONSTANT
924 && TREE_CODE (val->value) == ADDR_EXPR
925 && may_propagate_address_into_dereference
928 /* We need to return a new tree, not modify the IL
929 or share parts of it. So play some tricks to
930 avoid manually building it. */
931 tree ret, save = *base;
932 *base = TREE_OPERAND (val->value, 0);
933 ret = unshare_expr (rhs);
934 recompute_tree_invariant_for_addr_expr (ret);
/* NOTE(review): the restoration `*base = save;` and the return of `ret`
   appear elided here -- `save` is otherwise unused in the visible code.  */
941 if (kind == tcc_reference)
943 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
944 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
946 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
947 if (val->lattice_val == CONSTANT)
948 return fold_unary (VIEW_CONVERT_EXPR,
949 TREE_TYPE (rhs), val->value);
951 return fold_const_aggregate_ref (rhs);
953 else if (kind == tcc_declaration)
954 return get_symbol_constant_value (rhs);
958 case GIMPLE_UNARY_RHS:
960 /* Handle unary operators that can appear in GIMPLE form.
961 Note that we know the single operand must be a constant,
962 so this should almost always return a simplified RHS. */
963 tree lhs = gimple_assign_lhs (stmt);
964 tree op0 = gimple_assign_rhs1 (stmt);
967 /* Simplify the operand down to a constant. */
968 if (TREE_CODE (op0) == SSA_NAME)
970 prop_value_t *val = get_value (op0);
971 if (val->lattice_val == CONSTANT)
972 op0 = get_value (op0)->value;
975 /* Conversions are useless for CCP purposes if they are
976 value-preserving. Thus the restrictions that
977 useless_type_conversion_p places for pointer type conversions
978 do not apply here. Substitution later will only substitute to
   allowed places. ... */
980 if (CONVERT_EXPR_CODE_P (subcode)
981 && POINTER_TYPE_P (TREE_TYPE (lhs))
982 && POINTER_TYPE_P (TREE_TYPE (op0))
983 /* Do not allow differences in volatile qualification
984 as this might get us confused as to whether a
985 propagation destination statement is volatile
986 or not. See PR36988. */
987 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
988 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
991 /* Still try to generate a constant of correct type. */
992 if (!useless_type_conversion_p (TREE_TYPE (lhs),
994 && ((tem = maybe_fold_offset_to_address
995 (op0, integer_zero_node, TREE_TYPE (lhs)))
1001 res = fold_unary (subcode, gimple_expr_type (stmt), op0);
1003 /* If the operation was a conversion do _not_ mark a
1004 resulting constant with TREE_OVERFLOW if the original
1005 constant was not. These conversions have implementation
1006 defined behavior and retaining the TREE_OVERFLOW flag
1007 here would confuse later passes such as VRP. */
1009 && TREE_CODE (res) == INTEGER_CST
1010 && TREE_CODE (op0) == INTEGER_CST
1011 && CONVERT_EXPR_CODE_P (subcode)
1012 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
1017 case GIMPLE_BINARY_RHS:
1019 /* Handle binary operators that can appear in GIMPLE form. */
1020 tree op0 = gimple_assign_rhs1 (stmt);
1021 tree op1 = gimple_assign_rhs2 (stmt);
1023 /* Simplify the operands down to constants when appropriate. */
1024 if (TREE_CODE (op0) == SSA_NAME)
1026 prop_value_t *val = get_value (op0);
1027 if (val->lattice_val == CONSTANT)
1031 if (TREE_CODE (op1) == SSA_NAME)
1033 prop_value_t *val = get_value (op1);
1034 if (val->lattice_val == CONSTANT)
1038 /* Fold &foo + CST into an invariant reference if possible. */
1039 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1040 && TREE_CODE (op0) == ADDR_EXPR
1041 && TREE_CODE (op1) == INTEGER_CST)
1043 tree lhs = gimple_assign_lhs (stmt);
1044 tree tem = maybe_fold_offset_to_address (op0, op1,
1046 if (tem != NULL_TREE)
1050 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
/* GIMPLE_CALL handling: resolve the callee (possibly through the
   lattice), then fold built-in calls with constant-substituted
   arguments via fold_call_expr.  */
1061 tree fn = gimple_call_fn (stmt);
1064 if (TREE_CODE (fn) == SSA_NAME)
1066 val = get_value (fn);
1067 if (val->lattice_val == CONSTANT)
1070 if (TREE_CODE (fn) == ADDR_EXPR
1071 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1072 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1074 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1077 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1079 args[i] = gimple_call_arg (stmt, i);
1080 if (TREE_CODE (args[i]) == SSA_NAME)
1082 val = get_value (args[i]);
1083 if (val->lattice_val == CONSTANT)
1084 args[i] = val->value;
1087 call = build_call_array (gimple_call_return_type (stmt),
1088 fn, gimple_call_num_args (stmt), args);
1089 retval = fold_call_expr (call, false);
1091 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1092 STRIP_NOPS (retval);
1100 /* Handle comparison operators that can appear in GIMPLE form. */
1101 tree op0 = gimple_cond_lhs (stmt);
1102 tree op1 = gimple_cond_rhs (stmt);
1103 enum tree_code code = gimple_cond_code (stmt);
1105 /* Simplify the operands down to constants when appropriate. */
1106 if (TREE_CODE (op0) == SSA_NAME)
1108 prop_value_t *val = get_value (op0);
1109 if (val->lattice_val == CONSTANT)
1113 if (TREE_CODE (op1) == SSA_NAME)
1115 prop_value_t *val = get_value (op1);
1116 if (val->lattice_val == CONSTANT)
1120 return fold_binary (code, boolean_type_node, op0, op1);
1125 tree rhs = gimple_switch_index (stmt);
1127 if (TREE_CODE (rhs) == SSA_NAME)
1129 /* If the RHS is an SSA_NAME, return its known constant value,
   if it exists. ... */
1131 return get_value (rhs)->value;
1143 /* Return the tree representing the element referenced by T if T is an
1144 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1145 NULL_TREE otherwise. */
/* NOTE(review): this listing elides the case labels (ARRAY_REF,
   COMPONENT_REF, REALPART_EXPR/IMAGPART_EXPR, INDIRECT_REF), many braces,
   the cfield/cval declarations and several returns; comments below cover
   only the visible logic.  Also recursive: nested references resolve via
   recursive calls on the base.  */
1148 fold_const_aggregate_ref (tree t)
1150 prop_value_t *value;
1151 tree base, ctor, idx, field;
1152 unsigned HOST_WIDE_INT cnt;
1155 switch (TREE_CODE (t))
1158 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1159 DECL_INITIAL. If BASE is a nested reference into another
1160 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1161 the inner reference. */
1162 base = TREE_OPERAND (t, 0);
1163 switch (TREE_CODE (base))
1166 if (!TREE_READONLY (base)
1167 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1168 || !targetm.binds_local_p (base))
1171 ctor = DECL_INITIAL (base);
1176 ctor = fold_const_aggregate_ref (base);
1188 if (ctor == NULL_TREE
1189 || (TREE_CODE (ctor) != CONSTRUCTOR
1190 && TREE_CODE (ctor) != STRING_CST)
1191 || !TREE_STATIC (ctor))
1194 /* Get the index. If we have an SSA_NAME, try to resolve it
1195 with the current lattice value for the SSA_NAME. */
1196 idx = TREE_OPERAND (t, 1);
1197 switch (TREE_CODE (idx))
1200 if ((value = get_value (idx))
1201 && value->lattice_val == CONSTANT
1202 && TREE_CODE (value->value) == INTEGER_CST)
1215 /* Fold read from constant string. */
1216 if (TREE_CODE (ctor) == STRING_CST)
1218 if ((TYPE_MODE (TREE_TYPE (t))
1219 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1220 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1222 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1223 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1224 return build_int_cst_type (TREE_TYPE (t),
1225 (TREE_STRING_POINTER (ctor)
1226 [TREE_INT_CST_LOW (idx)]));
1230 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1231 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1232 if (tree_int_cst_equal (cfield, idx))
1234 STRIP_USELESS_TYPE_CONVERSION (cval);
1240 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1241 DECL_INITIAL. If BASE is a nested reference into another
1242 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1243 the inner reference. */
1244 base = TREE_OPERAND (t, 0);
1245 switch (TREE_CODE (base))
1248 if (!TREE_READONLY (base)
1249 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1250 || !targetm.binds_local_p (base))
1253 ctor = DECL_INITIAL (base);
1258 ctor = fold_const_aggregate_ref (base);
1265 if (ctor == NULL_TREE
1266 || TREE_CODE (ctor) != CONSTRUCTOR
1267 || !TREE_STATIC (ctor))
1270 field = TREE_OPERAND (t, 1);
1272 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1274 /* FIXME: Handle bit-fields. */
1275 && ! DECL_BIT_FIELD (cfield))
1277 STRIP_USELESS_TYPE_CONVERSION (cval);
/* Real/imaginary part of a constant complex value.  */
1285 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1286 if (c && TREE_CODE (c) == COMPLEX_CST)
1287 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
/* Dereference through a pointer whose lattice value is a known
   ADDR_EXPR: fold the pointed-to reference instead.  */
1293 tree base = TREE_OPERAND (t, 0);
1294 if (TREE_CODE (base) == SSA_NAME
1295 && (value = get_value (base))
1296 && value->lattice_val == CONSTANT
1297 && TREE_CODE (value->value) == ADDR_EXPR
1298 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1309 /* Evaluate statement STMT.
1310 Valid only for assignments, calls, conditionals, and switches. */
/* NOTE(review): this chunk is elided -- braces, the declaration of the
   returned lattice value (presumably a prop_value_t VAL), the declaration
   of IS_CONSTANT, and the final return are in the dropped lines.  Comments
   below annotate only the visible statements.  */
1313 evaluate_stmt (gimple stmt)
1316 tree simplified = NULL_TREE;
1317 ccp_lattice_t likelyvalue = likely_value (stmt);
/* Defer overflow diagnostics while folding; they are re-enabled below
   (fold_undefer_overflow_warnings) once we know whether a constant
   actually resulted.  */
1320 fold_defer_overflow_warnings ();
1322 /* If the statement is likely to have a CONSTANT result, then try
1323 to fold the statement to determine the constant value. */
1324 /* FIXME. This is the only place that we call ccp_fold.
1325 Since likely_value never returns CONSTANT for calls, we will
1326 not attempt to fold them, including builtins that may profit. */
1327 if (likelyvalue == CONSTANT)
1328 simplified = ccp_fold (stmt);
1329 /* If the statement is likely to have a VARYING result, then do not
1330 bother folding the statement. */
1331 else if (likelyvalue == VARYING)
1333 enum gimple_code code = gimple_code (stmt);
1334 if (code == GIMPLE_ASSIGN)
1336 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1338 /* Other cases cannot satisfy is_gimple_min_invariant
/* Single-RHS assignments may still be a min-invariant operand, so use
   the RHS directly without folding.  */
1340 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1341 simplified = gimple_assign_rhs1 (stmt);
1343 else if (code == GIMPLE_SWITCH)
1344 simplified = gimple_switch_index (stmt);
1346 /* These cannot satisfy is_gimple_min_invariant without folding. */
1347 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1350 is_constant = simplified && is_gimple_min_invariant (simplified);
1352 fold_undefer_overflow_warnings (is_constant, stmt, 0);
/* Dump what the likely lattice value was; switch-case labels for the
   ccp_lattice_t values fall in the elided lines.  */
1354 if (dump_file && (dump_flags & TDF_DETAILS))
1356 fprintf (dump_file, "which is likely ");
1357 switch (likelyvalue)
1360 fprintf (dump_file, "CONSTANT");
1363 fprintf (dump_file, "UNDEFINED");
1366 fprintf (dump_file, "VARYING");
1370 fprintf (dump_file, "\n");
1375 /* The statement produced a constant value. */
1376 val.lattice_val = CONSTANT;
1377 val.value = simplified;
1381 /* The statement produced a nonconstant value. If the statement
1382 had UNDEFINED operands, then the result of the statement
1383 should be UNDEFINED. Otherwise, the statement is VARYING. */
1384 if (likelyvalue == UNDEFINED)
1385 val.lattice_val = likelyvalue;
1387 val.lattice_val = VARYING;
1389 val.value = NULL_TREE;
1395 /* Visit the assignment statement STMT. Set the value of its LHS to the
1396 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1397 creates virtual definitions, set the value of each new name to that
1398 of the RHS (if we can derive a constant out of the RHS).
1399 Value-returning call statements also perform an assignment, and
1400 are handled here. */
/* NOTE(review): elided chunk -- braces, the declaration of VAL, the else
   branch structure and the final return are in the dropped lines.  */
1402 static enum ssa_prop_result
1403 visit_assignment (gimple stmt, tree *output_p)
1406 enum ssa_prop_result retval;
1408 tree lhs = gimple_get_lhs (stmt);
/* Callers only hand us calls that actually produce a value.  */
1410 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1411 || gimple_call_lhs (stmt) != NULL_TREE);
1413 if (gimple_assign_copy_p (stmt))
1415 tree rhs = gimple_assign_rhs1 (stmt);
1417 if (TREE_CODE (rhs) == SSA_NAME)
1419 /* For a simple copy operation, we copy the lattice values. */
1420 prop_value_t *nval = get_value (rhs);
/* NOTE(review): the use of NVAL (presumably "val = *nval;") is elided.  */
1424 val = evaluate_stmt (stmt);
1427 /* Evaluate the statement, which could be
1428 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1429 val = evaluate_stmt (stmt);
1431 retval = SSA_PROP_NOT_INTERESTING;
1433 /* Set the lattice value of the statement's output. */
1434 if (TREE_CODE (lhs) == SSA_NAME)
1436 /* If STMT is an assignment to an SSA_NAME, we only have one
/* set_lattice_value returns whether the lattice value changed; only a
   change makes the statement interesting to the propagator.  */
1438 if (set_lattice_value (lhs, val))
1441 if (val.lattice_val == VARYING)
1442 retval = SSA_PROP_VARYING;
1444 retval = SSA_PROP_INTERESTING;
1452 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1453 if it can determine which edge will be taken. Otherwise, return
1454 SSA_PROP_VARYING. */
/* NOTE(review): elided chunk -- declarations of BLOCK and VAL, braces, and
   the guard selecting between the two returns (presumably
   "if (*taken_edge_p)") are in the dropped lines.  */
1456 static enum ssa_prop_result
1457 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1462 block = gimple_bb (stmt);
1463 val = evaluate_stmt (stmt);
1465 /* Find which edge out of the conditional block will be taken and add it
1466 to the worklist. If no single edge can be determined statically,
1467 return SSA_PROP_VARYING to feed all the outgoing edges to the
1468 propagation engine. */
1469 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1471 return SSA_PROP_INTERESTING;
1473 return SSA_PROP_VARYING;
1477 /* Evaluate statement STMT. If the statement produces an output value and
1478 its evaluation changes the lattice value of its output, return
1479 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1482 If STMT is a conditional branch and we can determine its truth
1483 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1484 value, return SSA_PROP_VARYING. */
/* NOTE(review): elided chunk -- declarations of DEF and ITER, braces, and
   the switch's case labels (presumably GIMPLE_ASSIGN, GIMPLE_CALL,
   GIMPLE_COND/GIMPLE_SWITCH, default) are in the dropped lines.  */
1486 static enum ssa_prop_result
1487 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1492 if (dump_file && (dump_flags & TDF_DETAILS))
1494 fprintf (dump_file, "\nVisiting statement:\n");
1495 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
/* Dispatch on the statement kind; only assignments, value-returning
   calls, and conditionals are interesting for CCP.  */
1498 switch (gimple_code (stmt))
1501 /* If the statement is an assignment that produces a single
1502 output value, evaluate its RHS to see if the lattice value of
1503 its output has changed. */
1504 return visit_assignment (stmt, output_p);
1507 /* A value-returning call also performs an assignment. */
1508 if (gimple_call_lhs (stmt) != NULL_TREE)
1509 return visit_assignment (stmt, output_p);
1514 /* If STMT is a conditional branch, see if we can determine
1515 which branch will be taken. */
1516 /* FIXME. It appears that we should be able to optimize
1517 computed GOTOs here as well. */
1518 return visit_cond_stmt (stmt, taken_edge_p);
1524 /* Any other kind of statement is not interesting for constant
1525 propagation and, therefore, not worth simulating. */
1526 if (dump_file && (dump_flags & TDF_DETAILS))
1527 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1529 /* Definitions made by statements other than assignments to
1530 SSA_NAMEs represent unknown modifications to their outputs.
1531 Mark them VARYING. */
1532 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1534 prop_value_t v = { VARYING, NULL_TREE };
1535 set_lattice_value (def, v);
1538 return SSA_PROP_VARYING;
1542 /* Main entry point for SSA Conditional Constant Propagation. */
/* NOTE(review): the function header (presumably "static unsigned int
   do_ssa_ccp (void)"), the call to ccp_initialize, and the fallback
   return are in the elided lines.  Visible logic: run the generic SSA
   propagation engine with the CCP callbacks, then, if finalization
   replaced anything, request cleanup/SSA-update TODOs.  */
1548 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1549 if (ccp_finalize ())
1550 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
/* NOTE(review): gate function for the pass (header elided).  Enabled
   whenever -ftree-ccp is in effect.  */
1559 return flag_tree_ccp != 0;
/* Pass descriptor registering CCP with the pass manager.
   NOTE(review): several initializer fields (pass name, sub/next passes,
   and the closing braces) are in the elided lines.  */
1563 struct gimple_opt_pass pass_ccp =
1568 gate_ccp, /* gate */
1569 do_ssa_ccp, /* execute */
1572 0, /* static_pass_number */
1573 TV_TREE_CCP, /* tv_id */
1574 PROP_cfg | PROP_ssa, /* properties_required */
1575 0, /* properties_provided */
1576 0, /* properties_destroyed */
1577 0, /* todo_flags_start */
1578 TODO_dump_func | TODO_verify_ssa
1579 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1584 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1585 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1586 is the desired result type. */
/* NOTE(review): elided chunk -- the return type, braces, several early
   "return NULL_TREE"s after failed checks, declarations (SOFFSET,
   DOMAIN_TYPE), and parts of the bounds-check conditions are in the
   dropped lines.  Comments below annotate only visible statements.  */
1589 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1590 bool allow_negative_idx)
1592 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1593 tree array_type, elt_type, elt_size;
1596 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1597 measured in units of the size of elements type) from that ARRAY_REF).
1598 We can't do anything if either is variable.
1600 The case we handle here is *(&A[N]+O). */
1601 if (TREE_CODE (base) == ARRAY_REF)
1603 tree low_bound = array_ref_low_bound (base);
1605 elt_offset = TREE_OPERAND (base, 1);
1606 if (TREE_CODE (low_bound) != INTEGER_CST
1607 || TREE_CODE (elt_offset) != INTEGER_CST)
/* Normalize the inner index relative to the array's low bound and strip
   the ARRAY_REF so BASE is the array object itself.  */
1610 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1611 base = TREE_OPERAND (base, 0);
1614 /* Ignore stupid user tricks of indexing non-array variables. */
1615 array_type = TREE_TYPE (base);
1616 if (TREE_CODE (array_type) != ARRAY_TYPE)
1618 elt_type = TREE_TYPE (array_type);
1619 if (!useless_type_conversion_p (orig_type, elt_type))
1622 /* Use signed size type for intermediate computation on the index. */
1623 idx_type = signed_type_for (size_type_node);
1625 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1626 element type (so we can use the alignment if it's not constant).
1627 Otherwise, compute the offset as an index by using a division. If the
1628 division isn't exact, then don't do anything. */
1629 elt_size = TYPE_SIZE_UNIT (elt_type);
1632 if (integer_zerop (offset))
1634 if (TREE_CODE (elt_size) != INTEGER_CST)
1635 elt_size = size_int (TYPE_ALIGN (elt_type));
1637 idx = build_int_cst (idx_type, 0);
1641 unsigned HOST_WIDE_INT lquo, lrem;
1642 HOST_WIDE_INT hquo, hrem;
1645 /* The final array offset should be signed, so we need
1646 to sign-extend the (possibly pointer) offset here
1647 and use signed division. */
1648 soffset = double_int_sext (tree_to_double_int (offset),
1649 TYPE_PRECISION (TREE_TYPE (offset)));
/* Bail out unless OFFSET divides evenly by the element size; the check
   of the remainder (LREM/HREM) is in the elided continuation.  */
1650 if (TREE_CODE (elt_size) != INTEGER_CST
1651 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1652 soffset.low, soffset.high,
1653 TREE_INT_CST_LOW (elt_size),
1654 TREE_INT_CST_HIGH (elt_size),
1655 &lquo, &hquo, &lrem, &hrem)
1659 idx = build_int_cst_wide (idx_type, lquo, hquo);
1662 /* Assume the low bound is zero. If there is a domain type, get the
1663 low bound, if any, convert the index into that type, and add the
1665 min_idx = build_int_cst (idx_type, 0);
1666 domain_type = TYPE_DOMAIN (array_type);
1669 idx_type = domain_type;
1670 if (TYPE_MIN_VALUE (idx_type))
1671 min_idx = TYPE_MIN_VALUE (idx_type);
1673 min_idx = fold_convert (idx_type, min_idx);
1675 if (TREE_CODE (min_idx) != INTEGER_CST)
1678 elt_offset = fold_convert (idx_type, elt_offset);
1681 if (!integer_zerop (min_idx))
1682 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1683 if (!integer_zerop (elt_offset))
1684 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1686 /* Make sure to possibly truncate late after offsetting. */
1687 idx = fold_convert (idx_type, idx);
1689 /* We don't want to construct access past array bounds. For example
1692 should not be simplified into (*c)[14] or tree-vrp will
1693 give false warnings. The same is true for
1694 struct A { long x; char d[0]; } *a;
1696 which should be not folded to &a->d[-8]. */
1698 && TYPE_MAX_VALUE (domain_type)
1699 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1701 tree up_bound = TYPE_MAX_VALUE (domain_type);
1703 if (tree_int_cst_lt (up_bound, idx)
1704 /* Accesses after the end of arrays of size 0 (gcc
1705 extension) and 1 are likely intentional ("struct
1707 && compare_tree_int (up_bound, 1) > 0)
1711 && TYPE_MIN_VALUE (domain_type))
/* Reject negative indices unless the caller explicitly allows them
   (used for flexible-array-member style accesses).  */
1713 if (!allow_negative_idx
1714 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1715 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1718 else if (!allow_negative_idx
1719 && compare_tree_int (idx, 0) < 0)
1722 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1726 /* Attempt to fold *(S+O) to S.X.
1727 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1728 is the desired result type. */
/* NOTE(review): elided chunk -- the return type, braces, declarations
   (CMP, NEW_BASE, RET), continue/return statements after failed checks,
   and parts of several conditions are in the dropped lines.  */
1731 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1732 tree orig_type, bool base_is_ptr)
1734 tree f, t, field_type, tail_array_field, field_offset;
1738 if (TREE_CODE (record_type) != RECORD_TYPE
1739 && TREE_CODE (record_type) != UNION_TYPE
1740 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1743 /* Short-circuit silly cases. */
1744 if (useless_type_conversion_p (record_type, orig_type))
/* Walk the record's fields looking for one whose byte range contains
   OFFSET; remember a trailing array field as a fallback.  */
1747 tail_array_field = NULL_TREE;
1748 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1752 if (TREE_CODE (f) != FIELD_DECL)
1754 if (DECL_BIT_FIELD (f))
1757 if (!DECL_FIELD_OFFSET (f))
1759 field_offset = byte_position (f);
1760 if (TREE_CODE (field_offset) != INTEGER_CST)
1763 /* ??? Java creates "interesting" fields for representing base classes.
1764 They have no name, and have no context. With no context, we get into
1765 trouble with nonoverlapping_component_refs_p. Skip them. */
1766 if (!DECL_FIELD_CONTEXT (f))
1769 /* The previous array field isn't at the end. */
1770 tail_array_field = NULL_TREE;
1772 /* Check to see if this offset overlaps with the field. */
1773 cmp = tree_int_cst_compare (field_offset, offset);
1777 field_type = TREE_TYPE (f);
1779 /* Here we exactly match the offset being checked. If the types match,
1780 then we can return that field. */
1782 && useless_type_conversion_p (orig_type, field_type))
/* If BASE is still a pointer, materialize the dereference before
   building the COMPONENT_REF.  */
1785 base = build1 (INDIRECT_REF, record_type, base);
1786 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1790 /* Don't care about offsets into the middle of scalars. */
1791 if (!AGGREGATE_TYPE_P (field_type))
1794 /* Check for array at the end of the struct. This is often
1795 used as for flexible array members. We should be able to
1796 turn this into an array access anyway. */
1797 if (TREE_CODE (field_type) == ARRAY_TYPE)
1798 tail_array_field = f;
1800 /* Check the end of the field against the offset. */
1801 if (!DECL_SIZE_UNIT (f)
1802 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1804 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1805 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1808 /* If we matched, then set offset to the displacement into
1811 new_base = build1 (INDIRECT_REF, record_type, base);
1814 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1816 /* Recurse to possibly find the match. */
1817 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1818 f == TYPE_FIELDS (record_type));
1821 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1827 if (!tail_array_field)
1830 f = tail_array_field;
1831 field_type = TREE_TYPE (f);
1832 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1834 /* If we get here, we've got an aggregate field, and a possibly
1835 nonzero offset into them. Recurse and hope for a valid match. */
1837 base = build1 (INDIRECT_REF, record_type, base);
1838 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1840 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1841 f == TYPE_FIELDS (record_type));
1844 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1848 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1849 or BASE[index] or by combination of those.
1851 Before attempting the conversion strip off existing ADDR_EXPRs and
1852 handled component refs. */
/* NOTE(review): elided chunk -- the return type, braces, declarations
   (RET, TYPE, NEWBASE), and several conditions/returns are in the
   dropped lines.  */
1855 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1859 bool base_is_ptr = true;
1862 if (TREE_CODE (base) == ADDR_EXPR)
1864 base_is_ptr = false;
1866 base = TREE_OPERAND (base, 0);
1868 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
1869 so it needs to be removed and new COMPONENT_REF constructed.
1870 The wrong COMPONENT_REF are often constructed by folding the
1871 (type *)&object within the expression (type *)&object+offset */
1872 if (handled_component_p (base))
1874 HOST_WIDE_INT sub_offset, size, maxsize;
1876 newbase = get_ref_base_and_extent (base, &sub_offset,
1878 gcc_assert (newbase);
/* Only fold the component ref's offset in when it is byte-aligned.  */
1881 && !(sub_offset & (BITS_PER_UNIT - 1)))
1885 offset = int_const_binop (PLUS_EXPR, offset,
1886 build_int_cst (TREE_TYPE (offset),
1887 sub_offset / BITS_PER_UNIT), 1);
1890 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1891 && integer_zerop (offset))
1893 type = TREE_TYPE (base);
1898 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1900 type = TREE_TYPE (TREE_TYPE (base));
/* First try a record-field fold; on failure (elided check), fall back
   to an array-reference fold below.  */
1902 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1903 orig_type, base_is_ptr);
1907 base = build1 (INDIRECT_REF, type, base);
1908 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1913 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1914 or &BASE[index] or by combination of those.
1916 Before attempting the conversion strip off existing component refs. */
/* NOTE(review): elided chunk -- the return type, braces, the declaration
   of T/ORIG and the NULL-check on T are in the dropped lines.  */
1919 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1923 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1924 && POINTER_TYPE_P (orig_type));
1926 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1932 /* For __builtin_object_size to function correctly we need to
1933 make sure not to fold address arithmetic so that we change
1934 reference from one array to another. This would happen for
1937 struct X { char s1[10]; char s2[10] } s;
1938 char *foo (void) { return &s.s2[-4]; }
1940 where we need to avoid generating &s.s1[6]. As the C and
1941 C++ frontends create different initial trees
1942 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1943 sophisticated comparisons here. Note that checking for the
1944 condition after the fact is easier than trying to avoid doing
1947 if (TREE_CODE (orig) == ADDR_EXPR)
1948 orig = TREE_OPERAND (orig, 0);
/* Reject the fold when both the original and the folded reference are
   array accesses but over different base objects (would confuse
   __builtin_object_size, see comment above).  */
1949 if ((TREE_CODE (orig) == ARRAY_REF
1950 || (TREE_CODE (orig) == COMPONENT_REF
1951 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1952 && (TREE_CODE (t) == ARRAY_REF
1953 || (TREE_CODE (t) == COMPONENT_REF
1954 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
1955 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1956 ? TREE_OPERAND (orig, 0) : orig,
1957 TREE_CODE (t) == ARRAY_REF
1958 ? TREE_OPERAND (t, 0) : t, 0))
/* Wrap the folded reference back into an address of the right pointer
   type; bail (elided) when the conversion is not useless.  */
1961 ptr_type = build_pointer_type (TREE_TYPE (t));
1962 if (!useless_type_conversion_p (orig_type, ptr_type))
1964 return build_fold_addr_expr_with_type (t, ptr_type);
1970 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1971 Return the simplified expression, or NULL if nothing could be done. */
/* NOTE(review): elided chunk -- the return type, braces, the declaration
   of T, several "return t" / "return NULL_TREE" statements and else
   branches are in the dropped lines.  */
1974 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1977 bool volatile_p = TREE_THIS_VOLATILE (expr);
1979 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1980 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1981 are sometimes added. */
1983 STRIP_TYPE_NOPS (base);
1984 TREE_OPERAND (expr, 0) = base;
1986 /* One possibility is that the address reduces to a string constant. */
1987 t = fold_read_from_constant_string (expr);
1991 /* Add in any offset from a POINTER_PLUS_EXPR. */
1992 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1996 offset2 = TREE_OPERAND (base, 1);
1997 if (TREE_CODE (offset2) != INTEGER_CST)
1999 base = TREE_OPERAND (base, 0);
2001 offset = fold_convert (sizetype,
2002 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2005 if (TREE_CODE (base) == ADDR_EXPR)
2007 tree base_addr = base;
2009 /* Strip the ADDR_EXPR. */
2010 base = TREE_OPERAND (base, 0);
2012 /* Fold away CONST_DECL to its value, if the type is scalar. */
2013 if (TREE_CODE (base) == CONST_DECL
2014 && is_gimple_min_invariant (DECL_INITIAL (base)))
2015 return DECL_INITIAL (base);
2017 /* Try folding *(&B+O) to B.X. */
2018 t = maybe_fold_offset_to_reference (base_addr, offset,
2022 /* Preserve volatileness of the original expression.
2023 We can end up with a plain decl here which is shared
2024 and we shouldn't mess with its flags. */
2026 TREE_THIS_VOLATILE (t) = volatile_p;
2032 /* We can get here for out-of-range string constant accesses,
2033 such as "_"[3]. Bail out of the entire substitution search
2034 and arrange for the entire statement to be replaced by a
2035 call to __builtin_trap. In all likelihood this will all be
2036 constant-folded away, but in the meantime we can't leave with
2037 something that get_expr_operands can't understand. */
2041 if (TREE_CODE (t) == ADDR_EXPR
2042 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2044 /* FIXME: Except that this causes problems elsewhere with dead
2045 code not being deleted, and we die in the rtl expanders
2046 because we failed to remove some ssa_name. In the meantime,
2047 just return zero. */
2048 /* FIXME2: This condition should be signaled by
2049 fold_read_from_constant_string directly, rather than
2050 re-checking for it here. */
2051 return integer_zero_node;
2054 /* Try folding *(B+O) to B->X. Still an improvement. */
2055 if (POINTER_TYPE_P (TREE_TYPE (base)))
2057 t = maybe_fold_offset_to_reference (base, offset,
2064 /* Otherwise we had an offset that we could not simplify. */
2054 /* Try folding *(B+O) to B->X. Still an improvement. */
2055 if (POINTER_TYPE_P (TREE_TYPE (base)))
2057 t = maybe_fold_offset_to_reference (base, offset,
2064 /* Otherwise we had an offset that we could not simplify. */
2069 /* A quaint feature extant in our address arithmetic is that there
2070 can be hidden type changes here. The type of the result need
2071 not be the same as the type of the input pointer.
2073 What we're after here is an expression of the form
2074 (T *)(&array + const)
2075 where array is OP0, const is OP1, RES_TYPE is T and
2076 the cast doesn't actually exist, but is implicit in the
2077 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2079 which may be able to propagate further. */
2082 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2087 /* It had better be a constant. */
2088 if (TREE_CODE (op1) != INTEGER_CST)
2090 /* The first operand should be an ADDR_EXPR. */
2091 if (TREE_CODE (op0) != ADDR_EXPR)
2093 op0 = TREE_OPERAND (op0, 0);
2095 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2096 the offset into it. */
2097 while (TREE_CODE (op0) == ARRAY_REF)
2099 tree array_obj = TREE_OPERAND (op0, 0);
2100 tree array_idx = TREE_OPERAND (op0, 1);
2101 tree elt_type = TREE_TYPE (op0);
2102 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2105 if (TREE_CODE (array_idx) != INTEGER_CST)
2107 if (TREE_CODE (elt_size) != INTEGER_CST)
2110 /* Un-bias the index by the min index of the array type. */
2111 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2114 min_idx = TYPE_MIN_VALUE (min_idx);
2117 if (TREE_CODE (min_idx) != INTEGER_CST)
2120 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2121 if (!integer_zerop (min_idx))
2122 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2127 /* Convert the index to a byte offset. */
2128 array_idx = fold_convert (sizetype, array_idx);
2129 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2131 /* Update the operands for the next round, or for folding. */
2132 op1 = int_const_binop (PLUS_EXPR,
2137 ptd_type = TREE_TYPE (res_type);
2138 /* If we want a pointer to void, reconstruct the reference from the
2139 array element type. A pointer to that can be trivially converted
2140 to void *. This happens as we fold (void *)(ptr p+ off). */
2141 if (VOID_TYPE_P (ptd_type)
2142 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2143 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2145 /* At which point we can try some of the same things as for indirects. */
2146 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2148 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2151 t = build1 (ADDR_EXPR, res_type, t);
2156 /* For passing state through walk_tree into fold_stmt_r and its
/* NOTE(review): the struct's other members (at least a gimple STMT and a
   bool *CHANGED_P, judging from their uses in fold_stmt_r below) are in
   the elided lines.  */
2159 struct fold_stmt_r_data
2163 bool *inside_addr_expr_p;
2166 /* Subroutine of fold_stmt called via walk_tree. We perform several
2167 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
/* NOTE(review): elided chunk -- the return type, braces, the switch's
   case labels (INDIRECT_REF, NOP_EXPR, ARRAY_REF, ADDR_EXPR,
   COMPONENT_REF, COND_EXPR, default -- inferred from the visible bodies),
   the declaration of CHANGED_P/T handling, break statements, and the
   final return are in the dropped lines.  */
2170 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2172 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2173 struct fold_stmt_r_data *fold_stmt_r_data;
2174 bool *inside_addr_expr_p;
2176 tree expr = *expr_p, t;
2177 bool volatile_p = TREE_THIS_VOLATILE (expr);
2179 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2180 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2181 changed_p = fold_stmt_r_data->changed_p;
2183 /* ??? It'd be nice if walk_tree had a pre-order option. */
2184 switch (TREE_CODE (expr))
2187 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2192 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2194 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2195 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2198 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2199 /* If we had a good reason for propagating the address here,
2200 make sure we end up with valid gimple. See PR34989. */
2201 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2205 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
/* Fold pointer-to-pointer conversions of addresses when possible.  */
2210 if (POINTER_TYPE_P (TREE_TYPE (expr))
2211 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2212 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2213 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2215 TREE_TYPE (TREE_TYPE (expr)))))
2219 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2220 We'd only want to bother decomposing an existing ARRAY_REF if
2221 the base array is found to have another offset contained within.
2222 Otherwise we'd be wasting time. */
2224 /* If we are not processing expressions found within an
2225 ADDR_EXPR, then we can fold constant array references.
2226 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2228 if (!*inside_addr_expr_p && !wi->is_lhs)
2229 t = fold_read_from_constant_string (expr);
/* Track ADDR_EXPR nesting so the ARRAY_REF case above knows not to
   fold references that are having their address taken.  */
2235 *inside_addr_expr_p = true;
2236 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2237 *inside_addr_expr_p = false;
2242 /* Make sure the value is properly considered constant, and so gets
2243 propagated as expected. */
2245 recompute_tree_invariant_for_addr_expr (expr);
2249 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2254 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2255 We've already checked that the records are compatible, so we should
2256 come up with a set of compatible fields. */
2258 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2259 tree expr_field = TREE_OPERAND (expr, 1);
2261 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2263 expr_field = find_compatible_field (expr_record, expr_field);
2264 TREE_OPERAND (expr, 1) = expr_field;
2269 case TARGET_MEM_REF:
2270 t = maybe_fold_tmr (expr);
2273 case POINTER_PLUS_EXPR:
2274 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2277 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2282 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2283 TREE_OPERAND (expr, 0),
2284 TREE_OPERAND (expr, 1));
2288 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2290 tree op0 = TREE_OPERAND (expr, 0);
2294 fold_defer_overflow_warnings ();
2295 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2296 TREE_OPERAND (op0, 0),
2297 TREE_OPERAND (op0, 1));
2298 /* This is actually a conditional expression, not a GIMPLE
2299 conditional statement, however, the valid_gimple_rhs_p
2300 test still applies. */
2301 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2302 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2305 COND_EXPR_COND (expr) = tem;
2318 /* Preserve volatileness of the original expression.
2319 We can end up with a plain decl here which is shared
2320 and we shouldn't mess with its flags. */
2322 TREE_THIS_VOLATILE (t) = volatile_p;
2330 /* Return the string length, maximum string length or maximum value of
2332 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
2333 is not NULL and, for TYPE == 0, its value is not equal to the length
2334 we determine or if we are unable to determine the length or value,
2335 return false. VISITED is a bitmap of visited variables.
2336 TYPE is 0 if string length should be returned, 1 for maximum string
2337 length and 2 for maximum value ARG can have. */
/* NOTE(review): elided chunk -- the return type (presumably bool), braces,
   declarations (VAL, VAR, DEF_STMT, I), the TYPE == 2 vs. strlen
   branches, several returns, and switch case labels (GIMPLE_ASSIGN,
   GIMPLE_PHI, default -- inferred from the visible bodies) are in the
   dropped lines.  */
2340 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
2345 if (TREE_CODE (arg) != SSA_NAME)
/* Non-SSA operands: handle COND_EXPR by requiring both arms to agree.  */
2347 if (TREE_CODE (arg) == COND_EXPR)
2348 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
2349 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
2350 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
2351 else if (TREE_CODE (arg) == ADDR_EXPR
2352 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
2353 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
2355 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
2356 if (TREE_CODE (aop0) == INDIRECT_REF
2357 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
2358 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
2359 length, visited, type);
2365 if (TREE_CODE (val) != INTEGER_CST
2366 || tree_int_cst_sgn (val) < 0)
2370 val = c_strlen (arg, 1);
/* Merge VAL into *LENGTH: for max-style queries keep the larger value,
   for exact length require equality.  */
2378 if (TREE_CODE (*length) != INTEGER_CST
2379 || TREE_CODE (val) != INTEGER_CST)
2382 if (tree_int_cst_lt (*length, val))
2386 else if (simple_cst_equal (val, *length) != 1)
2394 /* If we were already here, break the infinite cycle. */
2395 if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
2397 bitmap_set_bit (visited, SSA_NAME_VERSION (arg))
2400 def_stmt = SSA_NAME_DEF_STMT (var);
2402 switch (gimple_code (def_stmt))
2405 /* The RHS of the statement defining VAR must either have a
2406 constant length or come from another SSA_NAME with a constant
2408 if (gimple_assign_single_p (def_stmt)
2409 || gimple_assign_unary_nop_p (def_stmt))
2411 tree rhs = gimple_assign_rhs1 (def_stmt);
2412 return get_maxval_strlen (rhs, length, visited, type);
2418 /* All the arguments of the PHI node must have the same constant
2422 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
2424 tree arg = gimple_phi_arg (def_stmt, i)->def;
2426 /* If this PHI has itself as an argument, we cannot
2427 determine the string length of this argument. However,
2428 if we can find a constant string length for the other
2429 PHI args then we can still be sure that this is a
2430 constant string length. So be optimistic and just
2431 continue with the next argument. */
2432 if (arg == gimple_phi_result (def_stmt))
2435 if (!get_maxval_strlen (arg, length, visited, type))
2447 /* Fold builtin call in statement STMT. Returns a simplified tree.
2448 We may return a non-constant expression, including another call
2449 to a different function and with different arguments, e.g.,
2450 substituting memcpy for strcpy when the string length is known.
2451 Note that some builtins expand into inline code that may not
2452 be valid in GIMPLE. Callers must take care. */
/* NOTE(review): elided chunk -- the return type, braces, declarations
   (CALLEE, A, ARG_IDX, TYPE, NARGS, VISITED, IGNORE, NEW_VAL), the code
   assigning ARG_IDX/TYPE per builtin in the first switch, several break
   and return statements, and the final return are in the dropped
   lines.  */
2455 ccp_fold_builtin (gimple stmt)
2457 tree result, val[3];
2464 gcc_assert (is_gimple_call (stmt));
/* If the call has no LHS its value is unused; folders can then discard
   pure results (see fold_ignored_result at the end).  */
2466 ignore = (gimple_call_lhs (stmt) == NULL);
2468 /* First try the generic builtin folder. If that succeeds, return the
2470 result = fold_call_stmt (stmt, ignore);
2474 STRIP_NOPS (result);
2478 /* Ignore MD builtins. */
2479 callee = gimple_call_fndecl (stmt);
2480 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
2483 /* If the builtin could not be folded, and it has no argument list,
2485 nargs = gimple_call_num_args (stmt);
2489 /* Limit the work only for builtins we know how to simplify. */
/* This first switch selects which argument's string length / value to
   compute (the assignments to ARG_IDX and TYPE are elided).  */
2490 switch (DECL_FUNCTION_CODE (callee))
2492 case BUILT_IN_STRLEN:
2493 case BUILT_IN_FPUTS:
2494 case BUILT_IN_FPUTS_UNLOCKED:
2498 case BUILT_IN_STRCPY:
2499 case BUILT_IN_STRNCPY:
2503 case BUILT_IN_MEMCPY_CHK:
2504 case BUILT_IN_MEMPCPY_CHK:
2505 case BUILT_IN_MEMMOVE_CHK:
2506 case BUILT_IN_MEMSET_CHK:
2507 case BUILT_IN_STRNCPY_CHK:
2511 case BUILT_IN_STRCPY_CHK:
2512 case BUILT_IN_STPCPY_CHK:
2516 case BUILT_IN_SNPRINTF_CHK:
2517 case BUILT_IN_VSNPRINTF_CHK:
2525 if (arg_idx >= nargs)
2528 /* Try to use the dataflow information gathered by the CCP process. */
2529 visited = BITMAP_ALLOC (NULL);
2530 bitmap_clear (visited);
2532 memset (val, 0, sizeof (val));
2533 a = gimple_call_arg (stmt, arg_idx);
2534 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
2535 val[arg_idx] = NULL_TREE;
2537 BITMAP_FREE (visited);
/* Second switch: dispatch to the per-builtin folder with the computed
   length/value information.  */
2540 switch (DECL_FUNCTION_CODE (callee))
2542 case BUILT_IN_STRLEN:
2543 if (val[0] && nargs == 1)
2546 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
2548 /* If the result is not a valid gimple value, or not a cast
2549 of a valid gimple value, then we can not use the result. */
2550 if (is_gimple_val (new_val)
2551 || (is_gimple_cast (new_val)
2552 && is_gimple_val (TREE_OPERAND (new_val, 0))))
2557 case BUILT_IN_STRCPY:
2558 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
2559 result = fold_builtin_strcpy (callee,
2560 gimple_call_arg (stmt, 0),
2561 gimple_call_arg (stmt, 1),
2565 case BUILT_IN_STRNCPY:
2566 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2567 result = fold_builtin_strncpy (callee,
2568 gimple_call_arg (stmt, 0),
2569 gimple_call_arg (stmt, 1),
2570 gimple_call_arg (stmt, 2),
2574 case BUILT_IN_FPUTS:
2576 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2577 gimple_call_arg (stmt, 1),
2578 ignore, false, val[0]);
2581 case BUILT_IN_FPUTS_UNLOCKED:
2583 result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
2584 gimple_call_arg (stmt, 1),
2585 ignore, true, val[0]);
2588 case BUILT_IN_MEMCPY_CHK:
2589 case BUILT_IN_MEMPCPY_CHK:
2590 case BUILT_IN_MEMMOVE_CHK:
2591 case BUILT_IN_MEMSET_CHK:
2592 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2593 result = fold_builtin_memory_chk (callee,
2594 gimple_call_arg (stmt, 0),
2595 gimple_call_arg (stmt, 1),
2596 gimple_call_arg (stmt, 2),
2597 gimple_call_arg (stmt, 3),
2599 DECL_FUNCTION_CODE (callee));
2602 case BUILT_IN_STRCPY_CHK:
2603 case BUILT_IN_STPCPY_CHK:
2604 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
2605 result = fold_builtin_stxcpy_chk (callee,
2606 gimple_call_arg (stmt, 0),
2607 gimple_call_arg (stmt, 1),
2608 gimple_call_arg (stmt, 2),
2610 DECL_FUNCTION_CODE (callee));
2613 case BUILT_IN_STRNCPY_CHK:
2614 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
2615 result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
2616 gimple_call_arg (stmt, 1),
2617 gimple_call_arg (stmt, 2),
2618 gimple_call_arg (stmt, 3),
2622 case BUILT_IN_SNPRINTF_CHK:
2623 case BUILT_IN_VSNPRINTF_CHK:
2624 if (val[1] && is_gimple_val (val[1]))
2625 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
2626 DECL_FUNCTION_CODE (callee));
2633 if (result && ignore)
2634 result = fold_ignored_result (result);
2638 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
2639 replacement rhs for the statement or NULL_TREE if no simplification
2640 could be made. It is assumed that the operands have been previously
/* NOTE(review): this excerpt has elided lines (gaps in the embedded
   numbering, e.g. the declaration of RESULT and several returns/braces
   are not visible) -- verify against the complete source file.  */
2644 fold_gimple_assign (gimple_stmt_iterator *si)
2646 gimple stmt = gsi_stmt (*si);
2647 enum tree_code subcode = gimple_assign_rhs_code (stmt);
/* Dispatch on the shape of the RHS: single operand, unary or binary.  */
2651 switch (get_gimple_rhs_class (subcode))
2653 case GIMPLE_SINGLE_RHS:
2655 tree rhs = gimple_assign_rhs1 (stmt);
2657 /* Try to fold a conditional expression. */
2658 if (TREE_CODE (rhs) == COND_EXPR)
2660 tree temp = fold (COND_EXPR_COND (rhs));
2661 if (temp != COND_EXPR_COND (rhs))
2662 result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
2663 COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
2666 /* If we couldn't fold the RHS, hand over to the generic
2668 if (result == NULL_TREE)
2669 result = fold (rhs);
2671 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
2672 that may have been added by fold, and "useless" type
2673 conversions that might now be apparent due to propagation. */
2674 STRIP_USELESS_TYPE_CONVERSION (result);
2676 if (result != rhs && valid_gimple_rhs_p (result))
2679 /* It is possible that fold_stmt_r simplified the RHS.
2680 Make sure that the subcode of this statement still
2681 reflects the principal operator of the rhs operand. */
2686 case GIMPLE_UNARY_RHS:
2688 tree rhs = gimple_assign_rhs1 (stmt);
2690 result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
2693 /* If the operation was a conversion do _not_ mark a
2694 resulting constant with TREE_OVERFLOW if the original
2695 constant was not. These conversions have implementation
2696 defined behavior and retaining the TREE_OVERFLOW flag
2697 here would confuse later passes such as VRP. */
2698 if (CONVERT_EXPR_CODE_P (subcode)
2699 && TREE_CODE (result) == INTEGER_CST
2700 && TREE_CODE (rhs) == INTEGER_CST)
2701 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
2703 STRIP_USELESS_TYPE_CONVERSION (result);
2704 if (valid_gimple_rhs_p (result))
/* For pointer-to-pointer conversions, see whether the source can be
   expressed as an address with a zero offset.  */
2707 else if (CONVERT_EXPR_CODE_P (subcode)
2708 && POINTER_TYPE_P (gimple_expr_type (stmt))
2709 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
2711 tree type = gimple_expr_type (stmt);
2712 tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
2713 integer_zero_node, type);
2720 case GIMPLE_BINARY_RHS:
2721 /* Try to fold pointer addition. */
2722 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2723 result = maybe_fold_stmt_addition (
2724 TREE_TYPE (gimple_assign_lhs (stmt)),
2725 gimple_assign_rhs1 (stmt),
2726 gimple_assign_rhs2 (stmt));
/* NOTE(review): presumably this fold_binary only runs when the pointer
   addition above did not fold -- the guarding condition is on an elided
   line; confirm in the full source.  */
2729 result = fold_binary (subcode,
2730 TREE_TYPE (gimple_assign_lhs (stmt)),
2731 gimple_assign_rhs1 (stmt),
2732 gimple_assign_rhs2 (stmt));
2736 STRIP_USELESS_TYPE_CONVERSION (result);
2737 if (valid_gimple_rhs_p (result))
2740 /* Fold might have produced non-GIMPLE, so if we trust it blindly
2741 we lose canonicalization opportunities. Do not go again
2742 through fold here though, or the same non-GIMPLE will be
/* Canonicalize operand order for commutative operations without
   re-folding.  */
2744 if (commutative_tree_code (subcode)
2745 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
2746 gimple_assign_rhs2 (stmt), false))
2747 return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
2748 gimple_assign_rhs2 (stmt),
2749 gimple_assign_rhs1 (stmt));
2753 case GIMPLE_INVALID_RHS:
2760 /* Attempt to fold a conditional statement. Return true if any changes were
2761 made. We only attempt to fold the condition expression, and do not perform
2762 any transformation that would require alteration of the cfg. It is
2763 assumed that the operands have been previously folded. */
2766 fold_gimple_cond (gimple stmt)
/* NOTE(review): fold_binary's second (type) argument is on an elided
   line here; verify against the full source.  */
2768 tree result = fold_binary (gimple_cond_code (stmt),
2770 gimple_cond_lhs (stmt),
2771 gimple_cond_rhs (stmt));
2775 STRIP_USELESS_TYPE_CONVERSION (result);
/* Only install the folded form if it is still a valid GIMPLE
   conditional expression.  */
2776 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
2778 gimple_cond_set_condition_from_tree (stmt, result);
2787 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2788 The statement may be replaced by another statement, e.g., if the call
2789 simplifies to a constant value. Return true if any changes were made.
2790 It is assumed that the operands have been previously folded. */
2793 fold_gimple_call (gimple_stmt_iterator *gsi)
2795 gimple stmt = gsi_stmt (*gsi);
2797 tree callee = gimple_call_fndecl (stmt);
2799 /* Check for builtins that CCP can handle using information not
2800 available in the generic fold routines. */
2801 if (callee && DECL_BUILT_IN (callee))
2803 tree result = ccp_fold_builtin (stmt);
/* NOTE(review): the guard checking RESULT for NULL before this return
   appears to be on an elided line; confirm in the full source.  */
2806 return update_call_from_tree (gsi, result);
2810 /* Check for resolvable OBJ_TYPE_REF. The only sorts we can resolve
2811 here are when we've propagated the address of a decl into the
2813 /* ??? Should perhaps do this in fold proper. However, doing it
2814 there requires that we create a new CALL_EXPR, and that requires
2815 copying EH region info to the new node. Easier to just do it
2816 here where we can just smash the call operand. */
2817 /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
2818 callee = gimple_call_fn (stmt);
2819 if (TREE_CODE (callee) == OBJ_TYPE_REF
2820 && lang_hooks.fold_obj_type_ref
2821 && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
2822 && DECL_P (TREE_OPERAND
2823 (OBJ_TYPE_REF_OBJECT (callee), 0)))
2827 /* ??? Caution: Broken ADDR_EXPR semantics means that
2828 looking at the type of the operand of the addr_expr
2829 can yield an array type. See silly exception in
2830 check_pointer_types_r. */
/* Ask the frontend to devirtualize: T becomes the static type of the
   object, then the resolved function if the hook succeeds.  */
2831 t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
2832 t = lang_hooks.fold_obj_type_ref (callee, t);
2835 gimple_call_set_fn (stmt, t);
2844 /* Fold the statement pointed to by GSI. In some cases, this function may
2845 replace the whole statement with a new one. Returns true iff folding
2846 makes any changes. */
2849 fold_stmt (gimple_stmt_iterator *gsi)
2852 struct fold_stmt_r_data fold_stmt_r_data;
2853 struct walk_stmt_info wi;
2855 bool changed = false;
2856 bool inside_addr_expr = false;
2858 gimple stmt = gsi_stmt (*gsi);
/* Package the state that the fold_stmt_r callback updates while
   walking the statement's operands.  */
2860 fold_stmt_r_data.stmt = stmt;
2861 fold_stmt_r_data.changed_p = &changed;
2862 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2864 memset (&wi, 0, sizeof (wi));
2865 wi.info = &fold_stmt_r_data;
2867 /* Fold the individual operands.
2868 For example, fold instances of *&VAR into VAR, etc. */
2869 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2872 /* Fold the main computation performed by the statement. */
2873 switch (gimple_code (stmt))
2877 tree new_rhs = fold_gimple_assign (gsi);
2878 if (new_rhs != NULL_TREE)
2880 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
/* Re-fetch the statement: setting the RHS may have replaced it.  */
2883 stmt = gsi_stmt (*gsi);
2887 changed |= fold_gimple_cond (stmt);
2890 /* The entire statement may be replaced in this case. */
2891 changed |= fold_gimple_call (gsi);
2902 /* Perform the minimal folding on statement STMT. Only operations like
2903 *&x created by constant propagation are handled. The statement cannot
2904 be replaced with a new one. Return true if the statement was
2905 changed, false otherwise. */
2908 fold_stmt_inplace (gimple stmt)
2911 struct fold_stmt_r_data fold_stmt_r_data;
2912 struct walk_stmt_info wi;
2913 gimple_stmt_iterator si;
2915 bool changed = false;
2916 bool inside_addr_expr = false;
/* Same walker state setup as fold_stmt; only in-place simplifications
   are permitted here.  */
2918 fold_stmt_r_data.stmt = stmt;
2919 fold_stmt_r_data.changed_p = &changed;
2920 fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
2922 memset (&wi, 0, sizeof (wi));
2923 wi.info = &fold_stmt_r_data;
2925 /* Fold the individual operands.
2926 For example, fold instances of *&VAR into VAR, etc.
2928 It appears that, at one time, maybe_fold_stmt_indirect
2929 would cause the walk to return non-null in order to
2930 signal that the entire statement should be replaced with
2931 a call to _builtin_trap. This functionality is currently
2932 disabled, as noted in a FIXME, and cannot be supported here. */
2933 res = walk_gimple_op (stmt, fold_stmt_r, &wi);
2936 /* Fold the main computation performed by the statement. */
2937 switch (gimple_code (stmt))
2941 unsigned old_num_ops;
2943 old_num_ops = gimple_num_ops (stmt);
2944 si = gsi_for_stmt (stmt);
2945 new_rhs = fold_gimple_assign (&si);
/* Only accept the new RHS if it needs no more operands than the
   current statement has room for -- we must not grow STMT in place.  */
2946 if (new_rhs != NULL_TREE
2947 && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
2949 gimple_assign_set_rhs_from_tree (&si, new_rhs);
/* In-place folding must never have replaced the statement.  */
2952 gcc_assert (gsi_stmt (si) == stmt);
2956 changed |= fold_gimple_cond (stmt);
2966 /* Try to optimize out __builtin_stack_restore. Optimize it out
2967 if there is another __builtin_stack_restore in the same basic
2968 block and no calls or ASM_EXPRs are in between, or if this block's
2969 only outgoing edge is to EXIT_BLOCK and there are no calls or
2970 ASM_EXPRs after this __builtin_stack_restore. */
2973 optimize_stack_restore (gimple_stmt_iterator i)
2976 gimple stmt, stack_save;
2977 gimple_stmt_iterator stack_save_gsi;
2979 basic_block bb = gsi_bb (i);
2980 gimple call = gsi_stmt (i);
/* Sanity-check the shape of the call: one pointer-typed SSA_NAME
   argument (the saved stack pointer).  */
2982 if (gimple_code (call) != GIMPLE_CALL
2983 || gimple_call_num_args (call) != 1
2984 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2985 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
/* Scan forward in the block for a later stack_restore, bailing out on
   any ASM or non-builtin call in between.  */
2988 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2990 stmt = gsi_stmt (i);
2991 if (gimple_code (stmt) == GIMPLE_ASM)
2993 if (gimple_code (stmt) != GIMPLE_CALL)
2996 callee = gimple_call_fndecl (stmt);
2997 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3000 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
/* NOTE(review): the condition these two lines belong to is partly on
   elided lines; they appear to check that the block falls through
   straight to the function exit.  */
3005 && (! single_succ_p (bb)
3006 || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
/* The argument must come from a matching, single-use, non-throwing
   __builtin_stack_save call.  */
3009 stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
3010 if (gimple_code (stack_save) != GIMPLE_CALL
3011 || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
3012 || stmt_could_throw_p (stack_save)
3013 || !has_single_use (gimple_call_arg (call, 0)))
3016 callee = gimple_call_fndecl (stack_save);
3018 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3019 || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
3020 || gimple_call_num_args (stack_save) != 0)
/* Replace the stack_save with a null-pointer assignment so DCE can
   remove both halves of the save/restore pair.  */
3023 stack_save_gsi = gsi_for_stmt (stack_save);
3024 push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3025 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
3026 if (!update_call_from_tree (&stack_save_gsi, rhs))
3028 discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3031 pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
3033 /* No effect, so the statement will be deleted. */
3034 return integer_zero_node;
3037 /* If va_list type is a simple pointer and nothing special is needed,
3038 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
3039 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
3040 pointer assignment. */
3043 optimize_stdarg_builtin (gimple call)
3045 tree callee, lhs, rhs, cfun_va_list;
3046 bool va_list_simple_ptr;
3048 if (gimple_code (call) != GIMPLE_CALL)
3051 callee = gimple_call_fndecl (call);
/* The optimization only applies when the target's va_list is a plain
   void* or char* pointer.  */
3053 cfun_va_list = targetm.fn_abi_va_list (callee);
3054 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
3055 && (TREE_TYPE (cfun_va_list) == void_type_node
3056 || TREE_TYPE (cfun_va_list) == char_type_node);
3058 switch (DECL_FUNCTION_CODE (callee))
3060 case BUILT_IN_VA_START:
/* Bail if the target expands va_start specially or next_arg is
   unavailable.  */
3061 if (!va_list_simple_ptr
3062 || targetm.expand_builtin_va_start != NULL
3063 || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
3066 if (gimple_call_num_args (call) != 2)
/* First argument must be a pointer to the va_list variant in use.  */
3069 lhs = gimple_call_arg (call, 0)
3070 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3071 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3072 != TYPE_MAIN_VARIANT (cfun_va_list))
/* Build "ap = __builtin_next_arg (0)".  */
3075 lhs = build_fold_indirect_ref (lhs);
3076 rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
3077 1, integer_zero_node);
3078 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3079 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3081 case BUILT_IN_VA_COPY:
3082 if (!va_list_simple_ptr)
3085 if (gimple_call_num_args (call) != 2)
3088 lhs = gimple_call_arg (call, 0);
3089 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
3090 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
3091 != TYPE_MAIN_VARIANT (cfun_va_list))
/* Build "dst_ap = src_ap" -- a simple pointer copy.  */
3094 lhs = build_fold_indirect_ref (lhs);
3095 rhs = gimple_call_arg (call, 1);
3096 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
3097 != TYPE_MAIN_VARIANT (cfun_va_list))
3100 rhs = fold_convert (TREE_TYPE (lhs), rhs);
3101 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
3103 case BUILT_IN_VA_END:
3104 /* No effect, so the statement will be deleted. */
3105 return integer_zero_node;
3112 /* Convert EXPR into a GIMPLE value suitable for substitution on the
3113 RHS of an assignment. Insert the necessary statements before
3114 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
3115 is replaced. If the call is expected to produces a result, then it
3116 is replaced by an assignment of the new RHS to the result variable.
3117 If the result is to be ignored, then the call is replaced by a
3121 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
3124 tree tmp = NULL_TREE; /* Silence warning. */
3125 gimple stmt, new_stmt;
3126 gimple_stmt_iterator i;
3127 gimple_seq stmts = gimple_seq_alloc();
3128 struct gimplify_ctx gctx;
3130 stmt = gsi_stmt (*si_p);
3132 gcc_assert (is_gimple_call (stmt));
3134 lhs = gimple_call_lhs (stmt);
/* Gimplify EXPR into STMTS; capture its value in TMP only when the
   call's result is actually used.  */
3136 push_gimplify_context (&gctx);
3138 if (lhs == NULL_TREE)
3139 gimplify_and_add (expr, &stmts);
3141 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
3143 pop_gimplify_context (NULL);
3145 if (gimple_has_location (stmt))
3146 annotate_all_with_location (stmts, gimple_location (stmt));
3148 /* The replacement can expose previously unreferenced variables. */
3149 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
3151 new_stmt = gsi_stmt (i);
3152 find_new_referenced_vars (new_stmt);
3153 gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
3154 mark_symbols_for_renaming (new_stmt);
/* Replace the original call: with a no-op when the result is ignored,
   otherwise with "lhs = tmp", preserving virtual operands and SSA
   definitions.  */
3158 if (lhs == NULL_TREE)
3159 new_stmt = gimple_build_nop ();
3162 new_stmt = gimple_build_assign (lhs, tmp);
3163 copy_virtual_operands (new_stmt, stmt);
3164 move_ssa_defining_stmt_for_defs (new_stmt, stmt);
3167 gimple_set_location (new_stmt, gimple_location (stmt));
3168 gsi_replace (si_p, new_stmt, false);
3171 /* A simple pass that attempts to fold all builtin functions. This pass
3172 is run after we've propagated as many constants as we can. */
3175 execute_fold_all_builtins (void)
3177 bool cfg_changed = false;
3179 unsigned int todoflags = 0;
/* NOTE(review): the FOR_EACH_BB loop header is on an elided line;
   the iterator below walks each statement of each basic block.  */
3183 gimple_stmt_iterator i;
3184 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3186 gimple stmt, old_stmt;
3187 tree callee, result;
3188 enum built_in_function fcode;
3190 stmt = gsi_stmt (i);
/* Only normal builtin calls are of interest.  */
3192 if (gimple_code (stmt) != GIMPLE_CALL)
3197 callee = gimple_call_fndecl (stmt);
3198 if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
3203 fcode = DECL_FUNCTION_CODE (callee);
3205 result = ccp_fold_builtin (stmt);
3208 gimple_remove_stmt_histograms (cfun, stmt);
/* Builtins that ccp_fold_builtin could not fold but that this pass
   can still resolve specially.  */
3211 switch (DECL_FUNCTION_CODE (callee))
3213 case BUILT_IN_CONSTANT_P:
3214 /* Resolve __builtin_constant_p. If it hasn't been
3215 folded to integer_one_node by now, it's fairly
3216 certain that the value simply isn't constant. */
3217 result = integer_zero_node;
3220 case BUILT_IN_STACK_RESTORE:
3221 result = optimize_stack_restore (i);
3227 case BUILT_IN_VA_START:
3228 case BUILT_IN_VA_END:
3229 case BUILT_IN_VA_COPY:
3230 /* These shouldn't be folded before pass_stdarg. */
3231 result = optimize_stdarg_builtin (stmt);
3241 if (dump_file && (dump_flags & TDF_DETAILS))
3243 fprintf (dump_file, "Simplified\n ");
3244 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3248 push_stmt_changes (gsi_stmt_ptr (&i));
/* If the simple in-place replacement fails, gimplify RESULT into a
   statement sequence; that may create new aliases.  */
3250 if (!update_call_from_tree (&i, result))
3252 gimplify_and_update_call_from_tree (&i, result);
3253 todoflags |= TODO_rebuild_alias;
3256 stmt = gsi_stmt (i);
3257 pop_stmt_changes (gsi_stmt_ptr (&i));
/* Folding may have removed EH side effects; purge dead EH edges.  */
3259 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3260 && gimple_purge_dead_eh_edges (bb))
3263 if (dump_file && (dump_flags & TDF_DETAILS))
3265 fprintf (dump_file, "to\n ");
3266 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3267 fprintf (dump_file, "\n");
3270 /* Retry the same statement if it changed into another
3271 builtin, there might be new opportunities now. */
3272 if (gimple_code (stmt) != GIMPLE_CALL)
3277 callee = gimple_call_fndecl (stmt);
3279 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
3280 || DECL_FUNCTION_CODE (callee) == fcode)
3285 /* Delete unreachable blocks. */
3287 todoflags |= TODO_cleanup_cfg;
3293 struct gimple_opt_pass pass_fold_builtins =
3299 execute_fold_all_builtins, /* execute */
3302 0, /* static_pass_number */
3304 PROP_cfg | PROP_ssa, /* properties_required */
3305 0, /* properties_provided */
3306 0, /* properties_destroyed */
3307 0, /* todo_flags_start */
3310 | TODO_update_ssa /* todo_flags_finish */