1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
44 CONSTANT -> V_i has been found to hold a constant
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out the
59 assignment into the worklists, so that statements that use V_i
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of consider it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
113 For instance, consider the following code fragment:
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
158 # A_5 = PHI (A_4, A_2);
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
190 #include "coretypes.h"
197 #include "basic-block.h"
200 #include "function.h"
201 #include "diagnostic.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
213 /* Possible lattice values. */
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
226 memory reference used to store (i.e., the LHS of the assignment
228 static prop_value_t *const_val;
230 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
233 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
235 switch (val.lattice_val)
238 fprintf (outf, "%sUNINITIALIZED", prefix);
241 fprintf (outf, "%sUNDEFINED", prefix);
244 fprintf (outf, "%sVARYING", prefix);
247 fprintf (outf, "%sCONSTANT ", prefix);
248 print_generic_expr (outf, val.value, dump_flags);
256 /* Print lattice value VAL to stderr. */
258 void debug_lattice_value (prop_value_t val);
261 debug_lattice_value (prop_value_t val)
263 dump_lattice_value (stderr, "", val);
264 fprintf (stderr, "\n");
269 /* If SYM is a constant variable with known value, return the value.
270 NULL_TREE is returned otherwise. */
273 get_symbol_constant_value (tree sym)
275 if (TREE_STATIC (sym)
276 && TREE_READONLY (sym)
279 tree val = DECL_INITIAL (sym);
282 STRIP_USELESS_TYPE_CONVERSION (val);
283 if (is_gimple_min_invariant (val))
286 /* Variables declared 'const' without an initializer
287 have zero as the initializer if they may not be
288 overridden at link or run time. */
290 && !DECL_EXTERNAL (sym)
291 && targetm.binds_local_p (sym)
292 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
293 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
294 return fold_convert (TREE_TYPE (sym), integer_zero_node);
300 /* Compute a default value for variable VAR and store it in the
301 CONST_VAL array. The following rules are used to get default
304 1- Global and static variables that are declared constant are
307 2- Any other value is considered UNDEFINED. This is useful when
308 considering PHI nodes. PHI arguments that are undefined do not
309 change the constant value of the PHI node, which allows for more
310 constants to be propagated.
312 3- Variables defined by statements other than assignments and PHI
313 nodes are considered VARYING.
315 4- Initial values of variables that are not GIMPLE registers are
316 considered VARYING. */
319 get_default_value (tree var)
321 tree sym = SSA_NAME_VAR (var);
322 prop_value_t val = { UNINITIALIZED, NULL_TREE };
325 stmt = SSA_NAME_DEF_STMT (var);
327 if (gimple_nop_p (stmt))
329 /* Variables defined by an empty statement are those used
330 before being initialized. If VAR is a local variable, we
331 can assume initially that it is UNDEFINED, otherwise we must
332 consider it VARYING. */
333 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
334 val.lattice_val = UNDEFINED;
336 val.lattice_val = VARYING;
338 else if (is_gimple_assign (stmt)
339 /* Value-returning GIMPLE_CALL statements assign to
340 a variable, and are treated similarly to GIMPLE_ASSIGN. */
341 || (is_gimple_call (stmt)
342 && gimple_call_lhs (stmt) != NULL_TREE)
343 || gimple_code (stmt) == GIMPLE_PHI)
346 if (gimple_assign_single_p (stmt)
347 && DECL_P (gimple_assign_rhs1 (stmt))
348 && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
350 val.lattice_val = CONSTANT;
354 /* Any other variable defined by an assignment or a PHI node
355 is considered UNDEFINED. */
356 val.lattice_val = UNDEFINED;
360 /* Otherwise, VAR will never take on a constant value. */
361 val.lattice_val = VARYING;
368 /* Get the constant value associated with variable VAR. */
370 static inline prop_value_t *
375 if (const_val == NULL)
378 val = &const_val[SSA_NAME_VERSION (var)];
379 if (val->lattice_val == UNINITIALIZED)
380 *val = get_default_value (var);
385 /* Sets the value associated with VAR to VARYING. */
388 set_value_varying (tree var)
390 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
392 val->lattice_val = VARYING;
393 val->value = NULL_TREE;
396 /* For float types, modify the value of VAL to make ccp work correctly
397 for non-standard values (-0, NaN):
399 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
400 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
401 This is to fix the following problem (see PR 29921): Suppose we have
405 and we set value of y to NaN. This causes value of x to be set to NaN.
406 When we later determine that y is in fact VARYING, fold uses the fact
407 that HONOR_NANS is false, and we try to change the value of x to 0,
408 causing an ICE. With HONOR_NANS being false, the real appearance of
409 NaN would cause undefined behavior, though, so claiming that y (and x)
410 are UNDEFINED initially is correct. */
413 canonicalize_float_value (prop_value_t *val)
415 enum machine_mode mode;
419 if (val->lattice_val != CONSTANT
420 || TREE_CODE (val->value) != REAL_CST)
423 d = TREE_REAL_CST (val->value);
424 type = TREE_TYPE (val->value);
425 mode = TYPE_MODE (type);
427 if (!HONOR_SIGNED_ZEROS (mode)
428 && REAL_VALUE_MINUS_ZERO (d))
430 val->value = build_real (type, dconst0);
434 if (!HONOR_NANS (mode)
435 && REAL_VALUE_ISNAN (d))
437 val->lattice_val = UNDEFINED;
443 /* Set the value for variable VAR to NEW_VAL. Return true if the new
444 value is different from VAR's previous value. */
447 set_lattice_value (tree var, prop_value_t new_val)
449 prop_value_t *old_val = get_value (var);
451 canonicalize_float_value (&new_val);
453 /* Lattice transitions must always be monotonically increasing in
454 value. If *OLD_VAL and NEW_VAL are the same, return false to
455 inform the caller that this was a non-transition. */
457 gcc_assert (old_val->lattice_val < new_val.lattice_val
458 || (old_val->lattice_val == new_val.lattice_val
459 && ((!old_val->value && !new_val.value)
460 || operand_equal_p (old_val->value, new_val.value, 0))));
462 if (old_val->lattice_val != new_val.lattice_val)
464 if (dump_file && (dump_flags & TDF_DETAILS))
466 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
467 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
472 gcc_assert (new_val.lattice_val != UNDEFINED);
480 /* Return the likely CCP lattice value for STMT.
482 If STMT has no operands, then return CONSTANT.
484 Else if undefinedness of operands of STMT cause its value to be
485 undefined, then return UNDEFINED.
487 Else if any operands of STMT are constants, then return CONSTANT.
489 Else return VARYING. */
492 likely_value (gimple stmt)
494 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
499 enum gimple_code code = gimple_code (stmt);
501 /* This function appears to be called only for assignments, calls,
502 conditionals, and switches, due to the logic in visit_stmt. */
503 gcc_assert (code == GIMPLE_ASSIGN
504 || code == GIMPLE_CALL
505 || code == GIMPLE_COND
506 || code == GIMPLE_SWITCH);
508 /* If the statement has volatile operands, it won't fold to a
510 if (gimple_has_volatile_ops (stmt))
513 /* Arrive here for more complex cases. */
514 has_constant_operand = false;
515 has_undefined_operand = false;
516 all_undefined_operands = true;
517 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
519 prop_value_t *val = get_value (use);
521 if (val->lattice_val == UNDEFINED)
522 has_undefined_operand = true;
524 all_undefined_operands = false;
526 if (val->lattice_val == CONSTANT)
527 has_constant_operand = true;
530 /* There may be constants in regular rhs operands. */
531 for (i = is_gimple_call (stmt) + gimple_has_lhs (stmt);
532 i < gimple_num_ops (stmt); ++i)
534 tree op = gimple_op (stmt, i);
535 if (!op || TREE_CODE (op) == SSA_NAME)
537 if (is_gimple_min_invariant (op))
538 has_constant_operand = true;
541 /* If the operation combines operands like COMPLEX_EXPR make sure to
542 not mark the result UNDEFINED if only one part of the result is
544 if (has_undefined_operand && all_undefined_operands)
546 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
548 switch (gimple_assign_rhs_code (stmt))
550 /* Unary operators are handled with all_undefined_operands. */
553 case POINTER_PLUS_EXPR:
554 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
555 Not bitwise operators, one VARYING operand may specify the
556 result completely. Not logical operators for the same reason.
557 Not COMPLEX_EXPR as one VARYING operand makes the result partly
558 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
559 the undefined operand may be promoted. */
566 /* If there was an UNDEFINED operand but the result may be not UNDEFINED
567 fall back to VARYING even if there were CONSTANT operands. */
568 if (has_undefined_operand)
571 /* We do not consider virtual operands here -- load from read-only
572 memory may have only VARYING virtual operands, but still be
574 if (has_constant_operand
575 || gimple_references_memory_p (stmt))
581 /* Returns true if STMT cannot be constant. */
584 surely_varying_stmt_p (gimple stmt)
586 /* If the statement has operands that we cannot handle, it cannot be
588 if (gimple_has_volatile_ops (stmt))
591 /* If it is a call and does not return a value or is not a
592 builtin and not an indirect call, it is varying. */
593 if (is_gimple_call (stmt))
596 if (!gimple_call_lhs (stmt)
597 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
598 && !DECL_BUILT_IN (fndecl)))
602 /* Any other store operation is not interesting. */
603 else if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
606 /* Anything other than assignments and conditional jumps are not
607 interesting for CCP. */
608 if (gimple_code (stmt) != GIMPLE_ASSIGN
609 && gimple_code (stmt) != GIMPLE_COND
610 && gimple_code (stmt) != GIMPLE_SWITCH
611 && gimple_code (stmt) != GIMPLE_CALL)
617 /* Initialize local data structures for CCP. */
620 ccp_initialize (void)
624 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
626 /* Initialize simulation flags for PHI nodes and statements. */
629 gimple_stmt_iterator i;
631 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
633 gimple stmt = gsi_stmt (i);
634 bool is_varying = surely_varying_stmt_p (stmt);
641 /* If the statement will not produce a constant, mark
642 all its outputs VARYING. */
643 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
644 set_value_varying (def);
646 prop_set_simulate_again (stmt, !is_varying);
650 /* Now process PHI nodes. We never clear the simulate_again flag on
651 phi nodes, since we do not know which edges are executable yet,
652 except for phi nodes for virtual operands when we do not do store ccp. */
655 gimple_stmt_iterator i;
657 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
659 gimple phi = gsi_stmt (i);
661 if (!is_gimple_reg (gimple_phi_result (phi)))
662 prop_set_simulate_again (phi, false);
664 prop_set_simulate_again (phi, true);
670 /* Do final substitution of propagated values, cleanup the flowgraph and
671 free allocated storage.
673 Return TRUE when something was optimized. */
678 /* Perform substitutions based on the known constant values. */
679 bool something_changed = substitute_and_fold (const_val, false);
683 return something_changed;;
687 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
690 any M UNDEFINED = any
691 any M VARYING = VARYING
692 Ci M Cj = Ci if (i == j)
693 Ci M Cj = VARYING if (i != j)
697 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
699 if (val1->lattice_val == UNDEFINED)
701 /* UNDEFINED M any = any */
704 else if (val2->lattice_val == UNDEFINED)
706 /* any M UNDEFINED = any
707 Nothing to do. VAL1 already contains the value we want. */
710 else if (val1->lattice_val == VARYING
711 || val2->lattice_val == VARYING)
713 /* any M VARYING = VARYING. */
714 val1->lattice_val = VARYING;
715 val1->value = NULL_TREE;
717 else if (val1->lattice_val == CONSTANT
718 && val2->lattice_val == CONSTANT
719 && simple_cst_equal (val1->value, val2->value) == 1)
721 /* Ci M Cj = Ci if (i == j)
722 Ci M Cj = VARYING if (i != j)
724 If these two values come from memory stores, make sure that
725 they come from the same memory reference. */
726 val1->lattice_val = CONSTANT;
727 val1->value = val1->value;
731 /* Any other combination is VARYING. */
732 val1->lattice_val = VARYING;
733 val1->value = NULL_TREE;
738 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
739 lattice values to determine PHI_NODE's lattice value. The value of a
740 PHI node is determined calling ccp_lattice_meet with all the arguments
741 of the PHI node that are incoming via executable edges. */
743 static enum ssa_prop_result
744 ccp_visit_phi_node (gimple phi)
747 prop_value_t *old_val, new_val;
749 if (dump_file && (dump_flags & TDF_DETAILS))
751 fprintf (dump_file, "\nVisiting PHI node: ");
752 print_gimple_stmt (dump_file, phi, 0, dump_flags);
755 old_val = get_value (gimple_phi_result (phi));
756 switch (old_val->lattice_val)
759 return SSA_PROP_VARYING;
766 new_val.lattice_val = UNDEFINED;
767 new_val.value = NULL_TREE;
774 for (i = 0; i < gimple_phi_num_args (phi); i++)
776 /* Compute the meet operator over all the PHI arguments flowing
777 through executable edges. */
778 edge e = gimple_phi_arg_edge (phi, i);
780 if (dump_file && (dump_flags & TDF_DETAILS))
783 "\n Argument #%d (%d -> %d %sexecutable)\n",
784 i, e->src->index, e->dest->index,
785 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
788 /* If the incoming edge is executable, Compute the meet operator for
789 the existing value of the PHI node and the current PHI argument. */
790 if (e->flags & EDGE_EXECUTABLE)
792 tree arg = gimple_phi_arg (phi, i)->def;
793 prop_value_t arg_val;
795 if (is_gimple_min_invariant (arg))
797 arg_val.lattice_val = CONSTANT;
801 arg_val = *(get_value (arg));
803 ccp_lattice_meet (&new_val, &arg_val);
805 if (dump_file && (dump_flags & TDF_DETAILS))
807 fprintf (dump_file, "\t");
808 print_generic_expr (dump_file, arg, dump_flags);
809 dump_lattice_value (dump_file, "\tValue: ", arg_val);
810 fprintf (dump_file, "\n");
813 if (new_val.lattice_val == VARYING)
818 if (dump_file && (dump_flags & TDF_DETAILS))
820 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
821 fprintf (dump_file, "\n\n");
824 /* Make the transition to the new value. */
825 if (set_lattice_value (gimple_phi_result (phi), new_val))
827 if (new_val.lattice_val == VARYING)
828 return SSA_PROP_VARYING;
830 return SSA_PROP_INTERESTING;
833 return SSA_PROP_NOT_INTERESTING;
836 /* Return true if we may propagate the address expression ADDR into the
837 dereference DEREF and cancel them. */
840 may_propagate_address_into_dereference (tree addr, tree deref)
842 gcc_assert (INDIRECT_REF_P (deref)
843 && TREE_CODE (addr) == ADDR_EXPR);
845 /* Don't propagate if ADDR's operand has incomplete type. */
846 if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
849 /* If the address is invariant then we do not need to preserve restrict
850 qualifications. But we do need to preserve volatile qualifiers until
851 we can annotate the folded dereference itself properly. */
852 if (is_gimple_min_invariant (addr)
853 && (!TREE_THIS_VOLATILE (deref)
854 || TYPE_VOLATILE (TREE_TYPE (addr))))
855 return useless_type_conversion_p (TREE_TYPE (deref),
856 TREE_TYPE (TREE_OPERAND (addr, 0)));
858 /* Else both the address substitution and the folding must result in
859 a valid useless type conversion sequence. */
860 return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
862 && useless_type_conversion_p (TREE_TYPE (deref),
863 TREE_TYPE (TREE_OPERAND (addr, 0))));
866 /* CCP specific front-end to the non-destructive constant folding
869 Attempt to simplify the RHS of STMT knowing that one or more
870 operands are constants.
872 If simplification is possible, return the simplified RHS,
873 otherwise return the original RHS or NULL_TREE. */
876 ccp_fold (gimple stmt)
878 switch (gimple_code (stmt))
882 enum tree_code subcode = gimple_assign_rhs_code (stmt);
884 switch (get_gimple_rhs_class (subcode))
886 case GIMPLE_SINGLE_RHS:
888 tree rhs = gimple_assign_rhs1 (stmt);
889 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
891 if (TREE_CODE (rhs) == SSA_NAME)
893 /* If the RHS is an SSA_NAME, return its known constant value,
895 return get_value (rhs)->value;
897 /* Handle propagating invariant addresses into address operations.
898 The folding we do here matches that in tree-ssa-forwprop.c. */
899 else if (TREE_CODE (rhs) == ADDR_EXPR)
902 base = &TREE_OPERAND (rhs, 0);
903 while (handled_component_p (*base))
904 base = &TREE_OPERAND (*base, 0);
905 if (TREE_CODE (*base) == INDIRECT_REF
906 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
908 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
909 if (val->lattice_val == CONSTANT
910 && TREE_CODE (val->value) == ADDR_EXPR
911 && may_propagate_address_into_dereference
914 /* We need to return a new tree, not modify the IL
915 or share parts of it. So play some tricks to
916 avoid manually building it. */
917 tree ret, save = *base;
918 *base = TREE_OPERAND (val->value, 0);
919 ret = unshare_expr (rhs);
920 recompute_tree_invariant_for_addr_expr (ret);
927 if (kind == tcc_reference)
929 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
930 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
932 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
933 if (val->lattice_val == CONSTANT)
934 return fold_unary (VIEW_CONVERT_EXPR,
935 TREE_TYPE (rhs), val->value);
937 else if (TREE_CODE (rhs) == INDIRECT_REF
938 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
940 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
941 if (val->lattice_val == CONSTANT
942 && TREE_CODE (val->value) == ADDR_EXPR
943 && useless_type_conversion_p (TREE_TYPE (rhs),
944 TREE_TYPE (TREE_TYPE (val->value))))
945 rhs = TREE_OPERAND (val->value, 0);
947 return fold_const_aggregate_ref (rhs);
949 else if (kind == tcc_declaration)
950 return get_symbol_constant_value (rhs);
954 case GIMPLE_UNARY_RHS:
956 /* Handle unary operators that can appear in GIMPLE form.
957 Note that we know the single operand must be a constant,
958 so this should almost always return a simplified RHS. */
959 tree lhs = gimple_assign_lhs (stmt);
960 tree op0 = gimple_assign_rhs1 (stmt);
962 /* Simplify the operand down to a constant. */
963 if (TREE_CODE (op0) == SSA_NAME)
965 prop_value_t *val = get_value (op0);
966 if (val->lattice_val == CONSTANT)
967 op0 = get_value (op0)->value;
970 /* Conversions are useless for CCP purposes if they are
971 value-preserving. Thus the restrictions that
972 useless_type_conversion_p places for pointer type conversions
973 do not apply here. Substitution later will only substitute to
975 if (CONVERT_EXPR_CODE_P (subcode)
976 && POINTER_TYPE_P (TREE_TYPE (lhs))
977 && POINTER_TYPE_P (TREE_TYPE (op0))
978 /* Do not allow differences in volatile qualification
979 as this might get us confused as to whether a
980 propagation destination statement is volatile
981 or not. See PR36988. */
982 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
983 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
986 /* Still try to generate a constant of correct type. */
987 if (!useless_type_conversion_p (TREE_TYPE (lhs),
989 && ((tem = maybe_fold_offset_to_address
990 (op0, integer_zero_node, TREE_TYPE (lhs)))
996 return fold_unary_ignore_overflow (subcode,
997 gimple_expr_type (stmt), op0);
1000 case GIMPLE_BINARY_RHS:
1002 /* Handle binary operators that can appear in GIMPLE form. */
1003 tree op0 = gimple_assign_rhs1 (stmt);
1004 tree op1 = gimple_assign_rhs2 (stmt);
1006 /* Simplify the operands down to constants when appropriate. */
1007 if (TREE_CODE (op0) == SSA_NAME)
1009 prop_value_t *val = get_value (op0);
1010 if (val->lattice_val == CONSTANT)
1014 if (TREE_CODE (op1) == SSA_NAME)
1016 prop_value_t *val = get_value (op1);
1017 if (val->lattice_val == CONSTANT)
1021 /* Fold &foo + CST into an invariant reference if possible. */
1022 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1023 && TREE_CODE (op0) == ADDR_EXPR
1024 && TREE_CODE (op1) == INTEGER_CST)
1026 tree lhs = gimple_assign_lhs (stmt);
1027 tree tem = maybe_fold_offset_to_address (op0, op1,
1029 if (tem != NULL_TREE)
1033 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1044 tree fn = gimple_call_fn (stmt);
1047 if (TREE_CODE (fn) == SSA_NAME)
1049 val = get_value (fn);
1050 if (val->lattice_val == CONSTANT)
1053 if (TREE_CODE (fn) == ADDR_EXPR
1054 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1055 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1057 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1060 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1062 args[i] = gimple_call_arg (stmt, i);
1063 if (TREE_CODE (args[i]) == SSA_NAME)
1065 val = get_value (args[i]);
1066 if (val->lattice_val == CONSTANT)
1067 args[i] = val->value;
1070 call = build_call_array (gimple_call_return_type (stmt),
1071 fn, gimple_call_num_args (stmt), args);
1072 retval = fold_call_expr (call, false);
1074 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1075 STRIP_NOPS (retval);
1083 /* Handle comparison operators that can appear in GIMPLE form. */
1084 tree op0 = gimple_cond_lhs (stmt);
1085 tree op1 = gimple_cond_rhs (stmt);
1086 enum tree_code code = gimple_cond_code (stmt);
1088 /* Simplify the operands down to constants when appropriate. */
1089 if (TREE_CODE (op0) == SSA_NAME)
1091 prop_value_t *val = get_value (op0);
1092 if (val->lattice_val == CONSTANT)
1096 if (TREE_CODE (op1) == SSA_NAME)
1098 prop_value_t *val = get_value (op1);
1099 if (val->lattice_val == CONSTANT)
1103 return fold_binary (code, boolean_type_node, op0, op1);
1108 tree rhs = gimple_switch_index (stmt);
1110 if (TREE_CODE (rhs) == SSA_NAME)
1112 /* If the RHS is an SSA_NAME, return its known constant value,
1114 return get_value (rhs)->value;
1126 /* Return the tree representing the element referenced by T if T is an
1127 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1128 NULL_TREE otherwise. */
1131 fold_const_aggregate_ref (tree t)
1133 prop_value_t *value;
1134 tree base, ctor, idx, field;
1135 unsigned HOST_WIDE_INT cnt;
1138 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
1139 return get_symbol_constant_value (t);
1141 switch (TREE_CODE (t))
1144 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1145 DECL_INITIAL. If BASE is a nested reference into another
1146 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1147 the inner reference. */
1148 base = TREE_OPERAND (t, 0);
1149 switch (TREE_CODE (base))
1152 if (!TREE_READONLY (base)
1153 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1154 || !targetm.binds_local_p (base))
1157 ctor = DECL_INITIAL (base);
1162 ctor = fold_const_aggregate_ref (base);
1174 if (ctor == NULL_TREE
1175 || (TREE_CODE (ctor) != CONSTRUCTOR
1176 && TREE_CODE (ctor) != STRING_CST)
1177 || !TREE_STATIC (ctor))
1180 /* Get the index. If we have an SSA_NAME, try to resolve it
1181 with the current lattice value for the SSA_NAME. */
1182 idx = TREE_OPERAND (t, 1);
1183 switch (TREE_CODE (idx))
1186 if ((value = get_value (idx))
1187 && value->lattice_val == CONSTANT
1188 && TREE_CODE (value->value) == INTEGER_CST)
1201 /* Fold read from constant string. */
1202 if (TREE_CODE (ctor) == STRING_CST)
1204 if ((TYPE_MODE (TREE_TYPE (t))
1205 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1206 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1208 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1209 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1210 return build_int_cst_type (TREE_TYPE (t),
1211 (TREE_STRING_POINTER (ctor)
1212 [TREE_INT_CST_LOW (idx)]));
1216 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1217 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1218 if (tree_int_cst_equal (cfield, idx))
1220 STRIP_USELESS_TYPE_CONVERSION (cval);
1226 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1227 DECL_INITIAL. If BASE is a nested reference into another
1228 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1229 the inner reference. */
1230 base = TREE_OPERAND (t, 0);
1231 switch (TREE_CODE (base))
1234 if (!TREE_READONLY (base)
1235 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1236 || !targetm.binds_local_p (base))
1239 ctor = DECL_INITIAL (base);
1244 ctor = fold_const_aggregate_ref (base);
1251 if (ctor == NULL_TREE
1252 || TREE_CODE (ctor) != CONSTRUCTOR
1253 || !TREE_STATIC (ctor))
1256 field = TREE_OPERAND (t, 1);
1258 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1260 /* FIXME: Handle bit-fields. */
1261 && ! DECL_BIT_FIELD (cfield))
1263 STRIP_USELESS_TYPE_CONVERSION (cval);
1271 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1272 if (c && TREE_CODE (c) == COMPLEX_CST)
1273 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1279 tree base = TREE_OPERAND (t, 0);
1280 if (TREE_CODE (base) == SSA_NAME
1281 && (value = get_value (base))
1282 && value->lattice_val == CONSTANT
1283 && TREE_CODE (value->value) == ADDR_EXPR)
1284 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1295 /* Evaluate statement STMT.
1296 Valid only for assignments, calls, conditionals, and switches. */
1299 evaluate_stmt (gimple stmt)
1302 tree simplified = NULL_TREE;
1303 ccp_lattice_t likelyvalue = likely_value (stmt);
1306 fold_defer_overflow_warnings ();
1308 /* If the statement is likely to have a CONSTANT result, then try
1309 to fold the statement to determine the constant value. */
1310 /* FIXME. This is the only place that we call ccp_fold.
1311 Since likely_value never returns CONSTANT for calls, we will
1312 not attempt to fold them, including builtins that may profit. */
1313 if (likelyvalue == CONSTANT)
1314 simplified = ccp_fold (stmt);
1315 /* If the statement is likely to have a VARYING result, then do not
1316 bother folding the statement. */
1317 else if (likelyvalue == VARYING)
1319 enum gimple_code code = gimple_code (stmt);
1320 if (code == GIMPLE_ASSIGN)
1322 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1324 /* Other cases cannot satisfy is_gimple_min_invariant
1326 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1327 simplified = gimple_assign_rhs1 (stmt);
1329 else if (code == GIMPLE_SWITCH)
1330 simplified = gimple_switch_index (stmt);
1332 /* These cannot satisfy is_gimple_min_invariant without folding. */
1333 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1336 is_constant = simplified && is_gimple_min_invariant (simplified);
1338 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1340 if (dump_file && (dump_flags & TDF_DETAILS))
1342 fprintf (dump_file, "which is likely ");
1343 switch (likelyvalue)
1346 fprintf (dump_file, "CONSTANT");
1349 fprintf (dump_file, "UNDEFINED");
1352 fprintf (dump_file, "VARYING");
1356 fprintf (dump_file, "\n");
1361 /* The statement produced a constant value. */
1362 val.lattice_val = CONSTANT;
1363 val.value = simplified;
1367 /* The statement produced a nonconstant value. If the statement
1368 had UNDEFINED operands, then the result of the statement
1369 should be UNDEFINED. Otherwise, the statement is VARYING. */
1370 if (likelyvalue == UNDEFINED)
1371 val.lattice_val = likelyvalue;
1373 val.lattice_val = VARYING;
1375 val.value = NULL_TREE;
1381 /* Visit the assignment statement STMT. Set the value of its LHS to the
1382 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1383 creates virtual definitions, set the value of each new name to that
1384 of the RHS (if we can derive a constant out of the RHS).
1385 Value-returning call statements also perform an assignment, and
1386 are handled here. */
1388 static enum ssa_prop_result
1389 visit_assignment (gimple stmt, tree *output_p)
1392 enum ssa_prop_result retval;
1394 tree lhs = gimple_get_lhs (stmt);
1396 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1397 || gimple_call_lhs (stmt) != NULL_TREE);
1399 if (gimple_assign_copy_p (stmt))
1401 tree rhs = gimple_assign_rhs1 (stmt);
1403 if (TREE_CODE (rhs) == SSA_NAME)
1405 /* For a simple copy operation, we copy the lattice values. */
1406 prop_value_t *nval = get_value (rhs);
1410 val = evaluate_stmt (stmt);
1413 /* Evaluate the statement, which could be
1414 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1415 val = evaluate_stmt (stmt);
1417 retval = SSA_PROP_NOT_INTERESTING;
1419 /* Set the lattice value of the statement's output. */
1420 if (TREE_CODE (lhs) == SSA_NAME)
1422 /* If STMT is an assignment to an SSA_NAME, we only have one
1424 if (set_lattice_value (lhs, val))
1427 if (val.lattice_val == VARYING)
1428 retval = SSA_PROP_VARYING;
1430 retval = SSA_PROP_INTERESTING;
1438 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1439 if it can determine which edge will be taken. Otherwise, return
1440 SSA_PROP_VARYING. */
1442 static enum ssa_prop_result
1443 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1448 block = gimple_bb (stmt);
1449 val = evaluate_stmt (stmt);
1451 /* Find which edge out of the conditional block will be taken and add it
1452 to the worklist. If no single edge can be determined statically,
1453 return SSA_PROP_VARYING to feed all the outgoing edges to the
1454 propagation engine. */
1455 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1457 return SSA_PROP_INTERESTING;
1459 return SSA_PROP_VARYING;
1463 /* Evaluate statement STMT. If the statement produces an output value and
1464 its evaluation changes the lattice value of its output, return
1465 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1468 If STMT is a conditional branch and we can determine its truth
1469 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1470 value, return SSA_PROP_VARYING. */
1472 static enum ssa_prop_result
1473 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1478 if (dump_file && (dump_flags & TDF_DETAILS))
1480 fprintf (dump_file, "\nVisiting statement:\n");
1481 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1484 switch (gimple_code (stmt))
1487 /* If the statement is an assignment that produces a single
1488 output value, evaluate its RHS to see if the lattice value of
1489 its output has changed. */
1490 return visit_assignment (stmt, output_p);
1493 /* A value-returning call also performs an assignment. */
1494 if (gimple_call_lhs (stmt) != NULL_TREE)
1495 return visit_assignment (stmt, output_p);
1500 /* If STMT is a conditional branch, see if we can determine
1501 which branch will be taken. */
1502 /* FIXME. It appears that we should be able to optimize
1503 computed GOTOs here as well. */
1504 return visit_cond_stmt (stmt, taken_edge_p);
1510 /* Any other kind of statement is not interesting for constant
1511 propagation and, therefore, not worth simulating. */
1512 if (dump_file && (dump_flags & TDF_DETAILS))
1513 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1515 /* Definitions made by statements other than assignments to
1516 SSA_NAMEs represent unknown modifications to their outputs.
1517 Mark them VARYING. */
1518 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1520 prop_value_t v = { VARYING, NULL_TREE };
1521 set_lattice_value (def, v);
1524 return SSA_PROP_VARYING;
1528 /* Main entry point for SSA Conditional Constant Propagation. */
1534 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1535 if (ccp_finalize ())
1536 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1545 return flag_tree_ccp != 0;
1549 struct gimple_opt_pass pass_ccp =
1554 gate_ccp, /* gate */
1555 do_ssa_ccp, /* execute */
1558 0, /* static_pass_number */
1559 TV_TREE_CCP, /* tv_id */
1560 PROP_cfg | PROP_ssa, /* properties_required */
1561 0, /* properties_provided */
1562 0, /* properties_destroyed */
1563 0, /* todo_flags_start */
1564 TODO_dump_func | TODO_verify_ssa
1565 | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
1570 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1571 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1572 is the desired result type. */
1575 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1576 bool allow_negative_idx)
1578 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1579 tree array_type, elt_type, elt_size;
1582 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1583 measured in units of the size of elements type) from that ARRAY_REF).
1584 We can't do anything if either is variable.
1586 The case we handle here is *(&A[N]+O). */
1587 if (TREE_CODE (base) == ARRAY_REF)
1589 tree low_bound = array_ref_low_bound (base);
1591 elt_offset = TREE_OPERAND (base, 1);
1592 if (TREE_CODE (low_bound) != INTEGER_CST
1593 || TREE_CODE (elt_offset) != INTEGER_CST)
1596 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1597 base = TREE_OPERAND (base, 0);
1600 /* Ignore stupid user tricks of indexing non-array variables. */
1601 array_type = TREE_TYPE (base);
1602 if (TREE_CODE (array_type) != ARRAY_TYPE)
1604 elt_type = TREE_TYPE (array_type);
1605 if (!useless_type_conversion_p (orig_type, elt_type))
1608 /* Use signed size type for intermediate computation on the index. */
1609 idx_type = signed_type_for (size_type_node);
1611 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1612 element type (so we can use the alignment if it's not constant).
1613 Otherwise, compute the offset as an index by using a division. If the
1614 division isn't exact, then don't do anything. */
1615 elt_size = TYPE_SIZE_UNIT (elt_type);
1618 if (integer_zerop (offset))
1620 if (TREE_CODE (elt_size) != INTEGER_CST)
1621 elt_size = size_int (TYPE_ALIGN (elt_type));
1623 idx = build_int_cst (idx_type, 0);
1627 unsigned HOST_WIDE_INT lquo, lrem;
1628 HOST_WIDE_INT hquo, hrem;
1631 /* The final array offset should be signed, so we need
1632 to sign-extend the (possibly pointer) offset here
1633 and use signed division. */
1634 soffset = double_int_sext (tree_to_double_int (offset),
1635 TYPE_PRECISION (TREE_TYPE (offset)));
1636 if (TREE_CODE (elt_size) != INTEGER_CST
1637 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1638 soffset.low, soffset.high,
1639 TREE_INT_CST_LOW (elt_size),
1640 TREE_INT_CST_HIGH (elt_size),
1641 &lquo, &hquo, &lrem, &hrem)
1645 idx = build_int_cst_wide (idx_type, lquo, hquo);
1648 /* Assume the low bound is zero. If there is a domain type, get the
1649 low bound, if any, convert the index into that type, and add the
1651 min_idx = build_int_cst (idx_type, 0);
1652 domain_type = TYPE_DOMAIN (array_type);
1655 idx_type = domain_type;
1656 if (TYPE_MIN_VALUE (idx_type))
1657 min_idx = TYPE_MIN_VALUE (idx_type);
1659 min_idx = fold_convert (idx_type, min_idx);
1661 if (TREE_CODE (min_idx) != INTEGER_CST)
1664 elt_offset = fold_convert (idx_type, elt_offset);
1667 if (!integer_zerop (min_idx))
1668 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1669 if (!integer_zerop (elt_offset))
1670 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1672 /* Make sure to possibly truncate late after offsetting. */
1673 idx = fold_convert (idx_type, idx);
1675 /* We don't want to construct access past array bounds. For example
1678 should not be simplified into (*c)[14] or tree-vrp will
1679 give false warnings. The same is true for
1680 struct A { long x; char d[0]; } *a;
1682 which should be not folded to &a->d[-8]. */
1684 && TYPE_MAX_VALUE (domain_type)
1685 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1687 tree up_bound = TYPE_MAX_VALUE (domain_type);
1689 if (tree_int_cst_lt (up_bound, idx)
1690 /* Accesses after the end of arrays of size 0 (gcc
1691 extension) and 1 are likely intentional ("struct
1693 && compare_tree_int (up_bound, 1) > 0)
1697 && TYPE_MIN_VALUE (domain_type))
1699 if (!allow_negative_idx
1700 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1701 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1704 else if (!allow_negative_idx
1705 && compare_tree_int (idx, 0) < 0)
1708 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1712 /* Attempt to fold *(S+O) to S.X.
1713 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1714 is the desired result type. */
1717 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1718 tree orig_type, bool base_is_ptr)
1720 tree f, t, field_type, tail_array_field, field_offset;
1724 if (TREE_CODE (record_type) != RECORD_TYPE
1725 && TREE_CODE (record_type) != UNION_TYPE
1726 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1729 /* Short-circuit silly cases. */
1730 if (useless_type_conversion_p (record_type, orig_type))
1733 tail_array_field = NULL_TREE;
1734 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1738 if (TREE_CODE (f) != FIELD_DECL)
1740 if (DECL_BIT_FIELD (f))
1743 if (!DECL_FIELD_OFFSET (f))
1745 field_offset = byte_position (f);
1746 if (TREE_CODE (field_offset) != INTEGER_CST)
1749 /* ??? Java creates "interesting" fields for representing base classes.
1750 They have no name, and have no context. With no context, we get into
1751 trouble with nonoverlapping_component_refs_p. Skip them. */
1752 if (!DECL_FIELD_CONTEXT (f))
1755 /* The previous array field isn't at the end. */
1756 tail_array_field = NULL_TREE;
1758 /* Check to see if this offset overlaps with the field. */
1759 cmp = tree_int_cst_compare (field_offset, offset);
1763 field_type = TREE_TYPE (f);
1765 /* Here we exactly match the offset being checked. If the types match,
1766 then we can return that field. */
1768 && useless_type_conversion_p (orig_type, field_type))
1771 base = build1 (INDIRECT_REF, record_type, base);
1772 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1776 /* Don't care about offsets into the middle of scalars. */
1777 if (!AGGREGATE_TYPE_P (field_type))
1780 /* Check for array at the end of the struct. This is often
1781 used as for flexible array members. We should be able to
1782 turn this into an array access anyway. */
1783 if (TREE_CODE (field_type) == ARRAY_TYPE)
1784 tail_array_field = f;
1786 /* Check the end of the field against the offset. */
1787 if (!DECL_SIZE_UNIT (f)
1788 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1790 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1791 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1794 /* If we matched, then set offset to the displacement into
1797 new_base = build1 (INDIRECT_REF, record_type, base);
1800 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1802 /* Recurse to possibly find the match. */
1803 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1804 f == TYPE_FIELDS (record_type));
1807 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1813 if (!tail_array_field)
1816 f = tail_array_field;
1817 field_type = TREE_TYPE (f);
1818 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1820 /* If we get here, we've got an aggregate field, and a possibly
1821 nonzero offset into them. Recurse and hope for a valid match. */
1823 base = build1 (INDIRECT_REF, record_type, base);
1824 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1826 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1827 f == TYPE_FIELDS (record_type));
1830 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1834 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1835 or BASE[index] or by combination of those.
1837 Before attempting the conversion strip off existing ADDR_EXPRs and
1838 handled component refs. */
1841 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1845 bool base_is_ptr = true;
1848 if (TREE_CODE (base) == ADDR_EXPR)
1850 base_is_ptr = false;
1852 base = TREE_OPERAND (base, 0);
1854 /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
1855 so it needs to be removed and new COMPONENT_REF constructed.
1856 The wrong COMPONENT_REF are often constructed by folding the
1857 (type *)&object within the expression (type *)&object+offset */
1858 if (handled_component_p (base))
1860 HOST_WIDE_INT sub_offset, size, maxsize;
1862 newbase = get_ref_base_and_extent (base, &sub_offset,
1864 gcc_assert (newbase);
1867 && !(sub_offset & (BITS_PER_UNIT - 1)))
1871 offset = int_const_binop (PLUS_EXPR, offset,
1872 build_int_cst (TREE_TYPE (offset),
1873 sub_offset / BITS_PER_UNIT), 1);
1876 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
1877 && integer_zerop (offset))
1879 type = TREE_TYPE (base);
1884 if (!POINTER_TYPE_P (TREE_TYPE (base)))
1886 type = TREE_TYPE (TREE_TYPE (base));
1888 ret = maybe_fold_offset_to_component_ref (type, base, offset,
1889 orig_type, base_is_ptr);
1893 base = build1 (INDIRECT_REF, type, base);
1894 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
1899 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
1900 or &BASE[index] or by combination of those.
1902 Before attempting the conversion strip off existing component refs. */
1905 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
1909 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
1910 && POINTER_TYPE_P (orig_type));
1912 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
1918 /* For __builtin_object_size to function correctly we need to
1919 make sure not to fold address arithmetic so that we change
1920 reference from one array to another. This would happen for
1923 struct X { char s1[10]; char s2[10] } s;
1924 char *foo (void) { return &s.s2[-4]; }
1926 where we need to avoid generating &s.s1[6]. As the C and
1927 C++ frontends create different initial trees
1928 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
1929 sophisticated comparisons here. Note that checking for the
1930 condition after the fact is easier than trying to avoid doing
1933 if (TREE_CODE (orig) == ADDR_EXPR)
1934 orig = TREE_OPERAND (orig, 0);
1935 if ((TREE_CODE (orig) == ARRAY_REF
1936 || (TREE_CODE (orig) == COMPONENT_REF
1937 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
1938 && (TREE_CODE (t) == ARRAY_REF
1939 || TREE_CODE (t) == COMPONENT_REF)
1940 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
1941 ? TREE_OPERAND (orig, 0) : orig,
1942 TREE_CODE (t) == ARRAY_REF
1943 ? TREE_OPERAND (t, 0) : t, 0))
1946 ptr_type = build_pointer_type (TREE_TYPE (t));
1947 if (!useless_type_conversion_p (orig_type, ptr_type))
1949 return build_fold_addr_expr_with_type (t, ptr_type);
1955 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
1956 Return the simplified expression, or NULL if nothing could be done. */
1959 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
1962 bool volatile_p = TREE_THIS_VOLATILE (expr);
1964 /* We may well have constructed a double-nested PLUS_EXPR via multiple
1965 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
1966 are sometimes added. */
1968 STRIP_TYPE_NOPS (base);
1969 TREE_OPERAND (expr, 0) = base;
1971 /* One possibility is that the address reduces to a string constant. */
1972 t = fold_read_from_constant_string (expr);
1976 /* Add in any offset from a POINTER_PLUS_EXPR. */
1977 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
1981 offset2 = TREE_OPERAND (base, 1);
1982 if (TREE_CODE (offset2) != INTEGER_CST)
1984 base = TREE_OPERAND (base, 0);
1986 offset = fold_convert (sizetype,
1987 int_const_binop (PLUS_EXPR, offset, offset2, 1));
1990 if (TREE_CODE (base) == ADDR_EXPR)
1992 tree base_addr = base;
1994 /* Strip the ADDR_EXPR. */
1995 base = TREE_OPERAND (base, 0);
1997 /* Fold away CONST_DECL to its value, if the type is scalar. */
1998 if (TREE_CODE (base) == CONST_DECL
1999 && is_gimple_min_invariant (DECL_INITIAL (base)))
2000 return DECL_INITIAL (base);
2002 /* Try folding *(&B+O) to B.X. */
2003 t = maybe_fold_offset_to_reference (base_addr, offset,
2007 /* Preserve volatileness of the original expression.
2008 We can end up with a plain decl here which is shared
2009 and we shouldn't mess with its flags. */
2011 TREE_THIS_VOLATILE (t) = volatile_p;
2017 /* We can get here for out-of-range string constant accesses,
2018 such as "_"[3]. Bail out of the entire substitution search
2019 and arrange for the entire statement to be replaced by a
2020 call to __builtin_trap. In all likelihood this will all be
2021 constant-folded away, but in the meantime we can't leave with
2022 something that get_expr_operands can't understand. */
2026 if (TREE_CODE (t) == ADDR_EXPR
2027 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2029 /* FIXME: Except that this causes problems elsewhere with dead
2030 code not being deleted, and we die in the rtl expanders
2031 because we failed to remove some ssa_name. In the meantime,
2032 just return zero. */
2033 /* FIXME2: This condition should be signaled by
2034 fold_read_from_constant_string directly, rather than
2035 re-checking for it here. */
2036 return integer_zero_node;
2039 /* Try folding *(B+O) to B->X. Still an improvement. */
2040 if (POINTER_TYPE_P (TREE_TYPE (base)))
2042 t = maybe_fold_offset_to_reference (base, offset,
2049 /* Otherwise we had an offset that we could not simplify. */
2054 /* A quaint feature extant in our address arithmetic is that there
2055 can be hidden type changes here. The type of the result need
2056 not be the same as the type of the input pointer.
2058 What we're after here is an expression of the form
2059 (T *)(&array + const)
2060 where array is OP0, const is OP1, RES_TYPE is T and
2061 the cast doesn't actually exist, but is implicit in the
2062 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2064 which may be able to propagate further. */
2067 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2072 /* It had better be a constant. */
2073 if (TREE_CODE (op1) != INTEGER_CST)
2075 /* The first operand should be an ADDR_EXPR. */
2076 if (TREE_CODE (op0) != ADDR_EXPR)
2078 op0 = TREE_OPERAND (op0, 0);
2080 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2081 the offset into it. */
2082 while (TREE_CODE (op0) == ARRAY_REF)
2084 tree array_obj = TREE_OPERAND (op0, 0);
2085 tree array_idx = TREE_OPERAND (op0, 1);
2086 tree elt_type = TREE_TYPE (op0);
2087 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2090 if (TREE_CODE (array_idx) != INTEGER_CST)
2092 if (TREE_CODE (elt_size) != INTEGER_CST)
2095 /* Un-bias the index by the min index of the array type. */
2096 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2099 min_idx = TYPE_MIN_VALUE (min_idx);
2102 if (TREE_CODE (min_idx) != INTEGER_CST)
2105 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2106 if (!integer_zerop (min_idx))
2107 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2112 /* Convert the index to a byte offset. */
2113 array_idx = fold_convert (sizetype, array_idx);
2114 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2116 /* Update the operands for the next round, or for folding. */
2117 op1 = int_const_binop (PLUS_EXPR,
2122 ptd_type = TREE_TYPE (res_type);
2123 /* If we want a pointer to void, reconstruct the reference from the
2124 array element type. A pointer to that can be trivially converted
2125 to void *. This happens as we fold (void *)(ptr p+ off). */
2126 if (VOID_TYPE_P (ptd_type)
2127 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2128 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2130 /* At which point we can try some of the same things as for indirects. */
2131 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2133 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2136 t = build1 (ADDR_EXPR, res_type, t);
2141 /* For passing state through walk_tree into fold_stmt_r and its
2144 struct fold_stmt_r_data
2148 bool *inside_addr_expr_p;
2151 /* Subroutine of fold_stmt called via walk_tree. We perform several
2152 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2155 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2157 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2158 struct fold_stmt_r_data *fold_stmt_r_data;
2159 bool *inside_addr_expr_p;
2161 tree expr = *expr_p, t;
2162 bool volatile_p = TREE_THIS_VOLATILE (expr);
2164 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2165 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2166 changed_p = fold_stmt_r_data->changed_p;
2168 /* ??? It'd be nice if walk_tree had a pre-order option. */
2169 switch (TREE_CODE (expr))
2172 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2177 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2179 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2180 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2183 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2184 /* If we had a good reason for propagating the address here,
2185 make sure we end up with valid gimple. See PR34989. */
2186 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2190 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2195 if (POINTER_TYPE_P (TREE_TYPE (expr))
2196 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2197 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2198 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2200 TREE_TYPE (TREE_TYPE (expr)))))
2204 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2205 We'd only want to bother decomposing an existing ARRAY_REF if
2206 the base array is found to have another offset contained within.
2207 Otherwise we'd be wasting time. */
2209 /* If we are not processing expressions found within an
2210 ADDR_EXPR, then we can fold constant array references.
2211 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2213 if (!*inside_addr_expr_p && !wi->is_lhs)
2214 t = fold_read_from_constant_string (expr);
2220 *inside_addr_expr_p = true;
2221 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2222 *inside_addr_expr_p = false;
2227 /* Make sure the value is properly considered constant, and so gets
2228 propagated as expected. */
2230 recompute_tree_invariant_for_addr_expr (expr);
    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;
    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;
    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);

          if (set)
            COND_EXPR_COND (expr) = tem;
        }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG.  If ARG is an SSA name variable, follow its use-def chains.  If
   LENGTH is not NULL and, for TYPE == 0, its value is not equal to the
   length we determine or if we are unable to determine the length or
   value, return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
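/* As an illustration (hypothetical GIMPLE, types elided), given

     s_1 = PHI <"foo", "bar">
     n_2 = __builtin_strlen (s_1)

   following the use-def chain of s_1 finds length 3 for both PHI
   arguments, so for TYPE == 0 a length of 3 is returned for s_1.  */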
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
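/* For example, once the length of the source string is known to be 3,
   the call

     strcpy (buf, "abc");

   may be rewritten into the cheaper

     memcpy (buf, "abc", 4);

   copying length + 1 bytes so the terminating NUL is included (a
   sketch; fold_builtin_strcpy performs the actual rewrite).  */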
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }
  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);
  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;
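      /* A sketch of the *_CHK folding above: when the gathered value
         proves the copied length is a constant no larger than the
         object size, e.g.

           __builtin___memcpy_chk (dst, src, 3, 16);

         the runtime check is unnecessary and the call can be folded to
         the plain "memcpy (dst, src, 3)" by fold_builtin_memory_chk.  */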
    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }

          result = maybe_fold_stmt_addition (type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;
    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
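/* For instance, once constant propagation has replaced both operands,
   a condition such as

     if (3 != 0)

   folds to the always-true "if (1)"; removing the dead outgoing edge
   is left to later CFG cleanup (an illustrative example).  */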
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
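/* As an example, when the argument of a builtin has a propagated
   constant string,

     n_1 = __builtin_strlen ("abc");

   is replaced via update_call_from_tree by the assignment "n_1 = 3"
   (a sketch; the folding itself is done by ccp_fold_builtin above).  */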
static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  bool changed = false;
  bool inside_addr_expr = false;
  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;
    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;
  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    default:
      break;
    }

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
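/* A sketch of the first case (hypothetical GIMPLE):

     p_1 = __builtin_stack_save ();
     ...                                <- no calls or asm statements
     __builtin_stack_restore (p_1);     <- the call being visited
     ...                                <- no calls or asm statements
     __builtin_stack_restore (p_2);

   the later restore makes the visited one redundant, so it can be
   deleted and the matching __builtin_stack_save reduced to a dead
   constant assignment.  */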
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
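/* For example, on a target whose va_list is a plain pointer, the calls

     __builtin_va_start (&ap, 0);
     __builtin_va_copy (&ap2, ap);
     __builtin_va_end (&ap);

   become, respectively (a sketch),

     ap = __builtin_next_arg (0);
     ap2 = ap;
     (nothing; the va_end statement is simply deleted)  */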
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
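/* Illustrative use (hypothetical names): if the call "res_1 = foo ();"
   is folded to the expression "x_2 + 1", the gimplified computation of
   that expression is inserted before the call, which is then replaced
   by "res_1 = tmp_3;" where tmp_3 holds the computed value.  */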
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
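/* For example, by this point any __builtin_constant_p (x) not already
   folded to 1 is resolved to 0 (see below), and a __builtin_strlen of
   a propagated constant string becomes an integer constant.  Each
   simplified call is retried in case the result is another foldable
   builtin (an illustrative summary, not an exhaustive list).  */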
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);
          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin; there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};