1 /* Conditional constant propagation pass for the GNU compiler.
2 Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
5 Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 3, or (at your option) any
12 later version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not, see
21 <http://www.gnu.org/licenses/>. */
23 /* Conditional constant propagation (CCP) is based on the SSA
24 propagation engine (tree-ssa-propagate.c). Constant assignments of
25 the form VAR = CST are propagated from the assignments into uses of
26 VAR, which in turn may generate new constants. The simulation uses
27 a four level lattice to keep track of constant values associated
28 with SSA names. Given an SSA name V_i, it may take one of the
29 following values:
31 UNINITIALIZED -> the initial state of the value. This value
32 is replaced with a correct initial value
33 the first time the value is used, so the
34 rest of the pass does not need to care about
35 it. Using this value simplifies initialization
36 of the pass, and prevents us from needlessly
37 scanning statements that are never reached.
39 UNDEFINED -> V_i is a local variable whose definition
40 has not been processed yet. Therefore we
41 don't yet know if its value is a constant
42 or not.
44 CONSTANT -> V_i has been found to hold a constant
45 value C.
47 VARYING -> V_i cannot take a constant value, or if it
48 does, it is not possible to determine it
49 at compile time.
51 The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
53 1- In ccp_visit_stmt, we are interested in assignments whose RHS
54 evaluates into a constant and conditional jumps whose predicate
55 evaluates into a boolean true or false. When an assignment of
56 the form V_i = CONST is found, V_i's lattice value is set to
57 CONSTANT and CONST is associated with it. This causes the
58 propagation engine to add all the SSA edges coming out of the
59 assignment into the worklists, so that statements that use V_i
60 can be visited again.
62 If the statement is a conditional with a constant predicate, we
63 mark the outgoing edges as executable or not executable
64 depending on the predicate's value. This is then used when
65 visiting PHI nodes to know when a PHI argument can be ignored.
68 2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
69 same constant C, then the LHS of the PHI is set to C. This
70 evaluation is known as the "meet operation". Since one of the
71 goals of this evaluation is to optimistically return constant
72 values as often as possible, it uses two main short cuts:
74 - If an argument is flowing in through a non-executable edge, it
75 is ignored. This is useful in cases like this:
77 if (PRED)
78 a_9 = 3;
79 else
80 a_10 = 100;
81 a_11 = PHI (a_9, a_10)
83 If PRED is known to always evaluate to false, then we can
84 assume that a_11 will always take its value from a_10, meaning
85 that instead of considering it VARYING (a_9 and a_10 have
86 different values), we can consider it CONSTANT 100.
88 - If an argument has an UNDEFINED value, then it does not affect
89 the outcome of the meet operation. If a variable V_i has an
90 UNDEFINED value, it means that either its defining statement
91 hasn't been visited yet or V_i has no defining statement, in
92 which case the original symbol 'V' is being used
93 uninitialized. Since 'V' is a local variable, the compiler
94 may assume any initial value for it.
97 After propagation, every variable V_i that ends up with a lattice
98 value of CONSTANT will have the associated constant value in the
99 array CONST_VAL[i].VALUE. That is fed into substitute_and_fold for
100 final substitution and folding.
103 Constant propagation in stores and loads (STORE-CCP)
104 ----------------------------------------------------
106 While CCP has all the logic to propagate constants in GIMPLE
107 registers, it is missing the ability to associate constants with
108 stores and loads (i.e., pointer dereferences, structures and
109 global/aliased variables). We don't keep loads and stores in
110 SSA, but we do build a factored use-def web for them (in the
111 virtual operands).
113 For instance, consider the following code fragment:
115 struct A a;
116 const int B = 42;
118 void foo (int i)
119 {
120 if (i > 10)
121 a.a = 42;
122 else
123 {
124 a.b = 21;
125 a.a = a.b + 21;
126 }
128 if (a.a != B)
129 never_executed ();
130 }
132 We should be able to deduce that the predicate 'a.a != B' is always
133 false. To achieve this, we associate constant values to the SSA
134 names in the VDEF operands for each store. Additionally,
135 since we also glob partial loads/stores with the base symbol, we
136 also keep track of the memory reference where the constant value
137 was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
139 # a_5 = VDEF <a_4>
140 a.a = 2;
142 # VUSE <a_5>
143 x_3 = a.b;
145 In the example above, CCP will associate value '2' with 'a_5', but
146 it would be wrong to replace the load from 'a.b' with '2', because
147 '2' had been stored into a.a.
149 Note that the initial value of virtual operands is VARYING, not
150 UNDEFINED. Consider, for instance global variables:
158 # A_5 = PHI (A_4, A_2);
166 The value of A_2 cannot be assumed to be UNDEFINED, as it may have
167 been defined outside of foo. If we were to assume it UNDEFINED, we
168 would erroneously optimize the above into 'return 3;'.
170 Though STORE-CCP is not too expensive, it does have to do more work
171 than regular CCP, so it is only enabled at -O2. Both regular CCP
172 and STORE-CCP use the exact same algorithm. The only distinction
173 is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
174 set to true. This affects the evaluation of statements and PHI
175 nodes.
177 References:
179 Constant propagation with conditional branches,
180 Wegman and Zadeck, ACM TOPLAS 13(2):181-210.
182 Building an Optimizing Compiler,
183 Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.
185 Advanced Compiler Design and Implementation,
186 Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6 */
188 #include "config.h"
189 #include "system.h"
190 #include "coretypes.h"
197 #include "basic-block.h"
200 #include "function.h"
201 #include "diagnostic.h"
203 #include "tree-dump.h"
204 #include "tree-flow.h"
205 #include "tree-pass.h"
206 #include "tree-ssa-propagate.h"
207 #include "value-prof.h"
208 #include "langhooks.h"
213 /* Possible lattice values. */
214 typedef enum
215 {
216 UNINITIALIZED,
217 UNDEFINED,
218 CONSTANT,
219 VARYING
220 } ccp_lattice_t;
222 /* Array of propagated constant values. After propagation,
223 CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
224 the constant is held in an SSA name representing a memory store
225 (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
226 memory reference used to store (i.e., the LHS of the assignment
227 doing the store). */
228 static prop_value_t *const_val;
230 /* True if we are also propagating constants in stores and loads. */
231 static bool do_store_ccp;
233 /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX. */
235 static void
236 dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
237 {
238 switch (val.lattice_val)
239 {
240 case UNINITIALIZED:
241 fprintf (outf, "%sUNINITIALIZED", prefix);
242 break;
243 case UNDEFINED:
244 fprintf (outf, "%sUNDEFINED", prefix);
245 break;
246 case VARYING:
247 fprintf (outf, "%sVARYING", prefix);
248 break;
249 case CONSTANT:
250 fprintf (outf, "%sCONSTANT ", prefix);
251 print_generic_expr (outf, val.value, dump_flags);
252 break;
253 default:
254 gcc_unreachable ();
255 }
256 }
259 /* Print lattice value VAL to stderr. */
261 void debug_lattice_value (prop_value_t val);
263 void
264 debug_lattice_value (prop_value_t val)
265 {
266 dump_lattice_value (stderr, "", val);
267 fprintf (stderr, "\n");
268 }
272 /* If SYM is a constant variable with known value, return the value.
273 NULL_TREE is returned otherwise. */
275 static tree
276 get_symbol_constant_value (tree sym)
277 {
278 if (TREE_STATIC (sym)
279 && TREE_READONLY (sym)
280 && !MTAG_P (sym))
281 {
282 tree val = DECL_INITIAL (sym);
283 if (val)
284 {
285 STRIP_USELESS_TYPE_CONVERSION (val);
286 if (is_gimple_min_invariant (val))
287 return val;
288 }
289 /* Variables declared 'const' without an initializer
290 have zero as the initializer if they may not be
291 overridden at link or run time. */
292 if (!val
293 && targetm.binds_local_p (sym)
294 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
295 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
296 return fold_convert (TREE_TYPE (sym), integer_zero_node);
297 }
299 return NULL_TREE;
300 }
302 /* Compute a default value for variable VAR and store it in the
303 CONST_VAL array. The following rules are used to get default
306 1- Global and static variables that are declared constant are
309 2- Any other value is considered UNDEFINED. This is useful when
310 considering PHI nodes. PHI arguments that are undefined do not
311 change the constant value of the PHI node, which allows for more
312 constants to be propagated.
314 3- Variables defined by statements other than assignments and PHI
315 nodes are considered VARYING.
317 4- Initial values of variables that are not GIMPLE registers are
318 considered VARYING. */
320 static prop_value_t
321 get_default_value (tree var)
322 {
323 tree sym = SSA_NAME_VAR (var);
324 prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
325 tree cst_val;
327 if (!do_store_ccp && !is_gimple_reg (var))
328 {
329 /* Short circuit for regular CCP. We are not interested in any
330 non-register when DO_STORE_CCP is false. */
331 val.lattice_val = VARYING;
332 }
333 else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
334 {
335 /* Globals and static variables declared 'const' take their
336 initial value. */
337 val.lattice_val = CONSTANT;
338 val.value = cst_val;
339 val.mem_ref = sym;
340 }
341 else
342 {
343 gimple stmt = SSA_NAME_DEF_STMT (var);
345 if (gimple_nop_p (stmt))
346 {
347 /* Variables defined by an empty statement are those used
348 before being initialized. If VAR is a local variable, we
349 can assume initially that it is UNDEFINED, otherwise we must
350 consider it VARYING. */
351 if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
352 val.lattice_val = UNDEFINED;
353 else
354 val.lattice_val = VARYING;
355 }
356 else if (is_gimple_assign (stmt)
357 /* Value-returning GIMPLE_CALL statements assign to
358 a variable, and are treated similarly to GIMPLE_ASSIGN. */
359 || (is_gimple_call (stmt)
360 && gimple_call_lhs (stmt) != NULL_TREE)
361 || gimple_code (stmt) == GIMPLE_PHI)
362 {
363 /* Any other variable defined by an assignment or a PHI node
364 is considered UNDEFINED. */
365 val.lattice_val = UNDEFINED;
366 }
367 else
368 {
369 /* Otherwise, VAR will never take on a constant value. */
370 val.lattice_val = VARYING;
371 }
372 }
374 return val;
375 }
378 /* Get the constant value associated with variable VAR. */
380 static inline prop_value_t *
385 if (const_val == NULL)
388 val = &const_val[SSA_NAME_VERSION (var)];
389 if (val->lattice_val == UNINITIALIZED)
390 *val = get_default_value (var);
392 return val;
393 }
395 /* Sets the value associated with VAR to VARYING. */
397 static void
398 set_value_varying (tree var)
399 {
400 prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
402 val->lattice_val = VARYING;
403 val->value = NULL_TREE;
404 val->mem_ref = NULL_TREE;
405 }
407 /* For float types, modify the value of VAL to make ccp work correctly
408 for non-standard values (-0, NaN):
410 If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
411 If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
412 This is to fix the following problem (see PR 29921): Suppose we have
416 and we set value of y to NaN. This causes value of x to be set to NaN.
417 When we later determine that y is in fact VARYING, fold uses the fact
418 that HONOR_NANS is false, and we try to change the value of x to 0,
419 causing an ICE. With HONOR_NANS being false, the real appearance of
420 NaN would cause undefined behavior, though, so claiming that y (and x)
421 are UNDEFINED initially is correct. */
424 canonicalize_float_value (prop_value_t *val)
426 enum machine_mode mode;
430 if (val->lattice_val != CONSTANT
431 || TREE_CODE (val->value) != REAL_CST)
434 d = TREE_REAL_CST (val->value);
435 type = TREE_TYPE (val->value);
436 mode = TYPE_MODE (type);
438 if (!HONOR_SIGNED_ZEROS (mode)
439 && REAL_VALUE_MINUS_ZERO (d))
441 val->value = build_real (type, dconst0);
445 if (!HONOR_NANS (mode)
446 && REAL_VALUE_ISNAN (d))
448 val->lattice_val = UNDEFINED;
455 /* Set the value for variable VAR to NEW_VAL. Return true if the new
456 value is different from VAR's previous value. */
459 set_lattice_value (tree var, prop_value_t new_val)
461 prop_value_t *old_val = get_value (var);
463 canonicalize_float_value (&new_val);
465 /* Lattice transitions must always be monotonically increasing in
466 value. If *OLD_VAL and NEW_VAL are the same, return false to
467 inform the caller that this was a non-transition. */
469 gcc_assert (old_val->lattice_val < new_val.lattice_val
470 || (old_val->lattice_val == new_val.lattice_val
471 && ((!old_val->value && !new_val.value)
472 || operand_equal_p (old_val->value, new_val.value, 0))
473 && old_val->mem_ref == new_val.mem_ref));
475 if (old_val->lattice_val != new_val.lattice_val)
477 if (dump_file && (dump_flags & TDF_DETAILS))
479 dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
480 fprintf (dump_file, ". Adding SSA edges to worklist.\n");
481 }
483 *old_val = new_val;
485 gcc_assert (new_val.lattice_val != UNDEFINED);
486 return true;
487 }
489 return false;
490 }
493 /* Return the likely CCP lattice value for STMT.
495 If STMT has no operands, then return CONSTANT.
497 Else if undefinedness of operands of STMT cause its value to be
498 undefined, then return UNDEFINED.
500 Else if any operands of STMT are constants, then return CONSTANT.
502 Else return VARYING. */
504 static ccp_lattice_t
505 likely_value (gimple stmt)
506 {
507 bool has_constant_operand, has_undefined_operand, all_undefined_operands;
508 tree use;
509 ssa_op_iter iter;
511 enum tree_code code = gimple_code (stmt);
513 /* This function appears to be called only for assignments, calls,
514 conditionals, and switches, due to the logic in visit_stmt. */
515 gcc_assert (code == GIMPLE_ASSIGN
516 || code == GIMPLE_CALL
517 || code == GIMPLE_COND
518 || code == GIMPLE_SWITCH);
520 /* If the statement has volatile operands, it won't fold to a
521 constant value. */
522 if (gimple_has_volatile_ops (stmt))
523 return VARYING;
525 /* If we are not doing store-ccp, statements with loads
526 and/or stores will never fold into a constant. */
527 if (!do_store_ccp
528 && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
529 return VARYING;
531 /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
532 is_gimple_min_invariant, so we do not consider calls or
533 other forms of assignment. */
534 if (gimple_assign_single_p (stmt)
535 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
536 return CONSTANT;
538 if (code == GIMPLE_COND
539 && is_gimple_min_invariant (gimple_cond_lhs (stmt))
540 && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
541 return CONSTANT;
543 if (code == GIMPLE_SWITCH
544 && is_gimple_min_invariant (gimple_switch_index (stmt)))
545 return CONSTANT;
547 /* Arrive here for more complex cases. */
549 has_constant_operand = false;
550 has_undefined_operand = false;
551 all_undefined_operands = true;
552 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
554 prop_value_t *val = get_value (use);
556 if (val->lattice_val == UNDEFINED)
557 has_undefined_operand = true;
558 else
559 all_undefined_operands = false;
561 if (val->lattice_val == CONSTANT)
562 has_constant_operand = true;
565 /* If the operation combines operands like COMPLEX_EXPR make sure to
566 not mark the result UNDEFINED if only one part of the result is
567 undefined. */
568 if (has_undefined_operand && all_undefined_operands)
569 return UNDEFINED;
570 else if (code == GIMPLE_ASSIGN && has_undefined_operand)
571 {
572 switch (gimple_assign_rhs_code (stmt))
573 {
574 /* Unary operators are handled with all_undefined_operands. */
575 case PLUS_EXPR:
576 case MINUS_EXPR:
577 case POINTER_PLUS_EXPR:
578 /* Not MIN_EXPR, MAX_EXPR. One VARYING operand may be selected.
579 Not bitwise operators, one VARYING operand may specify the
580 result completely. Not logical operators for the same reason.
581 Not COMPLEX_EXPR as one VARYING operand makes the result partly
582 not UNDEFINED. Not *DIV_EXPR, comparisons and shifts because
583 the undefined operand may be promoted. */
584 return UNDEFINED;
586 default:
587 ;
588 }
589 }
590 /* If there was an UNDEFINED operand but the result may not be UNDEFINED,
591 fall back to VARYING even if there were CONSTANT operands. */
592 if (has_undefined_operand)
593 return VARYING;
595 if (has_constant_operand
596 /* We do not consider virtual operands here -- a load from read-only
597 memory may have only VARYING virtual operands, but still be
598 constant. */
599 || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
600 return CONSTANT;
602 return VARYING;
605 /* Returns true if STMT cannot be constant. */
607 static bool
608 surely_varying_stmt_p (gimple stmt)
609 {
610 /* If the statement has operands that we cannot handle, it cannot be
611 constant. */
612 if (gimple_has_volatile_ops (stmt))
613 return true;
615 if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
616 {
617 if (!do_store_ccp)
618 return true;
620 /* We can only handle simple loads and stores. */
621 if (!stmt_makes_single_load (stmt)
622 && !stmt_makes_single_store (stmt))
623 return true;
624 }
626 /* If it is a call and does not return a value or is not a
627 builtin and not an indirect call, it is varying. */
628 if (is_gimple_call (stmt))
629 {
630 tree fndecl;
631 if (!gimple_call_lhs (stmt)
632 || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
633 && !DECL_BUILT_IN (fndecl)))
634 return true;
635 }
637 /* Anything other than assignments and conditional jumps are not
638 interesting for CCP. */
639 if (gimple_code (stmt) != GIMPLE_ASSIGN
640 && gimple_code (stmt) != GIMPLE_COND
641 && gimple_code (stmt) != GIMPLE_SWITCH
642 && gimple_code (stmt) != GIMPLE_CALL)
643 return true;
645 return false;
646 }
648 /* Initialize local data structures for CCP. */
650 static void
651 ccp_initialize (void)
652 {
653 basic_block bb;
655 const_val = XCNEWVEC (prop_value_t, num_ssa_names);
657 /* Initialize simulation flags for PHI nodes and statements. */
658 FOR_EACH_BB (bb)
659 {
660 gimple_stmt_iterator i;
662 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
664 gimple stmt = gsi_stmt (i);
665 bool is_varying = surely_varying_stmt_p (stmt);
672 /* If the statement will not produce a constant, mark
673 all its outputs VARYING. */
674 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
677 set_value_varying (def);
680 prop_set_simulate_again (stmt, !is_varying);
684 /* Now process PHI nodes. We never clear the simulate_again flag on
685 phi nodes, since we do not know which edges are executable yet,
686 except for phi nodes for virtual operands when we do not do store ccp. */
687 FOR_EACH_BB (bb)
688 {
689 gimple_stmt_iterator i;
691 for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
693 gimple phi = gsi_stmt (i);
695 if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
696 prop_set_simulate_again (phi, false);
697 else
698 prop_set_simulate_again (phi, true);
704 /* Do final substitution of propagated values, cleanup the flowgraph and
705 free allocated storage.
707 Return TRUE when something was optimized. */
709 static bool
710 ccp_finalize (void)
711 {
712 /* Perform substitutions based on the known constant values. */
713 bool something_changed = substitute_and_fold (const_val, false);
715 free (const_val);
716 const_val = NULL;
717 return something_changed;
718 }
721 /* Compute the meet operator between *VAL1 and *VAL2. Store the result
722 in VAL1.
724 any M UNDEFINED = any
725 any M VARYING = VARYING
726 Ci M Cj = Ci if (i == j)
727 Ci M Cj = VARYING if (i != j)
728 */
730 static void
731 ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
733 if (val1->lattice_val == UNDEFINED)
735 /* UNDEFINED M any = any */
738 else if (val2->lattice_val == UNDEFINED)
740 /* any M UNDEFINED = any
741 Nothing to do. VAL1 already contains the value we want. */
744 else if (val1->lattice_val == VARYING
745 || val2->lattice_val == VARYING)
747 /* any M VARYING = VARYING. */
748 val1->lattice_val = VARYING;
749 val1->value = NULL_TREE;
750 val1->mem_ref = NULL_TREE;
752 else if (val1->lattice_val == CONSTANT
753 && val2->lattice_val == CONSTANT
754 && simple_cst_equal (val1->value, val2->value) == 1
756 || (val1->mem_ref && val2->mem_ref
757 && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
759 /* Ci M Cj = Ci if (i == j)
760 Ci M Cj = VARYING if (i != j)
762 If these two values come from memory stores, make sure that
763 they come from the same memory reference. */
764 val1->lattice_val = CONSTANT;
765 /* VAL1 already contains the value and memory reference we want. */
770 /* Any other combination is VARYING. */
771 val1->lattice_val = VARYING;
772 val1->value = NULL_TREE;
773 val1->mem_ref = NULL_TREE;
778 /* Loop through the PHI_NODE's parameters for BLOCK and compare their
779 lattice values to determine PHI_NODE's lattice value. The value of a
780 PHI node is determined calling ccp_lattice_meet with all the arguments
781 of the PHI node that are incoming via executable edges. */
783 static enum ssa_prop_result
784 ccp_visit_phi_node (gimple phi)
785 {
786 unsigned i;
787 prop_value_t *old_val, new_val;
789 if (dump_file && (dump_flags & TDF_DETAILS))
791 fprintf (dump_file, "\nVisiting PHI node: ");
792 print_gimple_stmt (dump_file, phi, 0, dump_flags);
795 old_val = get_value (gimple_phi_result (phi));
796 switch (old_val->lattice_val)
797 {
798 case VARYING:
799 return SSA_PROP_VARYING;
801 case CONSTANT:
802 new_val = *old_val;
803 break;
805 case UNDEFINED:
806 new_val.lattice_val = UNDEFINED;
807 new_val.value = NULL_TREE;
808 new_val.mem_ref = NULL_TREE;
809 break;
811 default:
812 gcc_unreachable ();
813 }
815 for (i = 0; i < gimple_phi_num_args (phi); i++)
817 /* Compute the meet operator over all the PHI arguments flowing
818 through executable edges. */
819 edge e = gimple_phi_arg_edge (phi, i);
821 if (dump_file && (dump_flags & TDF_DETAILS))
824 "\n Argument #%d (%d -> %d %sexecutable)\n",
825 i, e->src->index, e->dest->index,
826 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
829 /* If the incoming edge is executable, compute the meet operator for
830 the existing value of the PHI node and the current PHI argument. */
831 if (e->flags & EDGE_EXECUTABLE)
833 tree arg = gimple_phi_arg (phi, i)->def;
834 prop_value_t arg_val;
836 if (is_gimple_min_invariant (arg))
837 {
838 arg_val.lattice_val = CONSTANT;
839 arg_val.value = arg;
840 arg_val.mem_ref = NULL_TREE;
841 }
842 else
843 arg_val = *(get_value (arg));
845 ccp_lattice_meet (&new_val, &arg_val);
847 if (dump_file && (dump_flags & TDF_DETAILS))
849 fprintf (dump_file, "\t");
850 print_generic_expr (dump_file, arg, dump_flags);
851 dump_lattice_value (dump_file, "\tValue: ", arg_val);
852 fprintf (dump_file, "\n");
855 if (new_val.lattice_val == VARYING)
856 break;
857 }
858 }
860 if (dump_file && (dump_flags & TDF_DETAILS))
862 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
863 fprintf (dump_file, "\n\n");
866 /* Make the transition to the new value. */
867 if (set_lattice_value (gimple_phi_result (phi), new_val))
868 {
869 if (new_val.lattice_val == VARYING)
870 return SSA_PROP_VARYING;
871 else
872 return SSA_PROP_INTERESTING;
873 }
874 else
875 return SSA_PROP_NOT_INTERESTING;
879 /* CCP specific front-end to the non-destructive constant folding
882 Attempt to simplify the RHS of STMT knowing that one or more
883 operands are constants.
885 If simplification is possible, return the simplified RHS,
886 otherwise return the original RHS or NULL_TREE. */
888 static tree
889 ccp_fold (gimple stmt)
890 {
891 switch (gimple_code (stmt))
892 {
893 case GIMPLE_ASSIGN:
894 {
895 enum tree_code subcode = gimple_assign_rhs_code (stmt);
897 switch (get_gimple_rhs_class (subcode))
898 {
899 case GIMPLE_SINGLE_RHS:
900 {
901 tree rhs = gimple_assign_rhs1 (stmt);
902 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
904 if (TREE_CODE (rhs) == SSA_NAME)
906 /* If the RHS is an SSA_NAME, return its known constant value,
907 if any. */
908 return get_value (rhs)->value;
910 /* Handle propagating invariant addresses into address operations.
911 The folding we do here matches that in tree-ssa-forwprop.c. */
912 else if (TREE_CODE (rhs) == ADDR_EXPR)
915 base = &TREE_OPERAND (rhs, 0);
916 while (handled_component_p (*base))
917 base = &TREE_OPERAND (*base, 0);
918 if (TREE_CODE (*base) == INDIRECT_REF
919 && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
921 prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
922 if (val->lattice_val == CONSTANT
923 && TREE_CODE (val->value) == ADDR_EXPR
924 && useless_type_conversion_p
925 (TREE_TYPE (TREE_OPERAND (*base, 0)),
926 TREE_TYPE (val->value))
927 && useless_type_conversion_p
929 TREE_TYPE (TREE_OPERAND (val->value, 0))))
931 /* We need to return a new tree, not modify the IL
932 or share parts of it. So play some tricks to
933 avoid manually building it. */
934 tree ret, save = *base;
935 *base = TREE_OPERAND (val->value, 0);
936 ret = unshare_expr (rhs);
937 recompute_tree_invariant_for_addr_expr (ret);
944 else if (do_store_ccp && stmt_makes_single_load (stmt))
946 /* If the RHS is a memory load, see if the VUSEs associated with
947 it are a valid constant for that memory load. */
948 prop_value_t *val = get_value_loaded_by (stmt, const_val);
949 if (val && val->mem_ref)
951 if (operand_equal_p (val->mem_ref, rhs, 0))
954 /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
955 complex type with a known constant value, return it. */
956 if ((TREE_CODE (rhs) == REALPART_EXPR
957 || TREE_CODE (rhs) == IMAGPART_EXPR)
958 && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
959 return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
963 if (kind == tcc_reference)
965 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
966 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
968 prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
969 if (val->lattice_val == CONSTANT)
970 return fold_unary (VIEW_CONVERT_EXPR,
971 TREE_TYPE (rhs), val->value);
973 return fold_const_aggregate_ref (rhs);
975 else if (kind == tcc_declaration)
976 return get_symbol_constant_value (rhs);
980 case GIMPLE_UNARY_RHS:
982 /* Handle unary operators that can appear in GIMPLE form.
983 Note that we know the single operand must be a constant,
984 so this should almost always return a simplified RHS. */
985 tree lhs = gimple_assign_lhs (stmt);
986 tree op0 = gimple_assign_rhs1 (stmt);
988 /* Simplify the operand down to a constant. */
989 if (TREE_CODE (op0) == SSA_NAME)
990 {
991 prop_value_t *val = get_value (op0);
992 if (val->lattice_val == CONSTANT)
993 op0 = val->value;
994 }
996 /* Conversions are useless for CCP purposes if they are
997 value-preserving. Thus the restrictions that
998 useless_type_conversion_p places for pointer type conversions
999 do not apply here. Substitution later will only substitute to
1001 if (CONVERT_EXPR_CODE_P (subcode)
1002 && POINTER_TYPE_P (TREE_TYPE (lhs))
1003 && POINTER_TYPE_P (TREE_TYPE (op0))
1004 /* Do not allow differences in volatile qualification
1005 as this might get us confused as to whether a
1006 propagation destination statement is volatile
1007 or not. See PR36988. */
1008 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
1009 == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
1012 /* Still try to generate a constant of correct type. */
1013 if (!useless_type_conversion_p (TREE_TYPE (lhs),
1015 && ((tem = maybe_fold_offset_to_address
1016 (op0, integer_zero_node, TREE_TYPE (lhs)))
1022 return fold_unary (subcode, gimple_expr_type (stmt), op0);
1025 case GIMPLE_BINARY_RHS:
1027 /* Handle binary operators that can appear in GIMPLE form. */
1028 tree op0 = gimple_assign_rhs1 (stmt);
1029 tree op1 = gimple_assign_rhs2 (stmt);
1031 /* Simplify the operands down to constants when appropriate. */
1032 if (TREE_CODE (op0) == SSA_NAME)
1033 {
1034 prop_value_t *val = get_value (op0);
1035 if (val->lattice_val == CONSTANT)
1036 op0 = val->value;
1037 }
1039 if (TREE_CODE (op1) == SSA_NAME)
1040 {
1041 prop_value_t *val = get_value (op1);
1042 if (val->lattice_val == CONSTANT)
1043 op1 = val->value;
1044 }
1046 /* Fold &foo + CST into an invariant reference if possible. */
1047 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
1048 && TREE_CODE (op0) == ADDR_EXPR
1049 && TREE_CODE (op1) == INTEGER_CST)
1051 tree lhs = gimple_assign_lhs (stmt);
1052 tree tem = maybe_fold_offset_to_address (op0, op1,
1054 if (tem != NULL_TREE)
1058 return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
1069 tree fn = gimple_call_fn (stmt);
1072 if (TREE_CODE (fn) == SSA_NAME)
1073 {
1074 val = get_value (fn);
1075 if (val->lattice_val == CONSTANT)
1076 fn = val->value;
1077 }
1078 if (TREE_CODE (fn) == ADDR_EXPR
1079 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
1081 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
1084 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1086 args[i] = gimple_call_arg (stmt, i);
1087 if (TREE_CODE (args[i]) == SSA_NAME)
1089 val = get_value (args[i]);
1090 if (val->lattice_val == CONSTANT)
1091 args[i] = val->value;
1094 call = build_call_array (gimple_call_return_type (stmt),
1095 fn, gimple_call_num_args (stmt), args);
1096 retval = fold_call_expr (call, false);
1098 /* fold_call_expr wraps the result inside a NOP_EXPR. */
1099 STRIP_NOPS (retval);
1107 /* Handle comparison operators that can appear in GIMPLE form. */
1108 tree op0 = gimple_cond_lhs (stmt);
1109 tree op1 = gimple_cond_rhs (stmt);
1110 enum tree_code code = gimple_cond_code (stmt);
1112 /* Simplify the operands down to constants when appropriate. */
1113 if (TREE_CODE (op0) == SSA_NAME)
1114 {
1115 prop_value_t *val = get_value (op0);
1116 if (val->lattice_val == CONSTANT)
1117 op0 = val->value;
1118 }
1120 if (TREE_CODE (op1) == SSA_NAME)
1121 {
1122 prop_value_t *val = get_value (op1);
1123 if (val->lattice_val == CONSTANT)
1124 op1 = val->value;
1125 }
1127 return fold_binary (code, boolean_type_node, op0, op1);
1132 tree rhs = gimple_switch_index (stmt);
1134 if (TREE_CODE (rhs) == SSA_NAME)
1136 /* If the RHS is an SSA_NAME, return its known constant value,
1137 if any. */
1138 return get_value (rhs)->value;
1150 /* Return the tree representing the element referenced by T if T is an
1151 ARRAY_REF or COMPONENT_REF into constant aggregates. Return
1152 NULL_TREE otherwise. */
1154 static tree
1155 fold_const_aggregate_ref (tree t)
1156 {
1157 prop_value_t *value;
1158 tree base, ctor, idx, field;
1159 unsigned HOST_WIDE_INT cnt;
1162 switch (TREE_CODE (t))
1163 {
1164 case ARRAY_REF:
1165 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1166 DECL_INITIAL. If BASE is a nested reference into another
1167 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1168 the inner reference. */
1169 base = TREE_OPERAND (t, 0);
1170 switch (TREE_CODE (base))
1171 {
1172 case VAR_DECL:
1173 if (!TREE_READONLY (base)
1174 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
1175 || !targetm.binds_local_p (base))
1176 return NULL_TREE;
1178 ctor = DECL_INITIAL (base);
1179 break;
1181 case ARRAY_REF:
1182 case COMPONENT_REF:
1183 ctor = fold_const_aggregate_ref (base);
1184 break;
1186 case STRING_CST:
1187 case CONSTRUCTOR:
1188 ctor = base;
1189 break;
1191 default:
1192 return NULL_TREE;
1193 }
1195 if (ctor == NULL_TREE
1196 || (TREE_CODE (ctor) != CONSTRUCTOR
1197 && TREE_CODE (ctor) != STRING_CST)
1198 || !TREE_STATIC (ctor))
1199 return NULL_TREE;
1201 /* Get the index. If we have an SSA_NAME, try to resolve it
1202 with the current lattice value for the SSA_NAME. */
1203 idx = TREE_OPERAND (t, 1);
1204 switch (TREE_CODE (idx))
1205 {
1206 case SSA_NAME:
1207 if ((value = get_value (idx))
1208 && value->lattice_val == CONSTANT
1209 && TREE_CODE (value->value) == INTEGER_CST)
1210 idx = value->value;
1211 else
1212 return NULL_TREE;
1213 break;
1215 case INTEGER_CST:
1216 break;
1218 default:
1219 return NULL_TREE;
1220 }
1222 /* Fold read from constant string. */
1223 if (TREE_CODE (ctor) == STRING_CST)
1224 {
1225 if ((TYPE_MODE (TREE_TYPE (t))
1226 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1227 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
1228 == MODE_INT)
1229 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
1230 && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
1231 return build_int_cst_type (TREE_TYPE (t),
1232 (TREE_STRING_POINTER (ctor)
1233 [TREE_INT_CST_LOW (idx)]));
1234 return NULL_TREE;
1235 }
1237 /* Whoo-hoo! I'll fold ya baby. Yeah! */
1238 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1239 if (tree_int_cst_equal (cfield, idx))
1240 {
1241 STRIP_USELESS_TYPE_CONVERSION (cval);
1242 return cval;
1243 }
1247 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
1248 DECL_INITIAL. If BASE is a nested reference into another
1249 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
1250 the inner reference. */
1251 base = TREE_OPERAND (t, 0);
1252 switch (TREE_CODE (base))
1253 {
1254 case VAR_DECL:
1255 if (!TREE_READONLY (base)
1256 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1257 || !targetm.binds_local_p (base))
1258 return NULL_TREE;
1260 ctor = DECL_INITIAL (base);
1261 break;
1263 case ARRAY_REF:
1264 case COMPONENT_REF:
1265 ctor = fold_const_aggregate_ref (base);
1266 break;
1268 default:
1269 return NULL_TREE;
1270 }
1272 if (ctor == NULL_TREE
1273 || TREE_CODE (ctor) != CONSTRUCTOR
1274 || !TREE_STATIC (ctor))
1275 return NULL_TREE;
1277 field = TREE_OPERAND (t, 1);
1279 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
1280 if (cfield == field
1281 /* FIXME: Handle bit-fields. */
1282 && ! DECL_BIT_FIELD (cfield))
1283 {
1284 STRIP_USELESS_TYPE_CONVERSION (cval);
1285 return cval;
1286 }
1289 case REALPART_EXPR:
1290 case IMAGPART_EXPR:
1291 {
1292 tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
1293 if (c && TREE_CODE (c) == COMPLEX_CST)
1294 return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
1295 break;
1296 }
1298 case INDIRECT_REF:
1299 {
1300 tree base = TREE_OPERAND (t, 0);
1301 if (TREE_CODE (base) == SSA_NAME
1302 && (value = get_value (base))
1303 && value->lattice_val == CONSTANT
1304 && TREE_CODE (value->value) == ADDR_EXPR)
1305 return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
1316 /* Evaluate statement STMT.
1317 Valid only for assignments, calls, conditionals, and switches. */
1319 static prop_value_t
1320 evaluate_stmt (gimple stmt)
1321 {
1322 prop_value_t val;
1323 tree simplified = NULL_TREE;
1324 ccp_lattice_t likelyvalue = likely_value (stmt);
1325 bool is_constant;
1327 val.mem_ref = NULL_TREE;
1329 fold_defer_overflow_warnings ();
1331 /* If the statement is likely to have a CONSTANT result, then try
1332 to fold the statement to determine the constant value. */
1333 /* FIXME. This is the only place that we call ccp_fold.
1334 Since likely_value never returns CONSTANT for calls, we will
1335 not attempt to fold them, including builtins that may profit. */
1336 if (likelyvalue == CONSTANT)
1337 simplified = ccp_fold (stmt);
1338 /* If the statement is likely to have a VARYING result, then do not
1339 bother folding the statement. */
1340 else if (likelyvalue == VARYING)
1342 enum tree_code code = gimple_code (stmt);
1343 if (code == GIMPLE_ASSIGN)
1345 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1347 /* Other cases cannot satisfy is_gimple_min_invariant
1349 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1350 simplified = gimple_assign_rhs1 (stmt);
1352 else if (code == GIMPLE_SWITCH)
1353 simplified = gimple_switch_index (stmt);
1355 /* These cannot satisfy is_gimple_min_invariant without folding. */
1356 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1359 is_constant = simplified && is_gimple_min_invariant (simplified);
1361 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1363 if (dump_file && (dump_flags & TDF_DETAILS))
1365 fprintf (dump_file, "which is likely ");
1366 switch (likelyvalue)
1367 {
1368 case CONSTANT:
1369 fprintf (dump_file, "CONSTANT");
1370 break;
1371 case UNDEFINED:
1372 fprintf (dump_file, "UNDEFINED");
1373 break;
1374 case VARYING:
1375 fprintf (dump_file, "VARYING");
1376 break;
1377 default:;
1378 }
1379 fprintf (dump_file, "\n");
1384 /* The statement produced a constant value. */
1385 val.lattice_val = CONSTANT;
1386 val.value = simplified;
1390 /* The statement produced a nonconstant value. If the statement
1391 had UNDEFINED operands, then the result of the statement
1392 should be UNDEFINED. Otherwise, the statement is VARYING. */
1393 if (likelyvalue == UNDEFINED)
1394 val.lattice_val = likelyvalue;
1395 else
1396 val.lattice_val = VARYING;
1398 val.value = NULL_TREE;
1399 val.mem_ref = NULL_TREE;
1400 }
1401 return val;
1402 }
1404 /* Visit the assignment statement STMT. Set the value of its LHS to the
1405 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
1406 creates virtual definitions, set the value of each new name to that
1407 of the RHS (if we can derive a constant out of the RHS).
1408 Value-returning call statements also perform an assignment, and
1409 are handled here. */
1411 static enum ssa_prop_result
1412 visit_assignment (gimple stmt, tree *output_p)
1415 enum ssa_prop_result retval;
1417 tree lhs = gimple_get_lhs (stmt);
1419 gcc_assert (gimple_code (stmt) != GIMPLE_CALL
1420 || gimple_call_lhs (stmt) != NULL_TREE);
1422 if (gimple_assign_copy_p (stmt))
1424 tree rhs = gimple_assign_rhs1 (stmt);
1426 if (TREE_CODE (rhs) == SSA_NAME)
1428 /* For a simple copy operation, we copy the lattice values. */
1429 prop_value_t *nval = get_value (rhs);
1430 val = *nval;
1431 }
1432 else if (do_store_ccp && stmt_makes_single_load (stmt))
1434 /* Same as above, but the RHS is not a gimple register and yet
1435 has a known VUSE. If STMT is loading from the same memory
1436 location that created the SSA_NAMEs for the virtual operands,
1437 we can propagate the value on the RHS. */
1438 prop_value_t *nval = get_value_loaded_by (stmt, const_val);
1442 && operand_equal_p (nval->mem_ref, rhs, 0))
1443 val = *nval;
1444 else
1445 val = evaluate_stmt (stmt);
1448 val = evaluate_stmt (stmt);
1451 /* Evaluate the statement, which could be
1452 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1453 val = evaluate_stmt (stmt);
1455 retval = SSA_PROP_NOT_INTERESTING;
1457 /* Set the lattice value of the statement's output. */
1458 if (TREE_CODE (lhs) == SSA_NAME)
1460 /* If STMT is an assignment to an SSA_NAME, we only have one
1462 if (set_lattice_value (lhs, val))
1463 {
1464 *output_p = lhs;
1465 if (val.lattice_val == VARYING)
1466 retval = SSA_PROP_VARYING;
1467 else
1468 retval = SSA_PROP_INTERESTING;
1469 }
1471 else if (do_store_ccp && stmt_makes_single_store (stmt))
1473 /* Otherwise, set the names in VDEF operands to the new
1474 constant value and mark the LHS as the memory reference
1475 associated with VAL. */
1480 /* Mark VAL as stored in the LHS of this assignment. */
1481 if (val.lattice_val == CONSTANT)
1482 val.mem_ref = lhs;
1484 /* Set the value of every VDEF to VAL. */
1486 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
1487 {
1488 /* See PR 29801. We may have VDEFs for read-only variables
1489 (see the handling of unmodifiable variables in
1490 add_virtual_operand); do not attempt to change their value. */
1491 if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
1492 continue;
1494 changed |= set_lattice_value (vdef, val);
1495 }
1497 /* Note that for propagation purposes, we are only interested in
1498 visiting statements that load the exact same memory reference
1499 stored here. Those statements will have the exact same list
1500 of virtual uses, so it is enough to set the output of this
1501 statement to be its first virtual definition. */
1502 *output_p = first_vdef (stmt);
1505 if (val.lattice_val == VARYING)
1506 retval = SSA_PROP_VARYING;
1508 retval = SSA_PROP_INTERESTING;
1516 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
1517 if it can determine which edge will be taken. Otherwise, return
1518 SSA_PROP_VARYING. */
1520 static enum ssa_prop_result
1521 visit_cond_stmt (gimple stmt, edge *taken_edge_p)
1522 {
1523 prop_value_t val;
1524 basic_block block;
1526 block = gimple_bb (stmt);
1527 val = evaluate_stmt (stmt);
1529 /* Find which edge out of the conditional block will be taken and add it
1530 to the worklist. If no single edge can be determined statically,
1531 return SSA_PROP_VARYING to feed all the outgoing edges to the
1532 propagation engine. */
1533 *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
1534 if (*taken_edge_p)
1535 return SSA_PROP_INTERESTING;
1536 else
1537 return SSA_PROP_VARYING;
1541 /* Evaluate statement STMT. If the statement produces an output value and
1542 its evaluation changes the lattice value of its output, return
1543 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
1546 If STMT is a conditional branch and we can determine its truth
1547 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
1548 value, return SSA_PROP_VARYING. */
1550 static enum ssa_prop_result
1551 ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
1552 {
1553 tree def;
1554 ssa_op_iter iter;
1556 if (dump_file && (dump_flags & TDF_DETAILS))
1558 fprintf (dump_file, "\nVisiting statement:\n");
1559 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1562 switch (gimple_code (stmt))
1565 /* If the statement is an assignment that produces a single
1566 output value, evaluate its RHS to see if the lattice value of
1567 its output has changed. */
1568 return visit_assignment (stmt, output_p);
1571 /* A value-returning call also performs an assignment. */
1572 if (gimple_call_lhs (stmt) != NULL_TREE)
1573 return visit_assignment (stmt, output_p);
1578 /* If STMT is a conditional branch, see if we can determine
1579 which branch will be taken. */
1580 /* FIXME. It appears that we should be able to optimize
1581 computed GOTOs here as well. */
1582 return visit_cond_stmt (stmt, taken_edge_p);
1588 /* Any other kind of statement is not interesting for constant
1589 propagation and, therefore, not worth simulating. */
1590 if (dump_file && (dump_flags & TDF_DETAILS))
1591 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
1593 /* Definitions made by statements other than assignments to
1594 SSA_NAMEs represent unknown modifications to their outputs.
1595 Mark them VARYING. */
1596 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
1598 prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
1599 set_lattice_value (def, v);
1600 }
1602 return SSA_PROP_VARYING;
1603 }
1606 /* Main entry point for SSA Conditional Constant Propagation. */
1608 static unsigned int
1609 execute_ssa_ccp (bool store_ccp)
1610 {
1611 do_store_ccp = store_ccp;
1612 ccp_initialize ();
1613 ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
1614 if (ccp_finalize ())
1615 return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
1616 else
1617 return 0;
1618 }
1621 static unsigned int
1622 do_ssa_ccp (void)
1623 {
1624 return execute_ssa_ccp (false);
1625 }
1628 static bool
1629 gate_ccp (void)
1630 {
1631 return flag_tree_ccp != 0;
1632 }
1635 struct gimple_opt_pass pass_ccp =
1636 {
1637 {
1638 GIMPLE_PASS,
1639 "ccp", /* name */
1640 gate_ccp, /* gate */
1641 do_ssa_ccp, /* execute */
1642 NULL, /* sub */
1643 NULL, /* next */
1644 0, /* static_pass_number */
1645 TV_TREE_CCP, /* tv_id */
1646 PROP_cfg | PROP_ssa, /* properties_required */
1647 0, /* properties_provided */
1648 0, /* properties_destroyed */
1649 0, /* todo_flags_start */
1650 TODO_dump_func | TODO_verify_ssa
1651 | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
1652 }
1653 };
1656 static unsigned int
1657 do_ssa_store_ccp (void)
1658 {
1659 /* If STORE-CCP is not enabled, we just run regular CCP. */
1660 return execute_ssa_ccp (flag_tree_store_ccp != 0);
1661 }
1663 static bool
1664 gate_store_ccp (void)
1665 {
1666 /* STORE-CCP is enabled only with -ftree-store-ccp, but when
1667 -fno-tree-store-ccp is specified, we should run regular CCP.
1668 That's why the pass is enabled with either flag. */
1669 return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
1670 }
1673 struct gimple_opt_pass pass_store_ccp =
1674 {
1675 {
1676 GIMPLE_PASS,
1677 "store_ccp", /* name */
1678 gate_store_ccp, /* gate */
1679 do_ssa_store_ccp, /* execute */
1680 NULL, /* sub */
1681 NULL, /* next */
1682 0, /* static_pass_number */
1683 TV_TREE_STORE_CCP, /* tv_id */
1684 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
1685 0, /* properties_provided */
1686 0, /* properties_destroyed */
1687 0, /* todo_flags_start */
1688 TODO_dump_func | TODO_verify_ssa
1689 | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
1690 }
1691 };
1693 /* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
1694 BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
1695 is the desired result type. */
1697 static tree
1698 maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
1699 bool allow_negative_idx)
1701 tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
1702 tree array_type, elt_type, elt_size;
1705 /* If BASE is an ARRAY_REF, we can pick up another offset (this time
1706 measured in units of the size of the element type) from that ARRAY_REF.
1707 We can't do anything if either is variable.
1709 The case we handle here is *(&A[N]+O). */
1710 if (TREE_CODE (base) == ARRAY_REF)
1712 tree low_bound = array_ref_low_bound (base);
1714 elt_offset = TREE_OPERAND (base, 1);
1715 if (TREE_CODE (low_bound) != INTEGER_CST
1716 || TREE_CODE (elt_offset) != INTEGER_CST)
1719 elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
1720 base = TREE_OPERAND (base, 0);
1723 /* Ignore stupid user tricks of indexing non-array variables. */
1724 array_type = TREE_TYPE (base);
1725 if (TREE_CODE (array_type) != ARRAY_TYPE)
1727 elt_type = TREE_TYPE (array_type);
1728 if (!useless_type_conversion_p (orig_type, elt_type))
1731 /* Use signed size type for intermediate computation on the index. */
1732 idx_type = signed_type_for (size_type_node);
1734 /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
1735 element type (so we can use the alignment if it's not constant).
1736 Otherwise, compute the offset as an index by using a division. If the
1737 division isn't exact, then don't do anything. */
1738 elt_size = TYPE_SIZE_UNIT (elt_type);
1741 if (integer_zerop (offset))
1743 if (TREE_CODE (elt_size) != INTEGER_CST)
1744 elt_size = size_int (TYPE_ALIGN (elt_type));
1746 idx = build_int_cst (idx_type, 0);
1750 unsigned HOST_WIDE_INT lquo, lrem;
1751 HOST_WIDE_INT hquo, hrem;
1754 /* The final array offset should be signed, so we need
1755 to sign-extend the (possibly pointer) offset here
1756 and use signed division. */
1757 soffset = double_int_sext (tree_to_double_int (offset),
1758 TYPE_PRECISION (TREE_TYPE (offset)));
1759 if (TREE_CODE (elt_size) != INTEGER_CST
1760 || div_and_round_double (TRUNC_DIV_EXPR, 0,
1761 soffset.low, soffset.high,
1762 TREE_INT_CST_LOW (elt_size),
1763 TREE_INT_CST_HIGH (elt_size),
1764 &lquo, &hquo, &lrem, &hrem)
1768 idx = build_int_cst_wide (idx_type, lquo, hquo);
1771 /* Assume the low bound is zero. If there is a domain type, get the
1772 low bound, if any, convert the index into that type, and add the
1774 min_idx = build_int_cst (idx_type, 0);
1775 domain_type = TYPE_DOMAIN (array_type);
1776 if (domain_type)
1777 {
1778 idx_type = domain_type;
1779 if (TYPE_MIN_VALUE (idx_type))
1780 min_idx = TYPE_MIN_VALUE (idx_type);
1781 else
1782 min_idx = fold_convert (idx_type, min_idx);
1784 if (TREE_CODE (min_idx) != INTEGER_CST)
1785 return NULL_TREE;
1787 elt_offset = fold_convert (idx_type, elt_offset);
1788 }
1790 if (!integer_zerop (min_idx))
1791 idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
1792 if (!integer_zerop (elt_offset))
1793 idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
1795 /* Make sure to possibly truncate late after offsetting. */
1796 idx = fold_convert (idx_type, idx);
1798 /* We don't want to construct access past array bounds. For example
1801 should not be simplified into (*c)[14] or tree-vrp will
1802 give false warnings. The same is true for
1803 struct A { long x; char d[0]; } *a;
1805 which should be not folded to &a->d[-8]. */
1807 && TYPE_MAX_VALUE (domain_type)
1808 && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
1810 tree up_bound = TYPE_MAX_VALUE (domain_type);
1812 if (tree_int_cst_lt (up_bound, idx)
1813 /* Accesses after the end of arrays of size 0 (gcc
1814 extension) and 1 are likely intentional ("struct
1816 && compare_tree_int (up_bound, 1) > 0)
1820 && TYPE_MIN_VALUE (domain_type))
1822 if (!allow_negative_idx
1823 && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
1824 && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
1827 else if (!allow_negative_idx
1828 && compare_tree_int (idx, 0) < 0)
1831 return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
1835 /* Attempt to fold *(S+O) to S.X.
1836 BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE
1837 is the desired result type. */
1839 static tree
1840 maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
1841 tree orig_type, bool base_is_ptr)
1843 tree f, t, field_type, tail_array_field, field_offset;
1847 if (TREE_CODE (record_type) != RECORD_TYPE
1848 && TREE_CODE (record_type) != UNION_TYPE
1849 && TREE_CODE (record_type) != QUAL_UNION_TYPE)
1852 /* Short-circuit silly cases. */
1853 if (useless_type_conversion_p (record_type, orig_type))
1856 tail_array_field = NULL_TREE;
1857 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1861 if (TREE_CODE (f) != FIELD_DECL)
1863 if (DECL_BIT_FIELD (f))
1866 if (!DECL_FIELD_OFFSET (f))
1868 field_offset = byte_position (f);
1869 if (TREE_CODE (field_offset) != INTEGER_CST)
1872 /* ??? Java creates "interesting" fields for representing base classes.
1873 They have no name, and have no context. With no context, we get into
1874 trouble with nonoverlapping_component_refs_p. Skip them. */
1875 if (!DECL_FIELD_CONTEXT (f))
1878 /* The previous array field isn't at the end. */
1879 tail_array_field = NULL_TREE;
1881 /* Check to see if this offset overlaps with the field. */
1882 cmp = tree_int_cst_compare (field_offset, offset);
1886 field_type = TREE_TYPE (f);
1888 /* Here we exactly match the offset being checked. If the types match,
1889 then we can return that field. */
1891 && useless_type_conversion_p (orig_type, field_type))
1894 base = build1 (INDIRECT_REF, record_type, base);
1895 t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1899 /* Don't care about offsets into the middle of scalars. */
1900 if (!AGGREGATE_TYPE_P (field_type))
1903 /* Check for array at the end of the struct. This is often
1904 used as for flexible array members. We should be able to
1905 turn this into an array access anyway. */
1906 if (TREE_CODE (field_type) == ARRAY_TYPE)
1907 tail_array_field = f;
1909 /* Check the end of the field against the offset. */
1910 if (!DECL_SIZE_UNIT (f)
1911 || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
1913 t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
1914 if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
1917 /* If we matched, then set offset to the displacement into
1918 this field. */
1919 if (base_is_ptr)
1920 new_base = build1 (INDIRECT_REF, record_type, base);
1921 else
1922 new_base = base;
1923 new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
1925 /* Recurse to possibly find the match. */
1926 ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
1927 f == TYPE_FIELDS (record_type));
1930 ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
1936 if (!tail_array_field)
1939 f = tail_array_field;
1940 field_type = TREE_TYPE (f);
1941 offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
1943 /* If we get here, we've got an aggregate field, and a possibly
1944 nonzero offset into them. Recurse and hope for a valid match. */
1946 base = build1 (INDIRECT_REF, record_type, base);
1947 base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
1949 t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
1950 f == TYPE_FIELDS (record_type));
1953 return maybe_fold_offset_to_component_ref (field_type, base, offset,
1957 /* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
1958 or BASE[index] or by combination of those.
1960 Before attempting the conversion strip off existing ADDR_EXPRs and
1961 handled component refs. */
1963 static tree
1964 maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
1968 bool base_is_ptr = true;
1971 if (TREE_CODE (base) == ADDR_EXPR)
1973 base_is_ptr = false;
1975 base = TREE_OPERAND (base, 0);
1977 /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong field of a union,
1978 so it needs to be removed and a new COMPONENT_REF constructed.
1979 Such wrong COMPONENT_REFs are often constructed by folding the
1980 (type *)&object within the expression (type *)&object+offset. */
1981 if (handled_component_p (base))
1982 {
1983 HOST_WIDE_INT sub_offset, size, maxsize;
1984 tree newbase;
1985 newbase = get_ref_base_and_extent (base, &sub_offset,
1987 gcc_assert (newbase);
1990 && !(sub_offset & (BITS_PER_UNIT - 1)))
1994 offset = int_const_binop (PLUS_EXPR, offset,
1995 build_int_cst (TREE_TYPE (offset),
1996 sub_offset / BITS_PER_UNIT), 1);
1999 if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
2000 && integer_zerop (offset))
2001 return base;
2002 type = TREE_TYPE (base);
2007 if (!POINTER_TYPE_P (TREE_TYPE (base)))
2008 return NULL_TREE;
2009 type = TREE_TYPE (TREE_TYPE (base));
2011 ret = maybe_fold_offset_to_component_ref (type, base, offset,
2012 orig_type, base_is_ptr);
2013 if (!ret)
2014 {
2015 if (base_is_ptr)
2016 base = build1 (INDIRECT_REF, type, base);
2017 ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
2018 }
2019 return ret;
2020 }
2022 /* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
2023 or &BASE[index] or by combination of those.
2025 Before attempting the conversion strip off existing component refs. */
2027 static tree
2028 maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
2032 gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
2033 && POINTER_TYPE_P (orig_type));
2035 t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
2036 if (t != NULL_TREE)
2037 {
2038 tree orig = addr;
2039 tree ptr_type;
2041 /* For __builtin_object_size to function correctly we need to
2042 make sure not to fold address arithmetic so that we change
2043 reference from one array to another. This would happen for
2046 struct X { char s1[10]; char s2[10] } s;
2047 char *foo (void) { return &s.s2[-4]; }
2049 where we need to avoid generating &s.s1[6]. As the C and
2050 C++ frontends create different initial trees
2051 (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some
2052 sophisticated comparisons here. Note that checking for the
2053 condition after the fact is easier than trying to avoid doing
2056 if (TREE_CODE (orig) == ADDR_EXPR)
2057 orig = TREE_OPERAND (orig, 0);
2058 if ((TREE_CODE (orig) == ARRAY_REF
2059 || (TREE_CODE (orig) == COMPONENT_REF
2060 && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
2061 && (TREE_CODE (t) == ARRAY_REF
2062 || (TREE_CODE (t) == COMPONENT_REF
2063 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) == ARRAY_TYPE))
2064 && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
2065 ? TREE_OPERAND (orig, 0) : orig,
2066 TREE_CODE (t) == ARRAY_REF
2067 ? TREE_OPERAND (t, 0) : t, 0))
2068 return NULL_TREE;
2070 ptr_type = build_pointer_type (TREE_TYPE (t));
2071 if (!useless_type_conversion_p (orig_type, ptr_type))
2072 return NULL_TREE;
2073 return build_fold_addr_expr_with_type (t, ptr_type);
2074 }
2076 return NULL_TREE;
2077 }
2079 /* A subroutine of fold_stmt_r. Attempt to simplify *(BASE+OFFSET).
2080 Return the simplified expression, or NULL if nothing could be done. */
2082 static tree
2083 maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
2084 {
2085 tree t;
2086 bool volatile_p = TREE_THIS_VOLATILE (expr);
2088 /* We may well have constructed a double-nested PLUS_EXPR via multiple
2089 substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that
2090 are sometimes added. */
2092 STRIP_TYPE_NOPS (base);
2093 TREE_OPERAND (expr, 0) = base;
2095 /* One possibility is that the address reduces to a string constant. */
2096 t = fold_read_from_constant_string (expr);
2100 /* Add in any offset from a POINTER_PLUS_EXPR. */
2101 if (TREE_CODE (base) == POINTER_PLUS_EXPR)
2105 offset2 = TREE_OPERAND (base, 1);
2106 if (TREE_CODE (offset2) != INTEGER_CST)
2108 base = TREE_OPERAND (base, 0);
2110 offset = fold_convert (sizetype,
2111 int_const_binop (PLUS_EXPR, offset, offset2, 1));
2114 if (TREE_CODE (base) == ADDR_EXPR)
2116 tree base_addr = base;
2118 /* Strip the ADDR_EXPR. */
2119 base = TREE_OPERAND (base, 0);
2121 /* Fold away CONST_DECL to its value, if the type is scalar. */
2122 if (TREE_CODE (base) == CONST_DECL
2123 && is_gimple_min_invariant (DECL_INITIAL (base)))
2124 return DECL_INITIAL (base);
2126 /* Try folding *(&B+O) to B.X. */
2127 t = maybe_fold_offset_to_reference (base_addr, offset,
2131 /* Preserve volatileness of the original expression.
2132 We can end up with a plain decl here which is shared
2133 and we shouldn't mess with its flags. */
2135 TREE_THIS_VOLATILE (t) = volatile_p;
2141 /* We can get here for out-of-range string constant accesses,
2142 such as "_"[3]. Bail out of the entire substitution search
2143 and arrange for the entire statement to be replaced by a
2144 call to __builtin_trap. In all likelihood this will all be
2145 constant-folded away, but in the meantime we can't leave with
2146 something that get_expr_operands can't understand. */
2150 if (TREE_CODE (t) == ADDR_EXPR
2151 && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
2153 /* FIXME: Except that this causes problems elsewhere with dead
2154 code not being deleted, and we die in the rtl expanders
2155 because we failed to remove some ssa_name. In the meantime,
2156 just return zero. */
2157 /* FIXME2: This condition should be signaled by
2158 fold_read_from_constant_string directly, rather than
2159 re-checking for it here. */
2160 return integer_zero_node;
2163 /* Try folding *(B+O) to B->X. Still an improvement. */
2164 if (POINTER_TYPE_P (TREE_TYPE (base)))
2166 t = maybe_fold_offset_to_reference (base, offset,
2173 /* Otherwise we had an offset that we could not simplify. */
2178 /* A quaint feature extant in our address arithmetic is that there
2179 can be hidden type changes here. The type of the result need
2180 not be the same as the type of the input pointer.
2182 What we're after here is an expression of the form
2183 (T *)(&array + const)
2184 where array is OP0, const is OP1, RES_TYPE is T and
2185 the cast doesn't actually exist, but is implicit in the
2186 type of the POINTER_PLUS_EXPR. We'd like to turn this into
2187 &array[x]
2188 which may be able to propagate further. */
2190 static tree
2191 maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
2196 /* It had better be a constant. */
2197 if (TREE_CODE (op1) != INTEGER_CST)
2199 /* The first operand should be an ADDR_EXPR. */
2200 if (TREE_CODE (op0) != ADDR_EXPR)
2202 op0 = TREE_OPERAND (op0, 0);
2204 /* If the first operand is an ARRAY_REF, expand it so that we can fold
2205 the offset into it. */
2206 while (TREE_CODE (op0) == ARRAY_REF)
2208 tree array_obj = TREE_OPERAND (op0, 0);
2209 tree array_idx = TREE_OPERAND (op0, 1);
2210 tree elt_type = TREE_TYPE (op0);
2211 tree elt_size = TYPE_SIZE_UNIT (elt_type);
2214 if (TREE_CODE (array_idx) != INTEGER_CST)
2215 break;
2216 if (TREE_CODE (elt_size) != INTEGER_CST)
2217 break;
2219 /* Un-bias the index by the min index of the array type. */
2220 min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
2221 if (min_idx)
2222 {
2223 min_idx = TYPE_MIN_VALUE (min_idx);
2224 if (min_idx)
2225 {
2226 if (TREE_CODE (min_idx) != INTEGER_CST)
2227 break;
2229 array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
2230 if (!integer_zerop (min_idx))
2231 array_idx = int_const_binop (MINUS_EXPR, array_idx,
2232 min_idx, 0);
2233 }
2234 }
2236 /* Convert the index to a byte offset. */
2237 array_idx = fold_convert (sizetype, array_idx);
2238 array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
2240 /* Update the operands for the next round, or for folding. */
2241 op1 = int_const_binop (PLUS_EXPR,
2242 op1, array_idx, 0);
2243 op0 = array_obj;
2244 }
2246 ptd_type = TREE_TYPE (res_type);
2247 /* If we want a pointer to void, reconstruct the reference from the
2248 array element type. A pointer to that can be trivially converted
2249 to void *. This happens as we fold (void *)(ptr p+ off). */
2250 if (VOID_TYPE_P (ptd_type)
2251 && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
2252 ptd_type = TREE_TYPE (TREE_TYPE (op0));
2254 /* At which point we can try some of the same things as for indirects. */
2255 t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
2257 t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
2260 t = build1 (ADDR_EXPR, res_type, t);
/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement; however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            COND_EXPR_COND (expr) = tem;
        }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
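/* For example (an illustrative sketch, not from the original sources):
   if ARG is the address &"hello"[0] and TYPE is 0, c_strlen computes
   the INTEGER_CST 5, which is stored in *LENGTH and true is returned.
   With TYPE == 2, ARG itself must be a non-negative INTEGER_CST and
   *LENGTH accumulates the maximum such value seen.  */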
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length,
                                     visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      break;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
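            /* E.g. (illustrative): for a PHI like

                 s_5 = PHI <s_5, &"ab"[0], &"xy"[0]>

               the self-referential first argument is skipped and the
               remaining arguments agree on the constant length 2.  */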
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      break;
    }

  return false;
}
/* Fold a builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
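/* For instance (an illustrative sketch, not from the original sources):
   when the propagated string length of src_2 is known to be 3,

     __builtin_strcpy (dst_1, src_2);

   may be rewritten as the equivalent of

     __builtin_memcpy (dst_1, src_2, 4);

   copying the three characters together with the terminating NUL.  */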
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0; i < nargs; i++)
    {
      if ((arg_mask >> i) & 1)
        {
          a = gimple_call_arg (stmt, i);
          bitmap_clear (visited);
          if (!get_maxval_strlen (a, &val[i], visited, type))
            val[i] = NULL_TREE;
        }
    }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return rhs;
      }
      break;

    case GIMPLE_UNARY_RHS:
      result = fold_unary (subcode,
                           gimple_expr_type (stmt),
                           gimple_assign_rhs1 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      else if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
               && POINTER_TYPE_P (gimple_expr_type (stmt))
               && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
        {
          tree type = gimple_expr_type (stmt);
          tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                 integer_zero_node, type);
          if (t)
            return t;
        }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        result = maybe_fold_stmt_addition (
                   TREE_TYPE (gimple_assign_lhs (stmt)),
                   gimple_assign_rhs1 (stmt),
                   gimple_assign_rhs2 (stmt));

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
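/* E.g. (illustrative): once constant propagation has turned the
   predicate of "if (x_1 > 4)" into "5 > 4", fold_binary reduces it to
   a constant true and the condition is rewritten in place; adjusting
   the now-dead outgoing edge is left to the CFG cleanup code.  */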
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
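/* E.g. (an illustrative sketch): if earlier propagation has replaced
   the object of a C++ virtual call with the address of a known decl,
   the OBJ_TYPE_REF handling below can resolve the reference to the
   concrete method, turning the indirect call into a direct one.  */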
static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */
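/* E.g. (illustrative): after propagating &x into a dereference, the
   operand walk below rewrites

     y_1 = *&x;

   in place as y_1 = x, without allocating a replacement statement.  */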
bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;

        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
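/* E.g. (an illustrative sketch): in a block such as

     p_1 = __builtin_stack_save ();
     ...                              <- no calls or asm statements
     __builtin_stack_restore (p_1);

   that falls through to the function exit, the save/restore pair has
   no observable effect, so both calls can be eliminated.  */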
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
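/* E.g. (illustrative): on a target whose va_list is a plain pointer,

     __builtin_va_start (&ap, 0);

   becomes the simple assignment ap = __builtin_next_arg (0), and
   __builtin_va_end (&ap) is deleted outright.  */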
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
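/* E.g. (an illustrative sketch): replacing the call in

     x_1 = foo ();

   with the expression a_2 + b_3 gimplifies the expression into a
   temporary, inserts the statements computing it before the call, and
   rewrites the statement itself as an assignment of the temporary
   to x_1.  */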
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin; there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};