/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED  ->  the initial state of the value.  This value
			   is replaced with a correct initial value
			   the first time the value is used, so the
			   rest of the pass does not need to care about
			   it.  Using this value simplifies initialization
			   of the pass, and prevents us from needlessly
			   scanning statements that are never reached.

	UNDEFINED      ->  V_i is a local variable whose definition
			   has not been processed yet.  Therefore we
			   don't yet know if its value is a constant
			   or not.

	CONSTANT       ->  V_i has been found to hold a constant
			   value C.

	VARYING        ->  V_i cannot take a constant value, or if it
			   does, it is not possible to determine it
			   at compile time.
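
   As an illustrative walk-through (a sketch, not taken from any
   particular testcase): given

	x_1 = 4;
	y_2 = x_1 + 1;

   both x_1 and y_2 start out UNINITIALIZED, become CONSTANT 4 and
   CONSTANT 5 when their defining statements are simulated, and would
   only be lowered to VARYING if simulation later found a conflicting
   value, since lattice transitions only move in one direction.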

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited again.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
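
      For instance (an illustrative sketch), given

		x_1 = 0;
		if (x_1 > 20)
		  goto <bb A>;
		else
		  goto <bb B>;

      the predicate folds to false, so the edge into <bb A> is marked
      not executable and only <bb B> keeps being simulated.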

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;

		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
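
   As a small end-to-end sketch (illustrative), the input

	a_2 = 4;
	b_3 = a_2 + 5;
	return b_3;

   leaves the propagation phase with a_2 = CONSTANT 4 and
   b_3 = CONSTANT 9, after which substitute_and_fold rewrites the
   return into 'return 9;'.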

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	  # a_5 = VDEF <a_4>
	  a.a = 2;

	  # VUSE <a_5>
	  x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;

	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"
#include "dbgcnt.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    add_referenced_var (base);
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
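
/* For instance (illustrative of the rules above): a use of an
   uninitialized local 'int i' defaults to UNDEFINED via the empty
   defining statement, a PARM_DECL or any non-register variable
   defaults to VARYING, and an SSA name assigned from a static 'const'
   variable with a known initializer starts out CONSTANT.  */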

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL_TREE;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
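
/* For example (illustrative): for 'z_5 = x_3 + y_4' where x_3 is
   CONSTANT 4 and y_4 is VARYING, the likely value is CONSTANT, so
   evaluate_stmt will still try to fold the statement; if both
   operands were UNDEFINED, the result would be UNDEFINED instead.  */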

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }

  /* Debug count support.  Reset the values of ssa names
     VARYING when the total number of ssa names analyzed is
     beyond the debug count specified.  */
  {
    unsigned i;

    for (i = 0; i < num_ssa_names; i++)
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
  }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
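
/* A few worked instances of the rules above (illustrative):
   UNDEFINED M CONSTANT 4 = CONSTANT 4, CONSTANT 4 M CONSTANT 4 =
   CONSTANT 4, and CONSTANT 4 M CONSTANT 5 = VARYING.  */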

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

static bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
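
/* For instance (illustrative): when p_1 is known to be '&x' and 'x'
   has complete, compatibly-typed storage, a dereference '*p_1' may be
   folded to plain 'x'; an incomplete type for 'x' or a lost volatile
   qualifier makes this predicate refuse the propagation.  */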

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (TREE_CODE (rhs),
					   TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p (TREE_TYPE (rhs),
							TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary_ignore_overflow (subcode,
						 gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
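
/* For example (illustrative): given 'static const int tbl[3] = {1, 2, 3};'
   a read 'tbl[2]' resolves through DECL_INITIAL of 'tbl' to the
   CONSTRUCTOR element '3', and a read '"abc"[1]' folds to 'b'.  */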

static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;
	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;
	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   new value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  return 0;
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
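
/* For instance (an illustrative sketch, assuming a 4-byte 'int'): for
   'int a[10]', folding the access '*(&a + 8)' with ORIG_TYPE 'int'
   yields 'a[2]', because the byte offset 8 divides exactly by the
   element size; a non-exact division makes the function give up.  */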

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);

  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  else if (domain_type
	   && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
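
/* For instance (an illustrative sketch): given 'struct S { int a;
   int b; } s' where 'b' sits at byte offset 4, an access through
   BASE '&s' with OFFSET 4 and ORIG_TYPE 'int' can fold to 's.b';
   offsets landing in the middle of a scalar field are rejected.  */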

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks e.g. the
	 wrong field of a union, so it needs to be removed and a new
	 COMPONENT_REF constructed.  The wrong COMPONENT_REFs are often
	 constructed by folding the (type *)&object within the expression
	 (type *)&object+offset.  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && size != -1
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      offset = int_const_binop (PLUS_EXPR, offset,
					build_int_cst (TREE_TYPE (offset),
						       sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }

  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

static tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding in the first place.  */
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}

/* For passing state through walk_tree into fold_stmt_r and its
   children.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
2196 /* Subroutine of fold_stmt called via walk_tree. We perform several
2197 simplifications of EXPR_P, mostly having to do with pointer arithmetic. */
2200 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2202 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2203 struct fold_stmt_r_data *fold_stmt_r_data;
2204 bool *inside_addr_expr_p;
2206 tree expr = *expr_p, t;
2207 bool volatile_p = TREE_THIS_VOLATILE (expr);
2209 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2210 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2211 changed_p = fold_stmt_r_data->changed_p;
  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
	t = NULL_TREE;
      if (!t
	  && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	/* If we had a good reason for propagating the address here,
	   make sure we end up with valid gimple.  See PR34989.  */
	t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
						integer_zero_node,
						TREE_TYPE (TREE_TYPE (expr)))))
	return t;
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.
	 Don't fold on LHS either, to avoid folding "abc"[0] = 5
	 into 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
	 propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;
    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
				    TREE_OPERAND (expr, 0),
				    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  /* This is actually a conditional expression, not a GIMPLE
	     conditional statement, however, the valid_gimple_rhs_p
	     test still applies.  */
	  set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    COND_EXPR_COND (expr) = tem;
	}
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
	 We can end up with a plain decl here which is shared
	 and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
	TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
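/* For instance, fold_stmt_r can rewrite

	*&x		into  x		(INDIRECT_REF case)
	"abc"[1]	into  'b'	(ARRAY_REF case, rvalue only)

   while stores such as *"abc" = 5 and "abc"[0] = 5 are deliberately
   left alone.  */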
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
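/* A small example of the PHI case above: given

	s_5 = PHI <"fox", "it">

   a query with TYPE == 1 accumulates the maximum of the two string
   lengths and sets *LENGTH to 3, whereas a TYPE == 0 query fails
   because the lengths (3 and 2) are not equal.  */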
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;
  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	    fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }
  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
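/* For example, when get_maxval_strlen determines that the source of a
   strcpy has length 3, fold_builtin_strcpy can substitute

	strcpy (dst, src)	with	memcpy (dst, src, 4)

   copying the terminating nul explicitly -- the kind of non-constant,
   call-to-call replacement the comment above warns callers about.  */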
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree temp = fold (COND_EXPR_COND (rhs));
	    if (temp != COND_EXPR_COND (rhs))
	      result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	/* It is possible that fold_stmt_r simplified the RHS.
	   Make sure that the subcode of this statement still
	   reflects the principal operator of the rhs operand.  */
	return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
	else if (CONVERT_EXPR_CODE_P (subcode)
		 && POINTER_TYPE_P (gimple_expr_type (stmt))
		 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	  {
	    tree type = gimple_expr_type (stmt);
	    tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
						   integer_zero_node, type);
	    if (t)
	      return t;
	  }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	{
	  tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
	    {
	      type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
	      if (!useless_type_conversion_p
		    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
		type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	    }

	  result = maybe_fold_stmt_addition (type,
					     gimple_assign_rhs1 (stmt),
					     gimple_assign_rhs2 (stmt));
	}

      if (!result)
	result = fold_binary (subcode,
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;

	  /* Fold might have produced non-GIMPLE, so if we trust it blindly
	     we lose canonicalization opportunities.  Do not go again
	     through fold here though, or the same non-GIMPLE will be
	     produced.  */
	  if (commutative_tree_code (subcode)
	      && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				       gimple_assign_rhs2 (stmt), false))
	    return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
			   gimple_assign_rhs2 (stmt),
			   gimple_assign_rhs1 (stmt));
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
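/* For instance, once CCP has substituted constants into the operands,
   a GIMPLE_BINARY_RHS such as

	x_3 = 3 + 4

   folds to the replacement rhs '7', while a POINTER_PLUS_EXPR whose
   first operand is the address of an array may fold back into the
   address of an element via maybe_fold_stmt_addition above.  */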
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
			     boolean_type_node,
			     gimple_cond_lhs (stmt),
			     gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
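/* For example, after constant propagation the condition

	if (2 > 1)

   folds to the constant-true condition 'if (1 != 0)', leaving any
   resulting edge removal to a later cfg cleanup.  */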
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
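/* A sketch of the OBJ_TYPE_REF case: when earlier propagation has
   replaced the object operand of a virtual call with '&d' for some
   known decl 'd', the language hook can often resolve the reference to
   the concrete method, and the resolved function decl is smashed
   directly into the call operand, devirtualizing it.  */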
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE)
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	stmt = gsi_stmt (*gsi);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to _builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops;
	tree new_rhs;

	old_num_ops = gimple_num_ops (stmt);
	si = gsi_for_stmt (stmt);
	new_rhs = fold_gimple_assign (&si);
	if (new_rhs != NULL_TREE
	    && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
	  {
	    gimple_assign_set_rhs_from_tree (&si, new_rhs);
	    changed = true;
	  }
	gcc_assert (gsi_stmt (si) == stmt);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
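/* A note on the in-place restriction above: a binary rhs such as
   'x_1 + 0' may shrink to the single operand 'x_1' without
   reallocating the statement, but a replacement that would need more
   operand slots than gimple_num_ops provides must be rejected here,
   since the statement cannot grow in place.  */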
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
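/* For instance, with no calls or ASM_EXPRs in between,

	p_1 = __builtin_stack_save ();
	...
	__builtin_stack_restore (p_1);
	__builtin_stack_restore (p_2);

   the first restore is redundant -- the following restore reinstates
   the stack anyway -- so the save is rewritten to 'p_1 = 0' and the
   zero returned above lets the caller delete the restore.  */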
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
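/* On targets where va_list is a plain character or void pointer this
   rewrites, for example,

	__builtin_va_start (&ap, 0)	into	ap = __builtin_next_arg (0)
	__builtin_va_copy (&d, s)	into	d = s
	__builtin_va_end (&ap)		into	nothing

   where the integer_zero_node returned above marks a call that has no
   effect and can simply be deleted.  */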
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
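/* This routine is the fallback used by execute_fold_all_builtins when
   update_call_from_tree fails, i.e. when the folded replacement is not
   a valid GIMPLE rhs and must first be re-gimplified into a sequence
   of statements.  */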
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  push_stmt_changes (gsi_stmt_ptr (&i));

	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  pop_stmt_changes (gsi_stmt_ptr (&i));

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
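/* The retry logic above matters because one simplification can expose
   another: for instance strcpy (dst, "ab") first becomes
   memcpy (dst, "ab", 3), and the resulting memcpy call is then
   reconsidered on the next iteration under its new function code.  */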
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};