/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
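
   As a small, hand-written illustration (invented names, not compiler
   output), the lattice evolves as follows while these statements are
   simulated:

	x_1 = 4;		x_1 becomes CONSTANT 4
	y_2 = x_1 + 1;		y_2 becomes CONSTANT 5
	z_3 = y_2 * w_4;	w_4 is VARYING, so z_3 becomes VARYING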

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
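
   For instance, in the following hand-written, GIMPLE-like fragment
   (illustrative only, names invented):

	x_1 = 4;
	if (x_1 > 10)
	  y_2 = 0;
	else
	  y_3 = 1;
	y_4 = PHI (y_2, y_3)

   the predicate folds to false, so the edge into 'y_2 = 0' is marked
   not executable, the meet over the PHI sees only y_3 and yields
   CONSTANT 1, and substitute_and_fold replaces uses of y_4 with 1.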

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values with the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance, global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    {
	      if (TREE_CODE (val) == ADDR_EXPR)
		{
		  tree base = get_base_address (TREE_OPERAND (val, 0));
		  if (base && TREE_CODE (base) == VAR_DECL)
		    add_referenced_var (base);
		}
	      return val;
	    }
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
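
/* For example (hand-written illustration): given

     static const int forty_two = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 for
   'forty_two'; a non-external 'const int' that binds locally and has
   no initializer folds to 0, as described above.  */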

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
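
/* For example (illustrative only), when simulating

     x_2 = -0.0;	-> canonicalized to CONSTANT 0.0 when
			   !HONOR_SIGNED_ZEROS
     y_3 = 0.0 / 0.0;	-> a NaN constant; canonicalized to UNDEFINED
			   when !HONOR_NANS

   so that later folding cannot contradict the recorded lattice value.  */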

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
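
/* For example (illustrative only): for 'z_3 = x_1 + y_2' where y_2 is
   UNDEFINED, likely_value returns UNDEFINED, since PLUS_EXPR is in the
   list above.  For 'z_3 = MAX_EXPR <x_1, y_2>' it falls back to VARYING
   instead, because the other, possibly VARYING, operand may select the
   result.  */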

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call that does not return a value, or a call to a
     declared function that is not a builtin, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of SSA names to VARYING when
   the total number of SSA names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  VAL1 already holds
	 the value we want to keep.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
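
/* A few sample meets (illustrative only):

     UNDEFINED M 4       = 4
     4         M 4       = 4
     4         M 5       = VARYING
     4         M VARYING = VARYING  */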

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the
   arguments of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (addr))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
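
/* Example (illustrative only): given 'int x;' and 'p_1 = &x;', this
   returns true for the dereference '*p_1', so the pair may be cancelled
   to a direct use of 'x': the pointed-to type is complete and the types
   convert uselessly.  */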

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (TREE_CODE (rhs),
					   TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p (TREE_TYPE (rhs),
							TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary_ignore_overflow (subcode,
						 gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant
	       value, if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}
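
      /* Example (illustrative only): with 'static const char s[] = "abc";',
	 a read of s[1] is folded here to the character constant 'b'.  */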

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds.  For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
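
/* Example (illustrative only): for 'int a[4];', folding the dereference
   *(int *)((char *)a + 8) arrives here with OFFSET 8 and ORIG_TYPE
   'int'; 8 divides exactly by the element size 4, so the result is the
   reference a[2].  */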

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks e.g. the
	 wrong field of a union, so it needs to be removed and a new
	 COMPONENT_REF constructed.  Such wrong COMPONENT_REFs are often
	 constructed by folding the (type *)&object within the expression
	 (type *)&object+offset.  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }

  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    {
      /* Or op0 should now be A[0] and the non-constant offset defined
	 via a multiplication by the array element size.  */
      if (TREE_CODE (op0) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (op0, 1))
	  && TREE_CODE (op1) == SSA_NAME
	  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
	{
	  gimple offset_def = SSA_NAME_DEF_STMT (op1);
	  if (!is_gimple_assign (offset_def))
	    return NULL_TREE;

	  if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
	      && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
	      && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
				     TYPE_SIZE_UNIT (TREE_TYPE (op0))))
	    return build1 (ADDR_EXPR, res_type,
			   build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   gimple_assign_rhs1 (offset_def),
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	  else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
		   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
	    return build1 (ADDR_EXPR, res_type,
			   build4 (ARRAY_REF, TREE_TYPE (op0),
				   TREE_OPERAND (op0, 0),
				   op1,
				   TREE_OPERAND (op0, 2),
				   TREE_OPERAND (op0, 3)));
	}
      return NULL_TREE;
    }

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
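
/* Example (illustrative only): with 'int a[4];', the POINTER_PLUS_EXPR
   '&a[0] p+ 8' reaching maybe_fold_stmt_addition is rewritten to
   '&a[2]', which later substitutions may propagate further.  */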
/* For passing state through walk_tree into fold_stmt_r and its
   callbacks.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;
2250 /* ??? It'd be nice if walk_tree had a pre-order option. */
2251 switch (TREE_CODE (expr))
2254 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2259 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2261 /* Avoid folding *"abc" = 5 into 'a' = 5. */
2262 if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
2265 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2266 /* If we had a good reason for propagating the address here,
2267 make sure we end up with valid gimple. See PR34989. */
2268 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2272 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2277 if (POINTER_TYPE_P (TREE_TYPE (expr))
2278 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
2279 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2280 && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
2282 TREE_TYPE (TREE_TYPE (expr)))))
2286 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2287 We'd only want to bother decomposing an existing ARRAY_REF if
2288 the base array is found to have another offset contained within.
2289 Otherwise we'd be wasting time. */
2291 /* If we are not processing expressions found within an
2292 ADDR_EXPR, then we can fold constant array references.
2293 Don't fold on LHS either, to avoid folding "abc"[0] = 5
2295 if (!*inside_addr_expr_p && !wi->is_lhs)
2296 t = fold_read_from_constant_string (expr);
    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      t = NULL_TREE;
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;
    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            COND_EXPR_COND (expr) = tem;
          t = expr;
          break;
        }
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
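/* Illustrative sketch of the COND_EXPR case (the example is assumed,
   not taken from the original sources): for a statement operand

     x = i_1 < 7 ? a_2 : b_3;

   where propagation has replaced i_1 by the constant 3, fold_binary
   reduces the embedded comparison to constant true, the folded
   condition is written back through COND_EXPR_COND, and later folding
   of the whole RHS can then reduce the statement to the copy
   `x = a_2'.  */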
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length,
                                     visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
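/* For illustration (hypothetical GIMPLE, not from the original
   sources), with TYPE == 0 the walk above succeeds on

     # s_3 = PHI <"foo"(2), "bar"(3)>

   because both arguments have string length 3, and fails on

     # s_3 = PHI <"foo"(2), "quux"(3)>

   where the lengths 3 and 4 disagree.  With TYPE == 1 the second PHI
   would instead yield the maximum string length 4.  */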
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
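/* A typical simplification made possible by the length data gathered
   above (the example is illustrative, not from the original sources):
   once get_maxval_strlen shows that the source of a strcpy has length
   3, fold_builtin_strcpy can rewrite

     strcpy (dst, "abc");

   as the cheaper, length-explicit

     memcpy (dst, "abc", 4);      (3 characters plus the trailing NUL)

   which is the "another call to a different function" case mentioned
   in the comment before this function.  */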
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs),
                                    COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        /* It is possible that fold_stmt_r simplified the RHS.
           Make sure that the subcode of this statement still
           reflects the principal operator of the rhs operand.  */
      }
      break;
    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
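/* Sketch of the commutative-swap fallback above (illustrative, not
   from the original sources): if propagation leaves a canonically
   mis-ordered statement such as

     x_2 = 10 + y_1;

   and fold_binary returns a form that is not a valid GIMPLE rhs, the
   operands are swapped by hand to restore the canonical order

     x_2 = y_1 + 10;

   without calling fold again, which would only reproduce the same
   non-GIMPLE tree.  */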
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
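/* E.g. (illustrative, not from the original sources): once CCP has
   substituted the constant 7 for i_1, the statement

     if (i_1 > 3)

   folds to a constant-true condition.  Only the condition is
   rewritten here; the now-unreachable edge is left in place for a
   later CFG cleanup to remove.  */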
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
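/* Illustrative C++-level sketch of the OBJ_TYPE_REF case (assumed
   example, not from the original sources): when earlier propagation
   has placed the address of a known object into the call

     obj_2->vfn ();        (OBJ_TYPE_REF with &some_decl as object)

   the fold_obj_type_ref language hook can look the slot up in the
   vtable of some_decl's static type and smash the call operand to
   the concrete method, turning the indirect call into a direct
   one.  */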
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to _builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;

        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
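/* Note the in-place restriction above (sketch, not from the original
   sources): a replacement RHS is accepted only if it needs no more
   operands than the statement already has, e.g. `x = y + 0' (two
   operands) may shrink to `x = y' (one operand), but a wider RHS
   would require reallocating the statement, which fold_stmt_inplace
   by contract must never do.  */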
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
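/* Source-level sketch of the pattern targeted above (illustrative,
   not from the original sources): after inlining a callee that used
   a variable-length array, a block may end with

     p_1 = __builtin_stack_save ();
     ...                              (no calls or asm statements)
     __builtin_stack_restore (p_1);

   followed only by the function exit.  The epilogue will release the
   stack anyway, so the save is rewritten into the trivial `p_1 = 0'
   and integer_zero_node is returned so that the caller deletes the
   restore.  */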
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
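/* Illustrative user-level view (assumed example, not from the
   original sources), for a target whose va_list is a plain pointer:

     void
     sum (int n, ...)
     {
       va_list ap;
       va_start (ap, n);   -> becomes  ap = __builtin_next_arg (0);
       ...
       va_end (ap);        -> folded away entirely
     }

   after which ap behaves like an ordinary pointer variable and is
   exposed to the usual scalar optimizations.  */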
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    {
      new_stmt = gimple_build_nop ();
      unlink_stmt_vdef (stmt);
      release_defs (stmt);
    }
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
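/* Sketch of why gimplification is needed here (illustrative, not from
   the original sources): if ccp_fold_builtin rewrites

     x_1 = strcpy (dst_2, "a");

   into a compound tree such as `(char *) memcpy (dst_2, "a", 2)',
   that expression is not a valid GIMPLE rhs, so update_call_from_tree
   fails and this routine instead emits, with D.1234 standing for a
   hypothetical temporary,

     D.1234 = memcpy (dst_2, "a", 2);
     x_1 = (char *) D.1234;

   before replacing the original call.  */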
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};