/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.
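
   For example (an illustrative fragment, not part of the original
   comment), after simulating

	x_1 = 4;
	y_2 = x_1 + 6;
	z_3 = y_2 * b_4;

   the lattice values would be CONSTANT 4 for x_1, CONSTANT 10 for
   y_2, and VARYING for z_3, assuming nothing is known about b_4.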
   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
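
	For example (again an illustrative fragment):

			if (PRED)
			  a_9 = 3;
			a_10 = PHI (a_9, a_8)

	If a_8 has no defining statement, its value is UNDEFINED, so
	the meet of UNDEFINED with CONSTANT 3 optimistically yields
	CONSTANT 3 for a_10.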
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.
   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:
     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
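
/* An illustrative end-to-end example (not from the original sources):
   given

     int f (int b)
     {
       int x = 4;
       int y = x + 6;
       if (y == 10)
	 return y;
       return b;
     }

   CCP determines y_2 to be CONSTANT 10, folds the predicate to true,
   marks the else edge non-executable, and substitute_and_fold leaves
   f returning the constant 10.  */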
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "toplev.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
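
/* An illustrative example (hypothetical declarations): given

     static const int forty_two = 42;
     static const int no_init;

   this returns the INTEGER_CST 42 for the first symbol, and zero for
   the second when it binds locally, since a 'const' without an
   initializer cannot be overridden at link or run time.  */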

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = y * 0.0;

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      val->mem_ref = NULL;
      return;
    }
}
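
/* For illustration: with -fno-signed-zeros (HONOR_SIGNED_ZEROS false)
   a CONSTANT -0.0 becomes CONSTANT 0.0 here, and with
   -ffinite-math-only (HONOR_NANS false) a CONSTANT NaN is demoted to
   UNDEFINED.  */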

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */
  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))
		  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      /* A non-trivial transition must never end in UNDEFINED; values
	 may only leave UNDEFINED, not enter it.  */
      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
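
/* The lattice is ordered UNINITIALIZED < UNDEFINED < CONSTANT <
   VARYING.  An illustrative legal history for one SSA name is
   UNDEFINED -> CONSTANT 4 -> VARYING; moving back down, e.g. VARYING
   -> CONSTANT, would trip the monotonicity assertion above.  */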

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* A GIMPLE_CALL is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (code == GIMPLE_CALL)
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (code == GIMPLE_ASSIGN
      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
	  == GIMPLE_SINGLE_RHS)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
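
/* For illustration: for 'x_2 = y_1 + 1' with y_1 CONSTANT the result
   is CONSTANT (worth folding); with y_1 UNDEFINED it is UNDEFINED,
   since PLUS_EXPR is in the list above; but for 'x_2 = y_1 / z_3'
   with only y_1 UNDEFINED the result is VARYING, because a division
   with one undefined operand must not be treated as undefined.  */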

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
	return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
	  && !stmt_makes_single_store (stmt))
	return true;
    }

  /* If it contains a call, it is varying.  */
  if (is_gimple_call (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		{
		  if (is_varying)
		    set_value_varying (def);
		}
	    }

	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  VAL1 already
	 contains the value we want.  */
      ;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      new_val.mem_ref = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p
			       (TREE_TYPE (TREE_OPERAND (*base, 0)),
				TREE_TYPE (val->value))
			  && useless_type_conversion_p
			       (TREE_TYPE (*base),
				TREE_TYPE (TREE_OPERAND (val->value, 0))))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (do_store_ccp && stmt_makes_single_load (stmt))
		{
		  /* If the RHS is a memory load, see if the VUSEs associated with
		     it are a valid constant for that memory load.  */
		  prop_value_t *val = get_value_loaded_by (stmt, const_val);
		  if (val && val->mem_ref)
		    {
		      if (operand_equal_p (val->mem_ref, rhs, 0))
			return val->value;

		      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
			 complex type with a known constant value, return it.  */
		      if ((TREE_CODE (rhs) == REALPART_EXPR
			   || TREE_CODE (rhs) == IMAGPART_EXPR)
			  && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
			return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
		    }
		}

	      if (kind == tcc_reference)
		return fold_const_aggregate_ref (rhs);
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if ((subcode == NOP_EXPR || subcode == CONVERT_EXPR)
		  && ((POINTER_TYPE_P (TREE_TYPE (lhs))
		       && POINTER_TYPE_P (TREE_TYPE (op0)))
		      || useless_type_conversion_p (TREE_TYPE (lhs),
						    TREE_TYPE (op0))))
		return op0;

	      return fold_unary (subcode, gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      /* It may be possible to fold away calls to builtin functions if
	 their arguments are constants.  At present, such folding will not
	 be attempted, as likely_value classifies all calls as VARYING.  */
      gcc_unreachable ();
      break;

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
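
/* An illustrative example (hypothetical declarations): given

     static const int tbl[3] = { 10, 20, 30 };

   a load from tbl[1] folds to the INTEGER_CST 20, and "abc"[1] folds
   to the character constant 'b'.  */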

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  ;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else if (do_store_ccp && stmt_makes_single_load (stmt))
	{
	  /* Same as above, but the RHS is not a gimple register and yet
	     has a known VUSE.  If STMT is loading from the same memory
	     location that created the SSA_NAMEs for the virtual operands,
	     we can propagate the value on the RHS.  */
	  prop_value_t *nval = get_value_loaded_by (stmt, const_val);

	  if (nval
	      && nval->mem_ref
	      && operand_equal_p (nval->mem_ref, rhs, 0))
	    val = *nval;
	  else
	    val = evaluate_stmt (stmt);
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}


static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_store_ccp =
{
 {
  GIMPLE_PASS,
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);

  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
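
/* For illustration (hypothetical, 4-byte-int target): with 'int a[10]'
   this folds *((int *) &a + 12) into a[3], since the 12-byte offset is
   an exact multiple (3) of the element size.  */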

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret, new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
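
/* For illustration (hypothetical, 4-byte-int target): for
   'struct S { int x; int y; } s', folding *((int *) &s + 4) yields
   the COMPONENT_REF s.y, since the offset lands exactly on that
   field.  */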

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base) && 0)
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1)));
	  if (size == maxsize)
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
					  sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.  */
	  TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
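
/* For illustration (hypothetical, 4-byte-int target): for 'int a[4]',
   the POINTER_PLUS_EXPR '(int *) &a p+ 8' is rewritten here into
   '&a[2]', which later folds may propagate further.  */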

/* For passing state through walk_tree into fold_stmt_r and its
   callbacks.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
2149 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2151 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2152 struct fold_stmt_r_data *fold_stmt_r_data;
2153 bool *inside_addr_expr_p;
2155 tree expr = *expr_p, t;
2156 bool volatile_p = TREE_THIS_VOLATILE (expr);
2158 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2159 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2160 changed_p = fold_stmt_r_data->changed_p;
2162 /* ??? It'd be nice if walk_tree had a pre-order option. */
2163 switch (TREE_CODE (expr))
2166 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2171 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2174 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2175 /* If we had a good reason for propagating the address here,
2176 make sure we end up with valid gimple. See PR34989. */
2177 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2181 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2186 if (POINTER_TYPE_P (TREE_TYPE (expr))
2187 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2188 && (t = maybe_fold_offset_to_reference
2189 (TREE_OPERAND (expr, 0),
2191 TREE_TYPE (TREE_TYPE (expr)))))
2193 tree ptr_type = build_pointer_type (TREE_TYPE (t));
2194 if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
2196 t = build_fold_addr_expr_with_type (t, ptr_type);
2200 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2201 We'd only want to bother decomposing an existing ARRAY_REF if
2202 the base array is found to have another offset contained within.
2203 Otherwise we'd be wasting time. */
2205 /* If we are not processing expressions found within an
2206 ADDR_EXPR, then we can fold constant array references. */
2207 if (!*inside_addr_expr_p)
2208 t = fold_read_from_constant_string (expr);
2214 *inside_addr_expr_p = true;
2215 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2216 *inside_addr_expr_p = false;
2221 /* Make sure the value is properly considered constant, and so gets
2222 propagated as expected. */
2224 recompute_tree_invariant_for_addr_expr (expr);
2228 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2233 /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
2234 We've already checked that the records are compatible, so we should
2235 come up with a set of compatible fields. */
2237 tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
2238 tree expr_field = TREE_OPERAND (expr, 1);
2240 if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
2242 expr_field = find_compatible_field (expr_record, expr_field);
2243 TREE_OPERAND (expr, 1) = expr_field;
2248 case TARGET_MEM_REF:
2249 t = maybe_fold_tmr (expr);
2252 case POINTER_PLUS_EXPR:
2253 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2256 t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
2261 t = maybe_fold_stmt_addition (TREE_TYPE (expr),
2262 TREE_OPERAND (expr, 0),
2263 TREE_OPERAND (expr, 1));
2267 if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
2269 tree op0 = TREE_OPERAND (expr, 0);
2273 fold_defer_overflow_warnings ();
2274 tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
2275 TREE_OPERAND (op0, 0),
2276 TREE_OPERAND (op0, 1));
2277 /* This is actually a conditional expression, not a GIMPLE
2278 conditional statement, however, the valid_gimple_rhs_p
2279 test still applies. */
2280 set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
2281 fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
2284 COND_EXPR_COND (expr) = tem;
2297 /* Preserve volatileness of the original expression. */
2298 TREE_THIS_VOLATILE (t) = volatile_p;
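
/* As an example of the INDIRECT_REF case above (the SSA names are
   illustrative): once CCP has propagated an address, a dereference
   such as

     ptr_1 = &x;
     y_2 = *ptr_1;

   becomes "y_2 = *&x;", which fold_stmt_r rewrites to the direct
   reference "y_2 = x;".  */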
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;
              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
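
/* Example for the GIMPLE_PHI case above (names illustrative only):

     s_2 = PHI <"hello" (bb1), "world" (bb2)>

   Both arguments have string length 5, so a TYPE == 0 query succeeds
   and stores 5 in *LENGTH; had the lengths differed, the
   simple_cst_equal test would reject the combination and the caller
   would not fold.  */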
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0; i < nargs; i++)
    {
      if ((arg_mask >> i) & 1)
        {
          a = gimple_call_arg (stmt, i);
          bitmap_clear (visited);
          if (!get_maxval_strlen (a, &val[i], visited, type))
            val[i] = NULL_TREE;
        }
    }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
        {
          tree new_val =
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                   gimple_call_arg (stmt, 1),
                                   ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
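
/* For instance, using a length computed by get_maxval_strlen, a call
   such as (illustrative GIMPLE)

     strcpy (dst_1, src_2);   where strlen (src_2) is known to be 3

   can be rewritten by fold_builtin_strcpy into the equivalent

     memcpy (dst_1, src_2, 4);

   which also copies the terminating NUL.  */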
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      result = fold_unary (subcode,
                           gimple_expr_type (stmt),
                           gimple_assign_rhs1 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR
                || gimple_assign_rhs_code (stmt) == CONVERT_EXPR)
               && POINTER_TYPE_P (gimple_expr_type (stmt))
               && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
        {
          tree type = gimple_expr_type (stmt);
          tree t = maybe_fold_offset_to_reference (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node,
                                                   TREE_TYPE (type));
          if (t)
            {
              tree ptr_type = build_pointer_type (TREE_TYPE (t));
              if (useless_type_conversion_p (type, ptr_type))
                return build_fold_addr_expr_with_type (t, ptr_type);
            }
        }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        result = maybe_fold_stmt_addition (
                   TREE_TYPE (gimple_assign_lhs (stmt)),
                   gimple_assign_rhs1 (stmt),
                   gimple_assign_rhs2 (stmt));

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
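
/* A GIMPLE_BINARY_RHS example: if earlier propagation has turned
   "x_1 = a_2 + b_3" into "x_1 = 3 + 4", fold_binary above reduces the
   RHS to the constant 7, which passes valid_gimple_rhs_p and becomes
   the replacement RHS.  */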
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
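
/* For example, if the operands of a GIMPLE_COND such as
   "if (a_1 > a_1)" fold to a constant, the condition is replaced by
   the equivalent trivial test (here, false), and CFG cleanup can
   later remove the dead arm.  */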
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
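
/* Devirtualization example (illustrative): once CCP has propagated
   "&b" for some declared object b into the OBJ_TYPE_REF object slot,
   the dynamic type is known exactly, so lang_hooks.fold_obj_type_ref
   can replace the indirect callee with the concrete method and the
   virtual call becomes a direct call.  */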
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to _builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
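
/* Illustration: in a block of the form

     p_1 = __builtin_stack_save ();
     ...                             <no calls or asm statements>
     __builtin_stack_restore (p_1);
     ...                             <no calls or asm statements>
     __builtin_stack_restore (q_2);

   the first restore is redundant; its feeding __builtin_stack_save is
   replaced by a null pointer assignment here, and the restore call
   itself is then deleted by the caller.  */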
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
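
/* Example rewrites on a target whose va_list is a plain character or
   void pointer (illustrative):

     __builtin_va_start (&ap, 0)   =>   ap = __builtin_next_arg (0)
     __builtin_va_copy (&d, s)     =>   d = s
     __builtin_va_end (&ap)        =>   <statement deleted>  */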
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
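
/* For instance (an illustrative replacement), substituting the
   non-GIMPLE tree "len_2 + 1" for a call "x_1 = foo (...)" gimplifies
   the expression into a temporary inserted before the call and then
   replaces the GIMPLE_CALL with

     x_1 = tmp_3;

   where tmp_3 holds the gimplified value.  */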
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);
          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa                   /* todo_flags_finish */
 }
};