/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED       ->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT        ->  V_i has been found to hold a constant
			    value C.

	VARYING         ->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

		if (PRED)
		  a_9 = 3;
		else
		  a_10 = 100;

		a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
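
   As a purely illustrative sketch (this fragment is hypothetical, not
   taken from the original sources), given

	x_1 = 4;
	y_2 = x_1 + 6;
	if (y_2 > 9)
	  ...

   visiting the first assignment sets x_1 to CONSTANT 4, re-visiting
   the second folds its RHS to CONSTANT 10, and the conditional's
   predicate then folds to true, so only the true edge is marked
   executable.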

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      abort ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.
   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "coretypes.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
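
/* A sketch of the PROP_VALUE_T fields this file relies on; the actual
   definition lives in tree-ssa-propagate.h and may differ in detail:

     lattice_val - one of the ccp_lattice_t values above
     value       - the propagated constant when lattice_val == CONSTANT
     mem_ref     - for values stored to memory, the reference stored to  */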
/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      gimple stmt = SSA_NAME_DEF_STMT (var);

      if (gimple_nop_p (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (is_gimple_assign (stmt)
	       /* Value-returning GIMPLE_CALL statements assign to
		  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	       || (is_gimple_call (stmt)
		   && gimple_call_lhs (stmt) != NULL_TREE)
	       || gimple_code (stmt) == GIMPLE_PHI)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))
		  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* A GIMPLE_CALL is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (code == GIMPLE_CALL)
    return VARYING;

  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
     is_gimple_min_invariant, so we do not consider calls or
     other forms of assignment.  */
  if (code == GIMPLE_ASSIGN
      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
	  == GIMPLE_SINGLE_RHS)
      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
    return CONSTANT;

  if (code == GIMPLE_COND
      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
    return CONSTANT;

  if (code == GIMPLE_SWITCH
      && is_gimple_min_invariant (gimple_switch_index (stmt)))
    return CONSTANT;

  /* Arrive here for more complex cases.  */

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }

  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
	return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
	  && !stmt_makes_single_store (stmt))
	return true;
    }

  /* If it contains a call, it is varying.  */
  if (is_gimple_call (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		{
		  if (is_varying)
		    set_value_varying (def);
		}
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj          = Ci        if (i == j)
		Ci   M Cj          = VARYING   if (i != j)
   */
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && ((!val1->mem_ref && !val2->mem_ref)
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}
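
/* Illustrative only: given the rules above,

     v1 = { CONSTANT, 4 }, v2 = { CONSTANT, 4 }  =>  v1 stays CONSTANT 4
     v1 = { CONSTANT, 4 }, v2 = { CONSTANT, 5 }  =>  v1 becomes VARYING
     v1 = { UNDEFINED },   v2 = { CONSTANT, 5 }  =>  v1 becomes CONSTANT 5

   after the corresponding ccp_lattice_meet (&v1, &v2) calls.  */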
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */
static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  int i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case UNDEFINED:
    case CONSTANT:
      break;
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mem_ref = NULL_TREE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p
			       (TREE_TYPE (TREE_OPERAND (*base, 0)),
				TREE_TYPE (val->value))
			  && useless_type_conversion_p
			       (TREE_TYPE (*base),
				TREE_TYPE (TREE_OPERAND (val->value, 0))))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (do_store_ccp && stmt_makes_single_load (stmt))
		{
		  /* If the RHS is a memory load, see if the VUSEs associated with
		     it are a valid constant for that memory load.  */
		  prop_value_t *val = get_value_loaded_by (stmt, const_val);
		  if (val && val->mem_ref)
		    {
		      if (operand_equal_p (val->mem_ref, rhs, 0))
			return val->value;

		      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
			 complex type with a known constant value, return it.  */
		      if ((TREE_CODE (rhs) == REALPART_EXPR
			   || TREE_CODE (rhs) == IMAGPART_EXPR)
			  && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
			return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
		    }
		}

	      if (kind == tcc_reference)
		return fold_const_aggregate_ref (rhs);
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if ((subcode == NOP_EXPR || subcode == CONVERT_EXPR)
		  && ((POINTER_TYPE_P (TREE_TYPE (lhs))
		       && POINTER_TYPE_P (TREE_TYPE (op0))
		       /* Do not allow differences in volatile qualification
			  as this might get us confused as to whether a
			  propagation destination statement is volatile
			  or not.  See PR36988.  */
		       && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
			   == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		      || useless_type_conversion_p (TREE_TYPE (lhs),
						    TREE_TYPE (op0))))
		return op0;

	      return fold_unary (subcode, gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      /* It may be possible to fold away calls to builtin functions if
	 their arguments are constants.  At present, such folding will not
	 be attempted, as likely_value classifies all calls as VARYING.  */
      gcc_unreachable ();

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
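
/* A hypothetical walk-through of ccp_fold: for 'z_3 = x_1 + y_2' with
   lattice values x_1 = CONSTANT 3 and y_2 = CONSTANT 4, the
   GIMPLE_BINARY_RHS arm substitutes both constants and fold_binary
   returns the INTEGER_CST 7.  */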
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */
tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */
static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	}

      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */
static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else if (do_store_ccp && stmt_makes_single_load (stmt))
	{
	  /* Same as above, but the RHS is not a gimple register and yet
	     has a known VUSE.  If STMT is loading from the same memory
	     location that created the SSA_NAMEs for the virtual operands,
	     we can propagate the value on the RHS.  */
	  prop_value_t *nval = get_value_loaded_by (stmt, const_val);

	  if (nval
	      && nval->mem_ref
	      && operand_equal_p (nval->mem_ref, rhs, 0))
	    val = *nval;
	  else
	    val = evaluate_stmt (stmt);
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */
static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}


static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_store_ccp =
{
 {
  GIMPLE_PASS,
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL_TREE;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     sockaddr").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  else if (domain_type
	   && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
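
/* A worked example (hypothetical, not from the original sources): for
   'int a[10]' and a 4-byte int, folding '*(&a + 8)' with ORIG_TYPE
   'int' divides the byte offset by TYPE_SIZE_UNIT (int) to get index
   8 / 4 == 2 and builds the equivalent ARRAY_REF 'a[2]'.  */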
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */
static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret;
  tree new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
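
/* A worked example (hypothetical): for 'struct S { int a; int b; } s'
   with 4-byte int and no padding, folding '*(&s + 4)' with ORIG_TYPE
   'int' walks the FIELD_DECLs, matches byte_position (b) == 4, and
   yields the COMPONENT_REF 's.b'.  */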
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */
static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base) && 0)
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1)));
	  if (size == maxsize)
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
					  sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */
static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
/* For passing state through walk_tree into fold_stmt_r and its
   callers.  */

struct fold_stmt_r_data
{
  gimple stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */
2155 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
2157 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2158 struct fold_stmt_r_data *fold_stmt_r_data;
2159 bool *inside_addr_expr_p;
2161 tree expr = *expr_p, t;
2162 bool volatile_p = TREE_THIS_VOLATILE (expr);
2164 fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
2165 inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
2166 changed_p = fold_stmt_r_data->changed_p;
2168 /* ??? It'd be nice if walk_tree had a pre-order option. */
2169 switch (TREE_CODE (expr))
2172 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2177 t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
2180 && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2181 /* If we had a good reason for propagating the address here,
2182 make sure we end up with valid gimple. See PR34989. */
2183 t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2187 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2192 if (POINTER_TYPE_P (TREE_TYPE (expr))
2193 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2194 && (t = maybe_fold_offset_to_reference
2195 (TREE_OPERAND (expr, 0),
2197 TREE_TYPE (TREE_TYPE (expr)))))
2199 tree ptr_type = build_pointer_type (TREE_TYPE (t));
2200 if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
2202 t = build_fold_addr_expr_with_type (t, ptr_type);
2206 /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
2207 We'd only want to bother decomposing an existing ARRAY_REF if
2208 the base array is found to have another offset contained within.
2209 Otherwise we'd be wasting time. */
2211 /* If we are not processing expressions found within an
2212 ADDR_EXPR, then we can fold constant array references. */
2213 if (!*inside_addr_expr_p)
2214 t = fold_read_from_constant_string (expr);
2220 *inside_addr_expr_p = true;
2221 t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
2222 *inside_addr_expr_p = false;
2227 /* Make sure the value is properly considered constant, and so gets
2228 propagated as expected. */
2230 recompute_tree_invariant_for_addr_expr (expr);
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      break;
    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
				    TREE_OPERAND (expr, 0),
				    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  /* This is actually a conditional expression, not a GIMPLE
	     conditional statement, however, the valid_gimple_rhs_p
	     test still applies.  */
	  set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    {
	      COND_EXPR_COND (expr) = tem;
	      t = expr;
	      break;
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.  */
      TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
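
/* For example (hypothetical GIMPLE), with TYPE == 0:

     # s_3 = PHI <s_1, s_2>
     x_4 = strlen (s_3);

   where s_1 points to "foo" and s_2 points to "bar".  Both PHI
   arguments have string length 3, so *LENGTH is set to 3 and the
   call can later be folded to x_4 = 3.  */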
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* Both *LENGTH and VAL must be constants to keep track
		 of the maximum of the two.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);
  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */
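
/* For instance (illustrative only), when get_maxval_strlen determines
   that src_1 points to a string of length 3,

     strcpy (dst_2, src_1);

   may be simplified to the equivalent but cheaper

     memcpy (dst_2, src_1, 4);

   copying the three characters plus the terminating NUL.  */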
static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  int nargs;
  bitmap visited;
  bool ignore;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }
  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  for (i = 0; i < nargs; i++)
    {
      if ((arg_mask >> i) & 1)
	{
	  a = gimple_call_arg (stmt, i);
	  bitmap_clear (visited);
	  if (!get_maxval_strlen (a, &val[i], visited, type))
	    val[i] = NULL_TREE;
	}
    }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we can not use the result.  */
	  if (is_gimple_val (new_val)
	      || (is_gimple_cast (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;
    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				   gimple_call_arg (stmt, 1),
				   ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
				   gimple_call_arg (stmt, 1),
				   ignore, true, val[0]);
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_memory_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = fold_builtin_stxcpy_chk (callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
	result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);

  return result;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
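
/* For example (a sketch): for the statement x_1 = 3 + 4, this function
   returns the folded rhs 7, and the caller rewrites the statement to
   x_1 = 7.  */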
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Try to fold a conditional expression.  */
	if (TREE_CODE (rhs) == COND_EXPR)
	  {
	    tree temp = fold (COND_EXPR_COND (rhs));
	    if (temp != COND_EXPR_COND (rhs))
	      result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
				    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
	  }

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;
	else
	  /* It is possible that fold_stmt_r simplified the RHS.
	     Make sure that the subcode of this statement still
	     reflects the principal operator of the rhs operand.  */
	  return rhs;
      }
      break;
    case GIMPLE_UNARY_RHS:
      result = fold_unary (subcode,
			   gimple_expr_type (stmt),
			   gimple_assign_rhs1 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR
		|| gimple_assign_rhs_code (stmt) == CONVERT_EXPR)
	       && POINTER_TYPE_P (gimple_expr_type (stmt))
	       && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
	{
	  tree type = gimple_expr_type (stmt);
	  tree t = maybe_fold_offset_to_reference (gimple_assign_rhs1 (stmt),
						   integer_zero_node,
						   TREE_TYPE (type));
	  if (t)
	    {
	      tree ptr_type = build_pointer_type (TREE_TYPE (t));
	      if (useless_type_conversion_p (type, ptr_type))
		return build_fold_addr_expr_with_type (t, ptr_type);
	    }
	}
      break;
    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	result = maybe_fold_stmt_addition (
		   TREE_TYPE (gimple_assign_lhs (stmt)),
		   gimple_assign_rhs1 (stmt),
		   gimple_assign_rhs2 (stmt));

      if (!result)
	result = fold_binary (subcode,
			      TREE_TYPE (gimple_assign_lhs (stmt)),
			      gimple_assign_rhs1 (stmt),
			      gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
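
/* For example (a sketch): after propagation has turned the predicate
   into a constant comparison,

     if (5 > 3) ...

   is folded here to

     if (1 != 0) ...

   leaving both outgoing edges intact; the CFG itself is cleaned up
   later.  */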
static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
			     boolean_type_node,
			     gimple_cond_lhs (stmt),
			     gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */
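
/* For example (illustrative only), a call

     x_1 = strlen (s_2);

   where the use-def chains show that s_2 points to "abc" is replaced
   via update_call_from_tree by the assignment

     x_1 = 3;  */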
static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
	return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
	 here are when we've propagated the address of a decl into the
	 object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
	 there requires that we create a new CALL_EXPR, and that requires
	 copying EH region info to the new node.  Easier to just do it
	 here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
	  && lang_hooks.fold_obj_type_ref
	  && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND
		     (OBJ_TYPE_REF_OBJECT (callee), 0)))
	{
	  tree t;

	  /* ??? Caution: Broken ADDR_EXPR semantics means that
	     looking at the type of the operand of the addr_expr
	     can yield an array type.  See silly exception in
	     check_pointer_types_r.  */
	  t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
	  t = lang_hooks.fold_obj_type_ref (callee, t);
	  if (t)
	    {
	      gimple_call_set_fn (stmt, t);
	      return true;
	    }
	}
    }

  return false;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */
bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs != NULL_TREE)
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	stmt = gsi_stmt (*gsi);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */
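
/* For instance (a sketch): a statement such as tmp_1 = *&x is
   rewritten in place to tmp_1 = x.  A folding that would need more
   operands than the original statement has room for is rejected,
   since the statement may not be reallocated here.  */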
bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to __builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops;
	tree new_rhs;
	old_num_ops = gimple_num_ops (stmt);
	si = gsi_for_stmt (stmt);
	new_rhs = fold_gimple_assign (&si);
	if (new_rhs != NULL_TREE
	    && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
	  {
	    gimple_assign_set_rhs_from_tree (&si, new_rhs);
	    changed = true;
	  }
	gcc_assert (gsi_stmt (si) == stmt);
	break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
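
/* For instance (illustrative only), in

     p_1 = __builtin_stack_save ();
     ...				(no calls or asms)
     __builtin_stack_restore (p_1);
     ...				(no calls or asms)
     __builtin_stack_restore (p_2);

   the restore of p_1 is redundant, because the later restore rewinds
   the stack anyway; the restore is deleted and the matching
   stack_save's result is simply replaced by 0.  */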
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
	  || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
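
/* For example (a sketch), on a target whose va_list is a plain pointer,

     __builtin_va_copy (&dst, src);

   becomes the pointer assignment

     dst = src;  */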
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
			     1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */
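
/* For example (a sketch): if a call x_1 = foo () is being replaced by
   the expression a_2 + b_3, the expression is gimplified into a new
   temporary, the defining statements are inserted before the call,
   and the call itself becomes an assignment of that temporary to
   x_1.  */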
static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
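
/* For example (illustrative only), any remaining

     x_1 = __builtin_constant_p (y_2);

   is resolved to x_1 = 0 here, since a value that is still unknown at
   this point is almost certainly not constant.  */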
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = ccp_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  push_stmt_changes (gsi_stmt_ptr (&i));

	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_rebuild_alias;
	    }

	  stmt = gsi_stmt (i);
	  pop_stmt_changes (gsi_stmt_ptr (&i));

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};