/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED  ->  the initial state of the value.  This value
			   is replaced with a correct initial value
			   the first time the value is used, so the
			   rest of the pass does not need to care about
			   it.  Using this value simplifies initialization
			   of the pass, and prevents us from needlessly
			   scanning statements that are never reached.

	UNDEFINED      ->  V_i is a local variable whose definition
			   has not been processed yet.  Therefore we
			   don't yet know if its value is a constant
			   or not.

	CONSTANT       ->  V_i has been found to hold a constant
			   value C.

	VARYING        ->  V_i cannot take a constant value, or if it
			   does, it is not possible to determine it
			   at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
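
	For instance (an illustrative fragment in the same spirit as
	the example above):

			if (PRED)
			  b_7 = 10;
			b_8 = PHI (b_6, b_7)

	If 'b' is a local that is never assigned on the other path,
	b_6 is UNDEFINED, so the meet of the two arguments is simply
	CONSTANT 10.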

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
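
   For instance (an illustrative fragment), given

	x_1 = 4;
	y_2 = x_1 + 5;
	if (y_2 > 10)
	  A ();
	else
	  B ();

   CCP computes x_1 = 4 and y_2 = 9, so after substitution and folding
   the predicate becomes 'if (9 > 10)', i.e. false, and only the call
   to B () remains.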

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }

   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,

	# a_5 = VDEF <a_4>
	a.a = 2;

	# VUSE <a_5>
	x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.

   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.

   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "basic-block.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "target.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

/* True if we are also propagating constants in stores and loads.  */
static bool do_store_ccp;

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* The regular is_gimple_min_invariant does a shallow test of the object.
   It assumes that full gimplification has happened, or will happen on the
   object.  For a value coming from DECL_INITIAL, this is not true, so we
   have to be more strict ourselves.  */

static bool
ccp_decl_initial_min_invariant (tree t)
{
  if (!is_gimple_min_invariant (t))
    return false;
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      /* Inline and unroll is_gimple_addressable.  */
      while (1)
	{
	  t = TREE_OPERAND (t, 0);
	  if (is_gimple_id (t))
	    return true;
	  if (!handled_component_p (t))
	    return false;
	}
    }
  return true;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

static tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym)
      && !MTAG_P (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val
	  && ccp_decl_initial_min_invariant (val))
	return val;
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
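
/* For instance (illustrative): a file-scope 'static const int n;' with
   no initializer is TREE_STATIC, TREE_READONLY and binds locally, so
   the function above folds any load from it to 0.  */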

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- If SSA_NAME_VALUE is set and it is a constant, its value is
      used.

   4- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   5- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, NULL_TREE };
  tree cst_val;

  if (!do_store_ccp && !is_gimple_reg (var))
    {
      /* Short circuit for regular CCP.  We are not interested in any
	 non-register when DO_STORE_CCP is false.  */
      val.lattice_val = VARYING;
    }
  else if (SSA_NAME_VALUE (var)
	   && is_gimple_min_invariant (SSA_NAME_VALUE (var)))
    {
      val.lattice_val = CONSTANT;
      val.value = SSA_NAME_VALUE (var);
    }
  else if ((cst_val = get_symbol_constant_value (sym)) != NULL_TREE)
    {
      /* Globals and static variables declared 'const' take their
	 initial value.  */
      val.lattice_val = CONSTANT;
      val.value = cst_val;
      val.mem_ref = sym;
    }
  else
    {
      tree stmt = SSA_NAME_DEF_STMT (var);

      if (IS_EMPTY_STMT (stmt))
	{
	  /* Variables defined by an empty statement are those used
	     before being initialized.  If VAR is a local variable, we
	     can assume initially that it is UNDEFINED, otherwise we must
	     consider it VARYING.  */
	  if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	    val.lattice_val = UNDEFINED;
	  else
	    val.lattice_val = VARYING;
	}
      else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	       || TREE_CODE (stmt) == PHI_NODE)
	{
	  /* Any other variable defined by an assignment or a PHI node
	     is considered UNDEFINED.  */
	  val.lattice_val = UNDEFINED;
	}
      else
	{
	  /* Otherwise, VAR will never take on a constant value.  */
	  val.lattice_val = VARYING;
	}
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}

/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mem_ref = NULL_TREE;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))
		  && old_val->mem_ref == new_val.mem_ref));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
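
/* For instance (illustrative): a name may legally move UNDEFINED ->
   CONSTANT 5 -> VARYING as simulation proceeds, but never CONSTANT 5 ->
   CONSTANT 7 or VARYING -> CONSTANT; the assertion above enforces
   exactly this monotonicity.  */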

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (tree stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  stmt_ann_t ann;
  tree use;
  ssa_op_iter iter;

  ann = stmt_ann (stmt);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (ann->has_volatile_ops)
    return VARYING;

  /* If we are not doing store-ccp, statements with loads
     and/or stores will never fold into a constant.  */
  if (!do_store_ccp
      && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return VARYING;

  /* A CALL_EXPR is assumed to be varying.  NOTE: This may be overly
     conservative, in the presence of const and pure calls.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return VARYING;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return VARYING;

  if (is_gimple_min_invariant (get_rhs (stmt)))
    return CONSTANT;

  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand
      && all_undefined_operands)
    return UNDEFINED;
  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	   && has_undefined_operand)
    {
      switch (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  if (has_constant_operand
      /* We do not consider virtual operands here -- load from read-only
	 memory may have only VARYING virtual operands, but still be
	 constant.  */
      || ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
    return CONSTANT;

  return VARYING;
}
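
/* For instance (illustrative): for 'x_3 = 4' likely_value returns
   CONSTANT (the RHS is already invariant); for 'y_5 = x_3 + z_9' with
   x_3 CONSTANT and z_9 VARYING it still returns CONSTANT (one constant
   operand makes folding worth trying); and for 'y_5 = x_3 + z_9' with
   both operands UNDEFINED it returns UNDEFINED.  */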

/* Returns true if STMT cannot be a constant.  */

static bool
surely_varying_stmt_p (tree stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (stmt_ann (stmt)->has_volatile_ops)
    return true;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    {
      if (!do_store_ccp)
	return true;

      /* We can only handle simple loads and stores.  */
      if (!stmt_makes_single_load (stmt)
	  && !stmt_makes_single_store (stmt))
	return true;
    }

  /* If it contains a call, it is varying.  */
  if (get_call_expr_in (stmt) != NULL_TREE)
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
      && TREE_CODE (stmt) != COND_EXPR
      && TREE_CODE (stmt) != SWITCH_EXPR)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree stmt = bsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		{
		  if (is_varying)
		    set_value_varying (def);
		}
	    }

	  DONT_SIMULATE_AGAIN (stmt) = is_varying;
	}
    }

  /* Now process PHI nodes.  We never set DONT_SIMULATE_AGAIN on PHI nodes,
     since we do not know which edges are executable yet, except for
     phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi)))
	    DONT_SIMULATE_AGAIN (phi) = true;
	  else
	    DONT_SIMULATE_AGAIN (phi) = false;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  /* Perform substitutions based on the known constant values.  */
  bool something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any M UNDEFINED = any
		any M VARYING   = VARYING
		Ci  M Cj        = Ci       if (i == j)
		Ci  M Cj        = VARYING  if (i != j)   */
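
/* For instance (illustrative): UNDEFINED M CONSTANT 4 = CONSTANT 4,
   CONSTANT 4 M CONSTANT 4 = CONSTANT 4, and CONSTANT 4 M CONSTANT 5
   = VARYING.  This is what makes the PHI evaluation optimistic:
   arguments that are still UNDEFINED never pessimize the result.  */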

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1
	   && (!do_store_ccp
	       || (val1->mem_ref && val2->mem_ref
		   && operand_equal_p (val1->mem_ref, val2->mem_ref, 0))))
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 If these two values come from memory stores, make sure that
	 they come from the same memory reference.  */
      val1->lattice_val = CONSTANT;
      val1->value = val1->value;
      val1->mem_ref = val1->mem_ref;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
      val1->mem_ref = NULL_TREE;
    }
}

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (tree phi)
{
  int i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_generic_expr (dump_file, phi, dump_flags);
    }

  old_val = get_value (PHI_RESULT (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }
  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mem_ref = NULL_TREE;

  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = PHI_ARG_EDGE (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = PHI_ARG_DEF (phi, i);
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	      arg_val.mem_ref = NULL_TREE;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (PHI_RESULT (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS.  */

static tree
ccp_fold (tree stmt)
{
  tree rhs = get_rhs (stmt);
  enum tree_code code = TREE_CODE (rhs);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree retval = NULL_TREE;

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* If the RHS is an SSA_NAME, return its known constant value,
	 if any.  */
      return get_value (rhs)->value;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* If the RHS is a memory load, see if the VUSEs associated with
	 it are a valid constant for that memory load.  */
      prop_value_t *val = get_value_loaded_by (stmt, const_val);
      if (val && val->mem_ref)
	{
	  if (operand_equal_p (val->mem_ref, rhs, 0))
	    return val->value;

	  /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
	     complex type with a known constant value, return it.  */
	  if ((TREE_CODE (rhs) == REALPART_EXPR
	       || TREE_CODE (rhs) == IMAGPART_EXPR)
	      && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
	    return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
	}
      return NULL_TREE;
    }

  /* Unary operators.  Note that we know the single operand must
     be a constant.  So this should almost always return a
     simplified RHS.  */
  if (kind == tcc_unary)
    {
      /* Handle unary operators which can appear in GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);

      /* Simplify the operand down to a constant.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = get_value (op0)->value;
	}

      if ((code == NOP_EXPR || code == CONVERT_EXPR)
	  && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0)))
	return op0;
      return fold_unary (code, TREE_TYPE (rhs), op0);
    }

  /* Binary and comparison operators.  We know one or both of the
     operands are constants.  */
  else if (kind == tcc_binary
	   || kind == tcc_comparison
	   || code == TRUTH_AND_EXPR
	   || code == TRUTH_OR_EXPR
	   || code == TRUTH_XOR_EXPR)
    {
      /* Handle binary and comparison operators that can appear in
	 GIMPLE form.  */
      tree op0 = TREE_OPERAND (rhs, 0);
      tree op1 = TREE_OPERAND (rhs, 1);

      /* Simplify the operands down to constants when appropriate.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op0);
	  if (val->lattice_val == CONSTANT)
	    op0 = val->value;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  prop_value_t *val = get_value (op1);
	  if (val->lattice_val == CONSTANT)
	    op1 = val->value;
	}

      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
    }

  else if (kind == tcc_declaration)
    return get_symbol_constant_value (rhs);

  else if (kind == tcc_reference)
    return fold_const_aggregate_ref (rhs);

  /* We may be able to fold away calls to builtin functions if their
     arguments are constants.  */
  else if (code == CALL_EXPR
	   && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL
	   && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
    {
      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
	{
	  tree *orig, var;
	  size_t i = 0;
	  ssa_op_iter iter;
	  use_operand_p var_p;

	  /* Preserve the original values of every operand.  */
	  orig = XNEWVEC (tree, NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
	    orig[i++] = var;

	  /* Substitute operands with their values and try to fold.  */
	  replace_uses_in (stmt, NULL, const_val);
	  retval = fold_call_expr (rhs, false);

	  /* Restore operands to their original form.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
	    SET_USE (var_p, orig[i++]);
	  free (orig);
	}
    }
  else
    return rhs;

  /* If we got a simplified form, see if we need to convert its type.  */
  if (retval)
    return fold_convert (TREE_TYPE (rhs), retval);

  /* No simplification was possible.  */
  return rhs;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

static tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
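	  /* E.g. (illustrative): a read of "abc"[1] passes all the
	     checks above and folds to the character constant 'b'.  */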
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return cval;
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return cval;
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Evaluate statement STMT.  */

static prop_value_t
evaluate_stmt (tree stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  val.mem_ref = NULL_TREE;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    simplified = get_rhs (stmt);

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).  */

static enum ssa_prop_result
visit_assignment (tree stmt, tree *output_p)
{
  prop_value_t val;
  tree lhs, rhs;
  enum ssa_prop_result retval;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (rhs) == SSA_NAME)
    {
      /* For a simple copy operation, we copy the lattice values.  */
      prop_value_t *nval = get_value (rhs);
      val = *nval;
    }
  else if (do_store_ccp && stmt_makes_single_load (stmt))
    {
      /* Same as above, but the RHS is not a gimple register and yet
	 has a known VUSE.  If STMT is loading from the same memory
	 location that created the SSA_NAMEs for the virtual operands,
	 we can propagate the value on the RHS.  */
      prop_value_t *nval = get_value_loaded_by (stmt, const_val);

      if (nval
	  && nval->mem_ref
	  && operand_equal_p (nval->mem_ref, rhs, 0))
	val = *nval;
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement.  */
    val = evaluate_stmt (stmt);

  /* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
     value to be a VIEW_CONVERT_EXPR of the old constant value.

     ??? Also, if this was a definition of a bitfield, we need to widen
     the constant value into the type of the destination variable.  This
     should not be necessary if GCC represented bitfields properly.  */
  {
    tree orig_lhs = GIMPLE_STMT_OPERAND (stmt, 0);

    if (TREE_CODE (orig_lhs) == VIEW_CONVERT_EXPR
	&& val.lattice_val == CONSTANT)
      {
	tree w = fold_unary (VIEW_CONVERT_EXPR,
			     TREE_TYPE (TREE_OPERAND (orig_lhs, 0)),
			     val.value);

	orig_lhs = TREE_OPERAND (orig_lhs, 0);
	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	  }
      }

    if (val.lattice_val == CONSTANT
	&& TREE_CODE (orig_lhs) == COMPONENT_REF
	&& DECL_BIT_FIELD (TREE_OPERAND (orig_lhs, 1)))
      {
	tree w = widen_bitfield (val.value, TREE_OPERAND (orig_lhs, 1),
				 orig_lhs);

	if (w && is_gimple_min_invariant (w))
	  val.value = w;
	else
	  {
	    val.lattice_val = VARYING;
	    val.value = NULL_TREE;
	    val.mem_ref = NULL_TREE;
	  }
      }
  }

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }
  else if (do_store_ccp && stmt_makes_single_store (stmt))
    {
      /* Otherwise, set the names in VDEF operands to the new
	 constant value and mark the LHS as the memory reference
	 associated with VAL.  */
      ssa_op_iter i;
      tree vdef;
      bool changed;

      /* Mark VAL as stored in the LHS of this assignment.  */
      if (val.lattice_val == CONSTANT)
	val.mem_ref = lhs;

      /* Set the value of every VDEF to VAL.  */
      changed = false;
      FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, i, SSA_OP_VIRTUAL_DEFS)
	{
	  /* See PR 29801.  We may have VDEFs for read-only variables
	     (see the handling of unmodifiable variables in
	     add_virtual_operand); do not attempt to change their value.  */
	  if (get_symbol_constant_value (SSA_NAME_VAR (vdef)) != NULL_TREE)
	    continue;

	  changed |= set_lattice_value (vdef, val);
	}

      /* Note that for propagation purposes, we are only interested in
	 visiting statements that load the exact same memory reference
	 stored here.  Those statements will have the exact same list
	 of virtual uses, so it is enough to set the output of this
	 statement to be its first virtual definition.  */
      *output_p = first_vdef (stmt);
      if (changed)
	{
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (tree stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = bb_for_stmt (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_generic_stmt (dump_file, stmt, dump_flags);
      fprintf (dump_file, "\n");
    }

  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
    {
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);
    }
  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
    {
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      return visit_cond_stmt (stmt, taken_edge_p);
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
execute_ssa_ccp (bool store_ccp)
{
  do_store_ccp = store_ccp;
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static unsigned int
do_ssa_ccp (void)
{
  return execute_ssa_ccp (false);
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct tree_opt_pass pass_ccp =
{
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
  0					/* letter */
};

static unsigned int
do_ssa_store_ccp (void)
{
  /* If STORE-CCP is not enabled, we just run regular CCP.  */
  return execute_ssa_ccp (flag_tree_store_ccp != 0);
}

static bool
gate_store_ccp (void)
{
  /* STORE-CCP is enabled only with -ftree-store-ccp, but when
     -fno-tree-store-ccp is specified, we should run regular CCP.
     That's why the pass is enabled with either flag.  */
  return flag_tree_store_ccp != 0 || flag_tree_ccp != 0;
}

struct tree_opt_pass pass_store_ccp =
{
  "store_ccp",				/* name */
  gate_store_ccp,			/* gate */
  do_ssa_store_ccp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_STORE_CCP,			/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
    | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
  0					/* letter */
};

/* Given a constant value VAL for bitfield FIELD, and a destination
   variable VAR, return VAL appropriately widened to fit into VAR.  If
   FIELD is wider than HOST_WIDE_INT, NULL is returned.  */

tree
widen_bitfield (tree val, tree field, tree var)
{
  unsigned HOST_WIDE_INT var_size, field_size;
  tree wide_val;
  unsigned HOST_WIDE_INT mask;
  unsigned int i;

  /* We can only do this if the size of the type and field and VAL are
     all constants representable in HOST_WIDE_INT.  */
  if (!host_integerp (TYPE_SIZE (TREE_TYPE (var)), 1)
      || !host_integerp (DECL_SIZE (field), 1)
      || !host_integerp (val, 0))
    return NULL_TREE;

  var_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1);
  field_size = tree_low_cst (DECL_SIZE (field), 1);

  /* Give up if either the bitfield or the variable are too wide.  */
  if (field_size > HOST_BITS_PER_WIDE_INT || var_size > HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;

  gcc_assert (var_size >= field_size);

  /* If the sign bit of the value is not set or the field's type is unsigned,
     just mask off the high order bits of the value.  */
  if (DECL_UNSIGNED (field)
      || !(tree_low_cst (val, 0) & (((HOST_WIDE_INT)1) << (field_size - 1))))
    {
      /* Zero extension.  Build a mask with the lower 'field_size' bits
	 set and a BIT_AND_EXPR node to clear the high order bits of
	 the value.  */
      for (i = 0, mask = 0; i < field_size; i++)
	mask |= ((HOST_WIDE_INT) 1) << i;

      wide_val = fold_build2 (BIT_AND_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }
  else
    {
      /* Sign extension.  Create a mask with the upper 'field_size'
	 bits set and a BIT_IOR_EXPR to set the high order bits of the
	 value.  */
      for (i = 0, mask = 0; i < (var_size - field_size); i++)
	mask |= ((HOST_WIDE_INT) 1) << (var_size - i - 1);

      wide_val = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (var), val,
			      build_int_cst (TREE_TYPE (var), mask));
    }

  return wide_val;
}
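
/* For instance (illustrative): for a 3-bit signed bitfield holding the
   bit pattern 101 (-3), the sign bit is set, so the sign-extension
   branch above ORs in a mask with every bit above bit 2 set, yielding
   the value -3 in VAR's type.  */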

/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of elements type) from that ARRAY_REF).
     We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);

  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should be not folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     hack").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
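
/* For instance (illustrative): with 'int a[10];' and 4-byte ints, a
   dereference of '&a[2] + 8' reaches this routine with an ARRAY_REF
   base; the byte offset 8 divides exactly into an index of 2, which is
   added to the existing index 2, folding the access to 'a[4]'.  */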

/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret, new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
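
/* For instance (illustrative): given 'struct S { int x; int y; } *p;'
   with 4-byte ints and no padding, a dereference of 'p + 4' matches
   field 'y' at offset 4 exactly and is folded to 'p->y'.  */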

/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
	 so it needs to be removed and new COMPONENT_REF constructed.
	 The wrong COMPONENT_REF are often constructed by folding the
	 (type *)&object within the expression (type *)&object+offset  */
      if (handled_component_p (base) && 0)
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  gcc_assert (!(sub_offset & (BITS_PER_UNIT - 1)));
	  if (size == maxsize)
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
							 sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}

/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}

/* A subroutine of fold_stmt_r.  EXPR is a POINTER_PLUS_EXPR.

   A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */
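
/* For instance (illustrative): with 'int a[8];', the POINTER_PLUS_EXPR
   '&a p+ 8' whose result type is 'int *' is rewritten by the routine
   below as '&a[2]'.  */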

static tree
maybe_fold_stmt_addition (tree expr)
{
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  tree ptr_type = TREE_TYPE (expr);
  tree ptd_type;
  tree t;

  gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR);

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (ptr_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, ptr_type, t);

  return t;
}

/* For passing state through walk_tree into fold_stmt_r and its
   callbacks.  */

struct fold_stmt_r_data
{
  tree stmt;
  bool *changed_p;
  bool *inside_addr_expr_p;
};

/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
  bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  bool *changed_p = fold_stmt_r_data->changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
				    integer_zero_node);
      break;

    case NOP_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (t = maybe_fold_offset_to_reference
		      (TREE_OPERAND (expr, 0),
		       integer_zero_node,
		       TREE_TYPE (TREE_TYPE (expr)))))
	{
	  tree ptr_type = build_pointer_type (TREE_TYPE (t));
	  if (!useless_type_conversion_p (TREE_TYPE (expr), ptr_type))
	    return NULL_TREE;
	  t = build_fold_addr_expr_with_type (t, ptr_type);
	}
      break;

      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
	 We'd only want to bother decomposing an existing ARRAY_REF if
	 the base array is found to have another offset contained within.
	 Otherwise we'd be wasting time.  */
    case ARRAY_REF:
      /* If we are not processing expressions found within an
	 ADDR_EXPR, then we can fold constant array references.  */
      if (!*inside_addr_expr_p)
	t = fold_read_from_constant_string (expr);
      else
	t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Set TREE_INVARIANT properly so that the value is properly
	 considered constant, and so gets propagated as expected.  */
      if (*changed_p)
	recompute_tree_invariant_for_addr_expr (expr);
      return NULL_TREE;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
	return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
	 We've already checked that the records are compatible, so we should
	 come up with a set of compatible fields.  */
      {
	tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
	tree expr_field = TREE_OPERAND (expr, 1);

	if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
	  {
	    expr_field = find_compatible_field (expr_record, expr_field);
	    TREE_OPERAND (expr, 1) = expr_field;
	  }
      }
      t = NULL_TREE;
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
	{
	  tree op0 = TREE_OPERAND (expr, 0);
	  tree tem;
	  bool set;

	  fold_defer_overflow_warnings ();
	  tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
			     TREE_OPERAND (op0, 0),
			     TREE_OPERAND (op0, 1));
	  set = tem && set_rhs (expr_p, tem);
	  fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
	  if (set)
	    {
	      t = *expr_p;
	      break;
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.  */
      TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
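
/* Illustrative usage sketch (not from the original sources).  A caller
   wanting the constant length of STR would do roughly:

     bitmap visited = BITMAP_ALLOC (NULL);
     tree len = NULL_TREE;
     if (get_maxval_strlen (str, &len, visited, 0))
       ... LEN now holds the INTEGER_CST length of STR ...
     BITMAP_FREE (visited);

   which is essentially what ccp_fold_builtin does below.  */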
static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, def_stmt, val;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;
              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (TREE_CODE (def_stmt))
    {
    case GIMPLE_MODIFY_STMT:
      {
        tree rhs;

        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
        STRIP_NOPS (rhs);
        return get_maxval_strlen (rhs, length, visited, type);
      }

    case PHI_NODE:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        int i;

        for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
          {
            tree arg = PHI_ARG_DEF (def_stmt, i);

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == PHI_RESULT (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }

        return true;
      }

    default:
      break;
    }

  return false;
}
/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
   constant, return NULL_TREE.  Otherwise, return its constant value.  */
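
/* For example (illustrative only): if earlier propagation has shown
   that p_1 always points to the string "hello", then

     n_2 = strlen (p_1);

   can be folded here to n_2 = 5, because get_maxval_strlen can walk
   p_1's use-def chain back to the constant string.  */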
static tree
ccp_fold_builtin (tree stmt, tree fn)
{
  tree result, val[3];
  tree callee, a;
  int arg_mask, i, type;
  bitmap visited;
  bool ignore;
  call_expr_arg_iterator iter;
  int nargs;

  ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_expr (fn, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = get_callee_fndecl (fn);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = call_expr_nargs (fn);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_mask = 1;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_mask = 2;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_mask = 4;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_mask = 2;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_mask = 2;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);

  memset (val, 0, sizeof (val));
  init_call_expr_arg_iterator (fn, &iter);
  for (i = 0; arg_mask; i++, arg_mask >>= 1)
    {
      a = next_call_expr_arg (&iter);
      if (arg_mask & 1)
        {
          bitmap_clear (visited);
          if (!get_maxval_strlen (a, &val[i], visited, type))
            val[i] = NULL_TREE;
        }
    }

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0])
        {
          tree new_val = fold_convert (TREE_TYPE (fn), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      CALL_EXPR_ARG (fn, 0),
                                      CALL_EXPR_ARG (fn, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       CALL_EXPR_ARG (fn, 0),
                                       CALL_EXPR_ARG (fn, 1),
                                       CALL_EXPR_ARG (fn, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
                                   CALL_EXPR_ARG (fn, 1),
                                   TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
                                   val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
                                   CALL_EXPR_ARG (fn, 1),
                                   TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
                                   val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_memory_chk (callee,
                                          CALL_EXPR_ARG (fn, 0),
                                          CALL_EXPR_ARG (fn, 1),
                                          CALL_EXPR_ARG (fn, 2),
                                          CALL_EXPR_ARG (fn, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = fold_builtin_stxcpy_chk (callee,
                                          CALL_EXPR_ARG (fn, 0),
                                          CALL_EXPR_ARG (fn, 1),
                                          CALL_EXPR_ARG (fn, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
                                           CALL_EXPR_ARG (fn, 1),
                                           CALL_EXPR_ARG (fn, 2),
                                           CALL_EXPR_ARG (fn, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = fold_builtin_snprintf_chk (fn, val[1],
                                            DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
/* Fold the statement pointed to by STMT_P.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */
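
/* For example (illustrative only): once CCP has propagated p_1 = &x
   into a dereference, fold_stmt turns

     y_2 = *p_1;    i.e.    y_2 = *&x;

   into the plain load y_2 = x.  */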
bool
fold_stmt (tree *stmt_p)
{
  tree rhs, result, stmt;
  struct fold_stmt_r_data fold_stmt_r_data;
  bool changed = false;
  bool inside_addr_expr = false;

  stmt = *stmt_p;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  /* If we replaced constants and the statement makes pointer dereferences,
     then we may need to fold instances of *&VAR into VAR, etc.  */
  if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
    {
      *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
      return true;
    }

  rhs = get_rhs (stmt);
  if (!rhs)
    return changed;
  result = NULL_TREE;

  if (TREE_CODE (rhs) == CALL_EXPR)
    {
      tree callee;

      /* Check for builtins that CCP can handle using information not
         available in the generic fold routines.  */
      callee = get_callee_fndecl (rhs);
      if (callee && DECL_BUILT_IN (callee))
        result = ccp_fold_builtin (stmt, rhs);
      else
        {
          /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
             here are when we've propagated the address of a decl into the
             object slot.  */
          /* ??? Should perhaps do this in fold proper.  However, doing it
             there requires that we create a new CALL_EXPR, and that requires
             copying EH region info to the new node.  Easier to just do it
             here where we can just smash the call operand.  Also
             CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
             copied, fold_call_expr does not have that information.  */
          callee = CALL_EXPR_FN (rhs);
          if (TREE_CODE (callee) == OBJ_TYPE_REF
              && lang_hooks.fold_obj_type_ref
              && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
              && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
            {
              tree t;

              /* ??? Caution: Broken ADDR_EXPR semantics means that
                 looking at the type of the operand of the addr_expr
                 can yield an array type.  See silly exception in
                 check_pointer_types_r.  */
              t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
              t = lang_hooks.fold_obj_type_ref (callee, t);
              if (t)
                {
                  CALL_EXPR_FN (rhs) = t;
                  changed = true;
                }
            }
        }
    }
  else if (TREE_CODE (rhs) == COND_EXPR)
    {
      tree temp = fold (COND_EXPR_COND (rhs));
      if (temp != COND_EXPR_COND (rhs))
        result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                              COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
    }

  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
  if (result == NULL_TREE)
    result = fold (rhs);

  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
     may have been added by fold, and "useless" type conversions that might
     now be apparent due to propagation.  */
  STRIP_USELESS_TYPE_CONVERSION (result);

  if (result != rhs)
    changed |= set_rhs (stmt_p, result);

  return changed;
}
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  */
bool
fold_stmt_inplace (tree stmt)
{
  tree old_stmt = stmt, rhs, new_rhs;
  struct fold_stmt_r_data fold_stmt_r_data;
  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL);
  gcc_assert (stmt == old_stmt);

  rhs = get_rhs (stmt);
  if (!rhs || rhs == stmt)
    return changed;

  new_rhs = fold (rhs);
  STRIP_USELESS_TYPE_CONVERSION (new_rhs);
  if (new_rhs == rhs)
    return changed;

  changed |= set_rhs (&stmt, new_rhs);
  gcc_assert (stmt == old_stmt);

  return changed;
}
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
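
/* Illustrative example (not from the original sources):

     p_1 = __builtin_stack_save ();
     ...                                <- no calls or ASM_EXPRs
     __builtin_stack_restore (p_1);     <- candidate
     __builtin_stack_restore (p_2);

   The candidate restore has no lasting effect before the following
   restore takes over, so it and its matching __builtin_stack_save
   can both be removed.  */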
static tree
optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
{
  tree stack_save, stmt, callee;

  if (TREE_CODE (call) != CALL_EXPR
      || call_expr_nargs (call) != 1
      || TREE_CODE (CALL_EXPR_ARG (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
    return NULL_TREE;

  for (bsi_next (&i); !bsi_end_p (i); bsi_next (&i))
    {
      /* Shadows the CALL parameter; we only inspect the calls made by
         the statements following the candidate.  */
      tree call;

      stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == ASM_EXPR)
        return NULL_TREE;
      call = get_call_expr_in (stmt);
      if (call == NULL)
        continue;

      callee = get_callee_fndecl (call);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (bsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (CALL_EXPR_ARG (call, 0));
  if (TREE_CODE (stack_save) != GIMPLE_MODIFY_STMT
      || GIMPLE_STMT_OPERAND (stack_save, 0) != CALL_EXPR_ARG (call, 0)
      || TREE_CODE (GIMPLE_STMT_OPERAND (stack_save, 1)) != CALL_EXPR
      || tree_could_throw_p (stack_save)
      || !has_single_use (CALL_EXPR_ARG (call, 0)))
    return NULL_TREE;

  callee = get_callee_fndecl (GIMPLE_STMT_OPERAND (stack_save, 1));
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || call_expr_nargs (GIMPLE_STMT_OPERAND (stack_save, 1)) != 0)
    return NULL_TREE;

  stmt = stack_save;
  push_stmt_changes (&stmt);
  if (!set_rhs (&stmt,
                build_int_cst (TREE_TYPE (CALL_EXPR_ARG (call, 0)), 0)))
    {
      discard_stmt_changes (&stmt);
      return NULL_TREE;
    }
  gcc_assert (stmt == stack_save);
  pop_stmt_changes (&stmt);

  return integer_zero_node;
}
/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   optimize __builtin_va_end (&ap) away as a no-op, and optimize
   __builtin_va_copy into a simple pointer assignment.  */
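
/* Illustratively, on a target where va_list is a plain character
   pointer (a sketch, not from the original sources):

     __builtin_va_start (&ap, 0);  becomes  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);    becomes  d = s;
     __builtin_va_end (&ap);       becomes  (nothing)  */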
static tree
optimize_stdarg_builtin (tree call)
{
  tree callee, lhs, rhs;
  bool va_list_simple_ptr;

  if (TREE_CODE (call) != CALL_EXPR)
    return NULL_TREE;

  va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
                       && (TREE_TYPE (va_list_type_node) == void_type_node
                           || TREE_TYPE (va_list_type_node) == char_type_node);

  callee = get_callee_fndecl (call);
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (call_expr_nargs (call) != 2)
        return NULL_TREE;

      lhs = CALL_EXPR_ARG (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (va_list_type_node))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (call_expr_nargs (call) != 2)
        return NULL_TREE;

      lhs = CALL_EXPR_ARG (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (va_list_type_node))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = CALL_EXPR_ARG (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (va_list_type_node))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  When IGNORE is set, don't worry about the return
   value.  */
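
/* For example (illustrative only): fold_builtin_fputs may produce an
   fwrite call wrapped in casts and COMPOUND_EXPRs, which is not a
   valid GIMPLE RHS; this routine re-gimplifies the expression into a
   short statement sequence inserted at *SI_P and returns the value to
   substitute.  */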
static tree
convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore)
{
  tree_stmt_iterator ti;
  tree stmt = bsi_stmt (*si_p);
  tree tmp, stmts = NULL;

  push_gimplify_context ();
  if (ignore)
    {
      tmp = build_empty_stmt ();
      gimplify_and_add (expr, &stmts);
    }
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);
  pop_gimplify_context (NULL);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
    {
      tree new_stmt = tsi_stmt (ti);
      find_new_referenced_vars (tsi_stmt_ptr (ti));
      bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      bsi_next (si_p);
    }

  return tmp;
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
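
/* For instance (illustrative only), after constant propagation this
   pass resolves any remaining

     b_1 = __builtin_constant_p (x_2);

   to b_1 = 0, and re-folds a statement that simplifies into a
   different builtin until no further progress is made.  */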
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      for (i = bsi_start (bb); !bsi_end_p (i); )
        {
          tree *stmtp = bsi_stmt_ptr (i);
          tree old_stmt = *stmtp;
          tree call = get_rhs (*stmtp);
          tree callee, result;
          enum built_in_function fcode;

          if (!call || TREE_CODE (call) != CALL_EXPR)
            {
              bsi_next (&i);
              continue;
            }
          callee = get_callee_fndecl (call);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              bsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (*stmtp, call);
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (bb, *stmtp, i);
                if (result)
                  break;
                bsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (*stmtp);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                bsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
            }

          push_stmt_changes (stmtp);

          if (!set_rhs (stmtp, result))
            {
              result = convert_to_gimple_builtin (&i, result,
                                                  TREE_CODE (old_stmt)
                                                  != GIMPLE_MODIFY_STMT);
              if (result)
                {
                  bool ok = set_rhs (stmtp, result);
                  gcc_assert (ok);
                  todoflags |= TODO_rebuild_alias;
                }
            }

          pop_stmt_changes (stmtp);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
              && tree_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_generic_stmt (dump_file, *stmtp, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          call = get_rhs (*stmtp);
          if (!call || TREE_CODE (call) != CALL_EXPR)
            {
              bsi_next (&i);
              continue;
            }
          callee = get_callee_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            bsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct tree_opt_pass pass_fold_builtins =
{
  "fab",                                /* name */
  NULL,                                 /* gate */
  execute_fold_all_builtins,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa,                  /* todo_flags_finish */
  0                                     /* letter */
};