/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:
   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.
   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.
      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.
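
   As a purely illustrative example, in

	i_1 = 10;
	j_2 = i_1 + 5;
	if (j_2 > 20)
	  ...

   the propagator records i_1 as CONSTANT 10, evaluates the definition
   of j_2 to CONSTANT 15, folds the predicate 'j_2 > 20' to false and
   marks the edge into the true arm as not executable.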

   Constant propagation in stores and loads (STORE-CCP)
   ----------------------------------------------------

   While CCP has all the logic to propagate constants in GIMPLE
   registers, it is missing the ability to associate constants with
   stores and loads (i.e., pointer dereferences, structures and
   global/aliased variables).  We don't keep loads and stores in
   SSA, but we do build a factored use-def web for them (in the
   virtual operands).

   For instance, consider the following code fragment:

	  struct A a;
	  const int B = 42;

	  void foo (int i)
	  {
	    if (i > 10)
	      a.a = 42;
	    else
	      {
		a.b = 21;
		a.a = a.b + 21;
	      }

	    if (a.a != B)
	      never_executed ();
	  }
   We should be able to deduce that the predicate 'a.a != B' is always
   false.  To achieve this, we associate constant values to the SSA
   names in the VDEF operands for each store.  Additionally,
   since we also glob partial loads/stores with the base symbol, we
   also keep track of the memory reference where the constant value
   was stored (in the MEM_REF field of PROP_VALUE_T).  For instance,
   given

	  # a_5 = VDEF <a_4>
	  a.a = 2;

	  # VUSE <a_5>
	  x_3 = a.b;

   In the example above, CCP will associate value '2' with 'a_5', but
   it would be wrong to replace the load from 'a.b' with '2', because
   '2' had been stored into a.a.
   Note that the initial value of virtual operands is VARYING, not
   UNDEFINED.  Consider, for instance global variables:

	int A;

	foo (int i)
	{
	  if (i_3 > 10)
	    A_4 = 3;
	  # A_5 = PHI (A_4, A_2);

	  # VUSE <A_5>
	  A.0_6 = A;

	  return A.0_6;
	}

   The value of A_2 cannot be assumed to be UNDEFINED, as it may have
   been defined outside of foo.  If we were to assume it UNDEFINED, we
   would erroneously optimize the above into 'return 3;'.
   Though STORE-CCP is not too expensive, it does have to do more work
   than regular CCP, so it is only enabled at -O2.  Both regular CCP
   and STORE-CCP use the exact same algorithm.  The only distinction
   is that when doing STORE-CCP, the boolean variable DO_STORE_CCP is
   set to true.  This affects the evaluation of statements and PHI
   nodes.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"
#include "function.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      print_generic_expr (outf, val.value, dump_flags);
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (TREE_STATIC (sym)
      && TREE_READONLY (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  STRIP_USELESS_TYPE_CONVERSION (val);
	  if (is_gimple_min_invariant (val))
	    return val;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && !DECL_EXTERNAL (sym)
	  && targetm.binds_local_p (sym)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return fold_convert (TREE_TYPE (sym), integer_zero_node);
    }

  return NULL_TREE;
}
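
/* For example, given

     static const int forty_two = 42;
     static const int zero;

   the function returns 42 for 'forty_two'; 'zero' has no initializer,
   is not DECL_EXTERNAL and, assuming it binds locally, is folded to
   the zero constant.  */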
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym) && TREE_CODE (sym) != PARM_DECL)
	val.lattice_val = UNDEFINED;
      else
	val.lattice_val = VARYING;
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
    }

  return val;
}
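
/* For instance, in the illustrative function

     int foo (int i)
     {
       int j;
       return i + j;
     }

   the default definition of 'j' gets UNDEFINED (a local used before
   being initialized, rule 2), while the default definition of the
   PARM_DECL 'i' gets VARYING.  */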
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  return val;
}
/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  prop_value_t *old_val = get_value (var);

  canonicalize_float_value (&new_val);

  /* Lattice transitions must always be monotonically increasing in
     value.  If *OLD_VAL and NEW_VAL are the same, return false to
     inform the caller that this was a non-transition.  */

  gcc_assert (old_val->lattice_val < new_val.lattice_val
	      || (old_val->lattice_val == new_val.lattice_val
		  && ((!old_val->value && !new_val.value)
		      || operand_equal_p (old_val->value, new_val.value, 0))));

  if (old_val->lattice_val != new_val.lattice_val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNDEFINED);
      return true;
    }

  return false;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  */
  for (i = is_gimple_call (stmt) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
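
/* Illustrative examples of the above: for x_2 = y_1 + 1 with y_1
   known CONSTANT, likely_value returns CONSTANT; for x_2 = y_1 + z_3
   with both operands UNDEFINED it returns UNDEFINED; a statement with
   volatile operands is VARYING immediately.  */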
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed is beyond the debug count
   specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;

  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;

  do_dbg_cnt ();
  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (const_val, false);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want.  If these two values
	 come from memory stores, make sure that they come from the
	 same memory reference.  */
      val1->lattice_val = CONSTANT;
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->value = NULL_TREE;
    }
}
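
/* For example, CONSTANT 3 M UNDEFINED = CONSTANT 3,
   CONSTANT 3 M CONSTANT 3 = CONSTANT 3, and
   CONSTANT 3 M CONSTANT 4 = VARYING, as is CONSTANT 3 M VARYING.  */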
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val;

	  if (is_gimple_min_invariant (arg))
	    {
	      arg_val.lattice_val = CONSTANT;
	      arg_val.value = arg;
	    }
	  else
	    arg_val = *(get_value (arg));

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return true if we may propagate the address expression ADDR into the
   dereference DEREF and cancel them.  */

bool
may_propagate_address_into_dereference (tree addr, tree deref)
{
  gcc_assert (INDIRECT_REF_P (deref)
	      && TREE_CODE (addr) == ADDR_EXPR);

  /* Don't propagate if ADDR's operand has incomplete type.  */
  if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
    return false;

  /* If the address is invariant then we do not need to preserve restrict
     qualifications.  But we do need to preserve volatile qualifiers until
     we can annotate the folded dereference itself properly.  */
  if (is_gimple_min_invariant (addr)
      && (!TREE_THIS_VOLATILE (deref)
	  || TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (addr, 0)))))
    return useless_type_conversion_p (TREE_TYPE (deref),
				      TREE_TYPE (TREE_OPERAND (addr, 0)));

  /* Else both the address substitution and the folding must result in
     a valid useless type conversion sequence.  */
  return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
				     TREE_TYPE (addr))
	  && useless_type_conversion_p (TREE_TYPE (deref),
					TREE_TYPE (TREE_OPERAND (addr, 0))));
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_value (rhs)->value;
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && may_propagate_address_into_dereference
			       (val->value, *base))
			{
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  tree ret, save = *base;
			  *base = TREE_OPERAND (val->value, 0);
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}

	      if (kind == tcc_reference)
		{
		  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT)
			return fold_unary (VIEW_CONVERT_EXPR,
					   TREE_TYPE (rhs), val->value);
		    }
		  else if (TREE_CODE (rhs) == INDIRECT_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
		      if (val->lattice_val == CONSTANT
			  && TREE_CODE (val->value) == ADDR_EXPR
			  && useless_type_conversion_p (TREE_TYPE (rhs),
							TREE_TYPE (TREE_TYPE (val->value))))
			rhs = TREE_OPERAND (val->value, 0);
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = gimple_assign_rhs1 (stmt);

	      /* Simplify the operand down to a constant.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = get_value (op0)->value;
		}

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  /* Do not allow differences in volatile qualification
		     as this might get us confused as to whether a
		     propagation destination statement is volatile
		     or not.  See PR36988.  */
		  && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
		      == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
		{
		  tree tem;
		  /* Still try to generate a constant of correct type.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
				   (op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return fold_unary_ignore_overflow (subcode,
						 gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = gimple_assign_rhs1 (stmt);
	      tree op1 = gimple_assign_rhs2 (stmt);

	      /* Simplify the operands down to constants when appropriate.  */
	      if (TREE_CODE (op0) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op0);
		  if (val->lattice_val == CONSTANT)
		    op0 = val->value;
		}

	      if (TREE_CODE (op1) == SSA_NAME)
		{
		  prop_value_t *val = get_value (op1);
		  if (val->lattice_val == CONSTANT)
		    op1 = val->value;
		}

	      /* Fold &foo + CST into an invariant reference if possible.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree tem = maybe_fold_offset_to_address (op0, op1,
							   TREE_TYPE (lhs));
		  if (tem != NULL_TREE)
		    return tem;
		}

	      return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = gimple_call_fn (stmt);
	prop_value_t *val;

	if (TREE_CODE (fn) == SSA_NAME)
	  {
	    val = get_value (fn);
	    if (val->lattice_val == CONSTANT)
	      fn = val->value;
	  }
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      {
		args[i] = gimple_call_arg (stmt, i);
		if (TREE_CODE (args[i]) == SSA_NAME)
		  {
		    val = get_value (args[i]);
		    if (val->lattice_val == CONSTANT)
		      args[i] = val->value;
		  }
	      }
	    call = build_call_array (gimple_call_return_type (stmt),
				     fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = gimple_cond_lhs (stmt);
	tree op1 = gimple_cond_rhs (stmt);
	enum tree_code code = gimple_cond_code (stmt);

	/* Simplify the operands down to constants when appropriate.  */
	if (TREE_CODE (op0) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op0);
	    if (val->lattice_val == CONSTANT)
	      op0 = val->value;
	  }

	if (TREE_CODE (op1) == SSA_NAME)
	  {
	    prop_value_t *val = get_value (op1);
	    if (val->lattice_val == CONSTANT)
	      op1 = val->value;
	  }

	return fold_binary (code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	tree rhs = gimple_switch_index (stmt);

	if (TREE_CODE (rhs) == SSA_NAME)
	  {
	    /* If the RHS is an SSA_NAME, return its known constant value,
	       if any.  */
	    return get_value (rhs)->value;
	  }

	return rhs;
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  prop_value_t *value;
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((value = get_value (idx))
	      && value->lattice_val == CONSTANT
	      && TREE_CODE (value->value) == INTEGER_CST)
	    idx = value->value;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_USELESS_TYPE_CONVERSION (cval);
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case INDIRECT_REF:
      {
	tree base = TREE_OPERAND (t, 0);
	if (TREE_CODE (base) == SSA_NAME
	    && (value = get_value (base))
	    && value->lattice_val == CONSTANT
	    && TREE_CODE (value->value) == ADDR_EXPR)
	  return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
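
/* As an illustrative example, given

     static const int primes[4] = { 2, 3, 5, 7 };

   a read of primes[2], or of *p_1 where p_1 is known to hold
   '&primes[2]', can be folded to the constant 5, provided the index
   is (or is known to be) an INTEGER_CST.  */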
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant;

  fold_defer_overflow_warnings ();

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    simplified = ccp_fold (stmt);
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
    }

  is_constant = simplified && is_gimple_min_invariant (simplified);

  fold_undefer_overflow_warnings (is_constant, stmt, 0);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:
	  break;
	}
      fprintf (dump_file, "\n");
    }

  if (is_constant)
    {
      /* The statement produced a constant value.  */
      val.lattice_val = CONSTANT;
      val.value = simplified;
    }
  else
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	val.lattice_val = likelyvalue;
      else
	val.lattice_val = VARYING;

      val.value = NULL_TREE;
    }

  return val;
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_copy_p (stmt))
    {
      tree rhs = gimple_assign_rhs1 (stmt);

      if (TREE_CODE (rhs) == SSA_NAME)
	{
	  /* For a simple copy operation, we copy the lattice values.  */
	  prop_value_t *nval = get_value (rhs);
	  val = *nval;
	}
      else
	val = evaluate_stmt (stmt);
    }
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = val.value ? find_taken_edge (block, val.value) : 0;
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
	/* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* A subroutine of fold_stmt_r.  Attempts to fold *(A+O) to A[X].
   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
				bool allow_negative_idx)
{
  tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
  tree array_type, elt_type, elt_size;
  tree domain_type;

  /* If BASE is an ARRAY_REF, we can pick up another offset (this time
     measured in units of the size of the element type) from that
     ARRAY_REF.  We can't do anything if either is variable.

     The case we handle here is *(&A[N]+O).  */
  if (TREE_CODE (base) == ARRAY_REF)
    {
      tree low_bound = array_ref_low_bound (base);

      elt_offset = TREE_OPERAND (base, 1);
      if (TREE_CODE (low_bound) != INTEGER_CST
	  || TREE_CODE (elt_offset) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
      base = TREE_OPERAND (base, 0);
    }

  /* Ignore stupid user tricks of indexing non-array variables.  */
  array_type = TREE_TYPE (base);
  if (TREE_CODE (array_type) != ARRAY_TYPE)
    return NULL_TREE;
  elt_type = TREE_TYPE (array_type);
  if (!useless_type_conversion_p (orig_type, elt_type))
    return NULL_TREE;

  /* Use signed size type for intermediate computation on the index.  */
  idx_type = signed_type_for (size_type_node);

  /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
     element type (so we can use the alignment if it's not constant).
     Otherwise, compute the offset as an index by using a division.  If the
     division isn't exact, then don't do anything.  */
  elt_size = TYPE_SIZE_UNIT (elt_type);
  if (!elt_size)
    return NULL;
  if (integer_zerop (offset))
    {
      if (TREE_CODE (elt_size) != INTEGER_CST)
	elt_size = size_int (TYPE_ALIGN (elt_type));

      idx = build_int_cst (idx_type, 0);
    }
  else
    {
      unsigned HOST_WIDE_INT lquo, lrem;
      HOST_WIDE_INT hquo, hrem;
      double_int soffset;

      /* The final array offset should be signed, so we need
	 to sign-extend the (possibly pointer) offset here
	 and use signed division.  */
      soffset = double_int_sext (tree_to_double_int (offset),
				 TYPE_PRECISION (TREE_TYPE (offset)));
      if (TREE_CODE (elt_size) != INTEGER_CST
	  || div_and_round_double (TRUNC_DIV_EXPR, 0,
				   soffset.low, soffset.high,
				   TREE_INT_CST_LOW (elt_size),
				   TREE_INT_CST_HIGH (elt_size),
				   &lquo, &hquo, &lrem, &hrem)
	  || lrem || hrem)
	return NULL_TREE;

      idx = build_int_cst_wide (idx_type, lquo, hquo);
    }

  /* Assume the low bound is zero.  If there is a domain type, get the
     low bound, if any, convert the index into that type, and add the
     low bound.  */
  min_idx = build_int_cst (idx_type, 0);
  domain_type = TYPE_DOMAIN (array_type);
  if (domain_type)
    {
      idx_type = domain_type;
      if (TYPE_MIN_VALUE (idx_type))
	min_idx = TYPE_MIN_VALUE (idx_type);
      else
	min_idx = fold_convert (idx_type, min_idx);

      if (TREE_CODE (min_idx) != INTEGER_CST)
	return NULL_TREE;

      elt_offset = fold_convert (idx_type, elt_offset);
    }

  if (!integer_zerop (min_idx))
    idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
  if (!integer_zerop (elt_offset))
    idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);

  /* Make sure to possibly truncate late after offsetting.  */
  idx = fold_convert (idx_type, idx);

  /* We don't want to construct access past array bounds. For example
       char *(c[4]);
       c[3][2];
     should not be simplified into (*c)[14] or tree-vrp will
     give false warnings.  The same is true for
       struct A { long x; char d[0]; } *a;
       (char *)a - 4;
     which should not be folded to &a->d[-8].  */
  if (domain_type
      && TYPE_MAX_VALUE (domain_type)
      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
    {
      tree up_bound = TYPE_MAX_VALUE (domain_type);

      if (tree_int_cst_lt (up_bound, idx)
	  /* Accesses after the end of arrays of size 0 (gcc
	     extension) and 1 are likely intentional ("struct
	     foo {int x; char d[1];}").  */
	  && compare_tree_int (up_bound, 1) > 0)
	return NULL_TREE;
    }
  if (domain_type
      && TYPE_MIN_VALUE (domain_type))
    {
      if (!allow_negative_idx
	  && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
	  && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
	return NULL_TREE;
    }
  else if (!allow_negative_idx
	   && compare_tree_int (idx, 0) < 0)
    return NULL_TREE;

  return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
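
/* For example (assuming a 4-byte int): for 'int a[8]', folding the
   dereference *(int *)((char *)&a + 8) reaches this function with a
   byte OFFSET of 8, which divides exactly by the element size and
   yields the ARRAY_REF a[2]; a byte offset of 6 would not fold.  */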
/* Attempt to fold *(S+O) to S.X.
   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
   is the desired result type.  */

static tree
maybe_fold_offset_to_component_ref (tree record_type, tree base, tree offset,
				    tree orig_type, bool base_is_ptr)
{
  tree f, t, field_type, tail_array_field, field_offset;
  tree ret, new_base;

  if (TREE_CODE (record_type) != RECORD_TYPE
      && TREE_CODE (record_type) != UNION_TYPE
      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
    return NULL_TREE;

  /* Short-circuit silly cases.  */
  if (useless_type_conversion_p (record_type, orig_type))
    return NULL_TREE;

  tail_array_field = NULL_TREE;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      int cmp;

      if (TREE_CODE (f) != FIELD_DECL)
	continue;
      if (DECL_BIT_FIELD (f))
	continue;

      if (!DECL_FIELD_OFFSET (f))
	continue;
      field_offset = byte_position (f);
      if (TREE_CODE (field_offset) != INTEGER_CST)
	continue;

      /* ??? Java creates "interesting" fields for representing base classes.
	 They have no name, and have no context.  With no context, we get into
	 trouble with nonoverlapping_component_refs_p.  Skip them.  */
      if (!DECL_FIELD_CONTEXT (f))
	continue;

      /* The previous array field isn't at the end.  */
      tail_array_field = NULL_TREE;

      /* Check to see if this offset overlaps with the field.  */
      cmp = tree_int_cst_compare (field_offset, offset);
      if (cmp > 0)
	continue;

      field_type = TREE_TYPE (f);

      /* Here we exactly match the offset being checked.  If the types match,
	 then we can return that field.  */
      if (cmp == 0
	  && useless_type_conversion_p (orig_type, field_type))
	{
	  if (base_is_ptr)
	    base = build1 (INDIRECT_REF, record_type, base);
	  t = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
	  return t;
	}

      /* Don't care about offsets into the middle of scalars.  */
      if (!AGGREGATE_TYPE_P (field_type))
	continue;

      /* Check for array at the end of the struct.  This is often
	 used as for flexible array members.  We should be able to
	 turn this into an array access anyway.  */
      if (TREE_CODE (field_type) == ARRAY_TYPE)
	tail_array_field = f;

      /* Check the end of the field against the offset.  */
      if (!DECL_SIZE_UNIT (f)
	  || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
	continue;
      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
	continue;

      /* If we matched, then set offset to the displacement into
	 this field.  */
      if (base_is_ptr)
	new_base = build1 (INDIRECT_REF, record_type, base);
      else
	new_base = base;
      new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);

      /* Recurse to possibly find the match.  */
      ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
					    f == TYPE_FIELDS (record_type));
      if (ret)
	return ret;
      ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
						orig_type, false);
      if (ret)
	return ret;
    }

  if (!tail_array_field)
    return NULL_TREE;

  f = tail_array_field;
  field_type = TREE_TYPE (f);
  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);

  /* If we get here, we've got an aggregate field, and a possibly
     nonzero offset into them.  Recurse and hope for a valid match.  */
  if (base_is_ptr)
    base = build1 (INDIRECT_REF, record_type, base);
  base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);

  t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
				      f == TYPE_FIELDS (record_type));
  if (t)
    return t;
  return maybe_fold_offset_to_component_ref (field_type, base, offset,
					     orig_type, false);
}
/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
   or BASE[index] or by combination of those.

   Before attempting the conversion strip off existing ADDR_EXPRs and
   handled component refs.  */

static tree
maybe_fold_offset_to_reference (tree base, tree offset, tree orig_type)
{
  tree ret;
  tree type;
  bool base_is_ptr = true;

  STRIP_NOPS (base);
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      base_is_ptr = false;

      base = TREE_OPERAND (base, 0);

      /* Handle the case where an existing COMPONENT_REF picks e.g. the wrong
	 field of a union, so it needs to be removed and a new COMPONENT_REF
	 constructed.  The wrong COMPONENT_REFs are often constructed by
	 folding the (type *)&object within the expression
	 (type *)&object+offset.  */
      if (handled_component_p (base))
	{
	  HOST_WIDE_INT sub_offset, size, maxsize;
	  tree newbase;
	  newbase = get_ref_base_and_extent (base, &sub_offset,
					     &size, &maxsize);
	  gcc_assert (newbase);
	  if (size == maxsize
	      && size != -1
	      && !(sub_offset & (BITS_PER_UNIT - 1)))
	    {
	      base = newbase;
	      if (sub_offset)
		offset = int_const_binop (PLUS_EXPR, offset,
					  build_int_cst (TREE_TYPE (offset),
					  sub_offset / BITS_PER_UNIT), 1);
	    }
	}
      if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
	  && integer_zerop (offset))
	return base;
      type = TREE_TYPE (base);
    }
  else
    {
      base_is_ptr = true;
      if (!POINTER_TYPE_P (TREE_TYPE (base)))
	return NULL_TREE;
      type = TREE_TYPE (TREE_TYPE (base));
    }
  ret = maybe_fold_offset_to_component_ref (type, base, offset,
					    orig_type, base_is_ptr);
  if (!ret)
    {
      if (base_is_ptr)
	base = build1 (INDIRECT_REF, type, base);
      ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
    }
  return ret;
}
/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
   or &BASE[index] or by combination of those.

   Before attempting the conversion strip off existing component refs.  */

tree
maybe_fold_offset_to_address (tree addr, tree offset, tree orig_type)
{
  tree t;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
	      && POINTER_TYPE_P (orig_type));

  t = maybe_fold_offset_to_reference (addr, offset, TREE_TYPE (orig_type));
  if (t != NULL_TREE)
    {
      tree orig = addr;
      tree ptr_type;

      /* For __builtin_object_size to function correctly we need to
	 make sure not to fold address arithmetic so that we change
	 reference from one array to another.  This would happen for
	 example for

	   struct X { char s1[10]; char s2[10] } s;
	   char *foo (void) { return &s.s2[-4]; }

	 where we need to avoid generating &s.s1[6].  As the C and
	 C++ frontends create different initial trees
	 (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
	 sophisticated comparisons here.  Note that checking for the
	 condition after the fact is easier than trying to avoid doing
	 the folding.  */
      STRIP_NOPS (orig);
      if (TREE_CODE (orig) == ADDR_EXPR)
	orig = TREE_OPERAND (orig, 0);
      if ((TREE_CODE (orig) == ARRAY_REF
	   || (TREE_CODE (orig) == COMPONENT_REF
	       && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
	  && (TREE_CODE (t) == ARRAY_REF
	      || TREE_CODE (t) == COMPONENT_REF)
	  && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
			       ? TREE_OPERAND (orig, 0) : orig,
			       TREE_CODE (t) == ARRAY_REF
			       ? TREE_OPERAND (t, 0) : t, 0))
	return NULL_TREE;

      ptr_type = build_pointer_type (TREE_TYPE (t));
      if (!useless_type_conversion_p (orig_type, ptr_type))
	return NULL_TREE;
      return build_fold_addr_expr_with_type (t, ptr_type);
    }

  return NULL_TREE;
}
/* A subroutine of fold_stmt_r.  Attempt to simplify *(BASE+OFFSET).
   Return the simplified expression, or NULL if nothing could be done.  */

static tree
maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
{
  tree t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  /* We may well have constructed a double-nested PLUS_EXPR via multiple
     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
     are sometimes added.  */
  base = fold (base);
  STRIP_TYPE_NOPS (base);
  TREE_OPERAND (expr, 0) = base;

  /* One possibility is that the address reduces to a string constant.  */
  t = fold_read_from_constant_string (expr);
  if (t)
    return t;

  /* Add in any offset from a POINTER_PLUS_EXPR.  */
  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
    {
      tree offset2;

      offset2 = TREE_OPERAND (base, 1);
      if (TREE_CODE (offset2) != INTEGER_CST)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);

      offset = fold_convert (sizetype,
			     int_const_binop (PLUS_EXPR, offset, offset2, 1));
    }

  if (TREE_CODE (base) == ADDR_EXPR)
    {
      tree base_addr = base;

      /* Strip the ADDR_EXPR.  */
      base = TREE_OPERAND (base, 0);

      /* Fold away CONST_DECL to its value, if the type is scalar.  */
      if (TREE_CODE (base) == CONST_DECL
	  && is_gimple_min_invariant (DECL_INITIAL (base)))
	return DECL_INITIAL (base);

      /* Try folding *(&B+O) to B.X.  */
      t = maybe_fold_offset_to_reference (base_addr, offset,
					  TREE_TYPE (expr));
      if (t)
	{
	  /* Preserve volatileness of the original expression.
	     We can end up with a plain decl here which is shared
	     and we shouldn't mess with its flags.  */
	  if (!SSA_VAR_P (t))
	    TREE_THIS_VOLATILE (t) = volatile_p;
	  return t;
	}
    }
  else
    {
      /* We can get here for out-of-range string constant accesses,
	 such as "_"[3].  Bail out of the entire substitution search
	 and arrange for the entire statement to be replaced by a
	 call to __builtin_trap.  In all likelihood this will all be
	 constant-folded away, but in the meantime we can't leave with
	 something that get_expr_operands can't understand.  */

      t = base;
      STRIP_NOPS (t);
      if (TREE_CODE (t) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
	{
	  /* FIXME: Except that this causes problems elsewhere with dead
	     code not being deleted, and we die in the rtl expanders
	     because we failed to remove some ssa_name.  In the meantime,
	     just return zero.  */
	  /* FIXME2: This condition should be signaled by
	     fold_read_from_constant_string directly, rather than
	     re-checking for it here.  */
	  return integer_zero_node;
	}

      /* Try folding *(B+O) to B->X.  Still an improvement.  */
      if (POINTER_TYPE_P (TREE_TYPE (base)))
	{
	  t = maybe_fold_offset_to_reference (base, offset,
					      TREE_TYPE (expr));
	  if (t)
	    return t;
	}
    }

  /* Otherwise we had an offset that we could not simplify.  */
  return NULL_TREE;
}
/* A quaint feature extant in our address arithmetic is that there
   can be hidden type changes here.  The type of the result need
   not be the same as the type of the input pointer.

   What we're after here is an expression of the form
	(T *)(&array + const)
   where array is OP0, const is OP1, RES_TYPE is T and
   the cast doesn't actually exist, but is implicit in the
   type of the POINTER_PLUS_EXPR.  We'd like to turn this into
	&array[x]
   which may be able to propagate further.  */

static tree
maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
{
  tree ptd_type;
  tree t;

  /* It had better be a constant.  */
  if (TREE_CODE (op1) != INTEGER_CST)
    return NULL_TREE;
  /* The first operand should be an ADDR_EXPR.  */
  if (TREE_CODE (op0) != ADDR_EXPR)
    return NULL_TREE;
  op0 = TREE_OPERAND (op0, 0);

  /* If the first operand is an ARRAY_REF, expand it so that we can fold
     the offset into it.  */
  while (TREE_CODE (op0) == ARRAY_REF)
    {
      tree array_obj = TREE_OPERAND (op0, 0);
      tree array_idx = TREE_OPERAND (op0, 1);
      tree elt_type = TREE_TYPE (op0);
      tree elt_size = TYPE_SIZE_UNIT (elt_type);
      tree min_idx;

      if (TREE_CODE (array_idx) != INTEGER_CST)
	break;
      if (TREE_CODE (elt_size) != INTEGER_CST)
	break;

      /* Un-bias the index by the min index of the array type.  */
      min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
      if (min_idx)
	{
	  min_idx = TYPE_MIN_VALUE (min_idx);
	  if (min_idx)
	    {
	      if (TREE_CODE (min_idx) != INTEGER_CST)
		break;

	      array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
	      if (!integer_zerop (min_idx))
		array_idx = int_const_binop (MINUS_EXPR, array_idx,
					     min_idx, 0);
	    }
	}

      /* Convert the index to a byte offset.  */
      array_idx = fold_convert (sizetype, array_idx);
      array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);

      /* Update the operands for the next round, or for folding.  */
      op1 = int_const_binop (PLUS_EXPR,
			     array_idx, op1, 0);
      op0 = array_obj;
    }

  ptd_type = TREE_TYPE (res_type);
  /* If we want a pointer to void, reconstruct the reference from the
     array element type.  A pointer to that can be trivially converted
     to void *.  This happens as we fold (void *)(ptr p+ off).  */
  if (VOID_TYPE_P (ptd_type)
      && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
    ptd_type = TREE_TYPE (TREE_TYPE (op0));

  /* At which point we can try some of the same things as for indirects.  */
  t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
  if (!t)
    t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
					    ptd_type, false);
  if (t)
    t = build1 (ADDR_EXPR, res_type, t);

  return t;
}
2163 /* For passing state through walk_tree into fold_stmt_r and its
2166 struct fold_stmt_r_data
2170 bool *inside_addr_expr_p;
/* Subroutine of fold_stmt called via walk_tree.  We perform several
   simplifications of EXPR_P, mostly having to do with pointer arithmetic.  */

static tree
fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct fold_stmt_r_data *fold_stmt_r_data;
  bool *inside_addr_expr_p;
  bool *changed_p;
  tree expr = *expr_p, t;
  bool volatile_p = TREE_THIS_VOLATILE (expr);

  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
  changed_p = fold_stmt_r_data->changed_p;

  /* ??? It'd be nice if walk_tree had a pre-order option.  */
  switch (TREE_CODE (expr))
    {
    case INDIRECT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
                                    integer_zero_node);
      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
      if (wi->is_lhs && t && TREE_CODE (t) == INTEGER_CST)
        t = NULL_TREE;
      if (!t
          && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        /* If we had a good reason for propagating the address here,
           make sure we end up with valid gimple.  See PR34989.  */
        t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
      break;

    CASE_CONVERT:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      if (POINTER_TYPE_P (TREE_TYPE (expr))
          && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (expr)))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (t = maybe_fold_offset_to_address (TREE_OPERAND (expr, 0),
                                                integer_zero_node,
                                                TREE_TYPE (TREE_TYPE (expr)))))
        break;
      t = NULL_TREE;
      break;

    case ARRAY_REF:
      /* ??? Could handle more ARRAY_REFs here, as a variant of INDIRECT_REF.
         We'd only want to bother decomposing an existing ARRAY_REF if
         the base array is found to have another offset contained within.
         Otherwise we'd be wasting time.  */

      /* If we are not processing expressions found within an
         ADDR_EXPR, then we can fold constant array references.
         Don't fold on LHS either, to avoid folding "abc"[0] = 5
         to 'a' = 5.  */
      if (!*inside_addr_expr_p && !wi->is_lhs)
        t = fold_read_from_constant_string (expr);
      else
        t = NULL;
      break;

    case ADDR_EXPR:
      *inside_addr_expr_p = true;
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      *inside_addr_expr_p = false;
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the value is properly considered constant, and so gets
         propagated as expected.  */
      if (*changed_p)
        recompute_tree_invariant_for_addr_expr (expr);
      break;

    case COMPONENT_REF:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      /* Make sure the FIELD_DECL is actually a field in the type on the lhs.
         We've already checked that the records are compatible, so we should
         come up with a set of compatible fields.  */
      {
        tree expr_record = TREE_TYPE (TREE_OPERAND (expr, 0));
        tree expr_field = TREE_OPERAND (expr, 1);

        if (DECL_FIELD_CONTEXT (expr_field) != TYPE_MAIN_VARIANT (expr_record))
          {
            expr_field = find_compatible_field (expr_record, expr_field);
            TREE_OPERAND (expr, 1) = expr_field;
          }
      }
      break;

    case TARGET_MEM_REF:
      t = maybe_fold_tmr (expr);
      break;

    case POINTER_PLUS_EXPR:
      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
      if (t)
        return t;
      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
      if (t)
        return t;
      *walk_subtrees = 0;

      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
                                    TREE_OPERAND (expr, 0),
                                    TREE_OPERAND (expr, 1));
      break;

    case COND_EXPR:
      if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
        {
          tree op0 = TREE_OPERAND (expr, 0);
          tree tem;
          bool set;

          fold_defer_overflow_warnings ();
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
          /* This is actually a conditional expression, not a GIMPLE
             conditional statement, however, the valid_gimple_rhs_p
             test still applies.  */
          set = (tem && is_gimple_condexpr (tem)
                 && valid_gimple_rhs_p (tem));
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            COND_EXPR_COND (expr) = tem;
        }
      t = NULL_TREE;
      break;

    default:
      return NULL_TREE;
    }

  if (t)
    {
      /* Preserve volatileness of the original expression.
         We can end up with a plain decl here which is shared
         and we shouldn't mess with its flags.  */
      if (!SSA_VAR_P (t))
        TREE_THIS_VOLATILE (t) = volatile_p;
      *expr_p = t;
      *changed_p = true;
    }

  return NULL_TREE;
}
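/* As an illustration of the INDIRECT_REF case (hypothetical GIMPLE,
   names invented): once earlier propagation has left a literal
   ADDR_EXPR as the operand, the walk rewrites

     t_3 = *&x;   into   t_3 = x;

   carrying TREE_THIS_VOLATILE over so a volatile access is not
   silently turned into a plain one.  */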
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine, or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      if (TREE_CODE (arg) == COND_EXPR)
        return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
               && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
               && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              /* If *LENGTH is smaller than VAL, update it with the
                 maximum.  */
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
    return true;
  bitmap_set_bit (visited, SSA_NAME_VERSION (arg));

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_maxval_strlen (rhs, length, visited, type);
        }
      return false;

    case GIMPLE_PHI:
      {
        /* All the arguments of the PHI node must have the same constant
           length.  */
        unsigned i;

        for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
          {
            tree arg = gimple_phi_arg (def_stmt, i)->def;

            /* If this PHI has itself as an argument, we cannot
               determine the string length of this argument.  However,
               if we can find a constant string length for the other
               PHI args then we can still be sure that this is a
               constant string length.  So be optimistic and just
               continue with the next argument.  */
            if (arg == gimple_phi_result (def_stmt))
              continue;

            if (!get_maxval_strlen (arg, length, visited, type))
              return false;
          }
      }
      return true;

    default:
      return false;
    }
}
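/* Example of the TYPE distinction (SSA names invented for
   illustration): given

     s_1 = PHI <"foo"(2), "ab"(3)>
     n_2 = __builtin_strlen (s_1);

   a TYPE == 0 query fails, because the two possible lengths 3 and 2
   differ, while a TYPE == 1 query succeeds with the maximum length 3,
   which is the conservative bound the *_chk folders below need.  */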
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

static tree
ccp_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;

  gcc_assert (is_gimple_call (stmt));

  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
        {
          tree new_val =
            fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we cannot use the result.  */
          if (is_gimple_val (new_val)
              || (is_gimple_cast (new_val)
                  && is_gimple_val (TREE_OPERAND (new_val, 0))))
            return new_val;
        }
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
                                      gimple_call_arg (stmt, 0),
                                      gimple_call_arg (stmt, 1),
                                      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
                                       gimple_call_arg (stmt, 0),
                                       gimple_call_arg (stmt, 1),
                                       gimple_call_arg (stmt, 2),
                                       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
        result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
                                     gimple_call_arg (stmt, 1),
                                     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_memory_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_stxcpy_chk (callee,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
        result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
                                           gimple_call_arg (stmt, 1),
                                           gimple_call_arg (stmt, 2),
                                           gimple_call_arg (stmt, 3),
                                           val[2]);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
        result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
                                                   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
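/* A typical simplification made here (illustrative, not from any test
   case): when get_maxval_strlen proves that the source of

     __builtin_strcpy (dst_1, src_2);

   has known length N, fold_builtin_strcpy may rewrite the call into a
   __builtin_memcpy of N + 1 bytes, i.e. another call rather than a
   constant, which is why callers must re-validate the result.  */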
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  tree result = NULL;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        /* Try to fold a conditional expression.  */
        if (TREE_CODE (rhs) == COND_EXPR)
          {
            tree temp = fold (COND_EXPR_COND (rhs));
            if (temp != COND_EXPR_COND (rhs))
              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
          }

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;
        else
          /* It is possible that fold_stmt_r simplified the RHS.
             Make sure that the subcode of this statement still
             reflects the principal operator of the rhs operand.  */
          return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary (subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
        else if (CONVERT_EXPR_CODE_P (subcode)
                 && POINTER_TYPE_P (gimple_expr_type (stmt))
                 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
          {
            tree type = gimple_expr_type (stmt);
            tree t = maybe_fold_offset_to_address (gimple_assign_rhs1 (stmt),
                                                   integer_zero_node, type);
            if (t)
              return t;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to fold pointer addition.  */
      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
        {
          tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
            {
              type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
              if (!useless_type_conversion_p
                    (TREE_TYPE (gimple_assign_lhs (stmt)), type))
                type = TREE_TYPE (gimple_assign_rhs1 (stmt));
            }
          result = maybe_fold_stmt_addition (type,
                                             gimple_assign_rhs1 (stmt),
                                             gimple_assign_rhs2 (stmt));
        }

      if (!result)
        result = fold_binary (subcode,
                              TREE_TYPE (gimple_assign_lhs (stmt)),
                              gimple_assign_rhs1 (stmt),
                              gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;

          /* Fold might have produced non-GIMPLE, so if we trust it blindly
             we lose canonicalization opportunities.  Do not go again
             through fold here though, or the same non-GIMPLE will be
             produced.  */
          if (commutative_tree_code (subcode)
              && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                       gimple_assign_rhs2 (stmt), false))
            return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
                           gimple_assign_rhs2 (stmt),
                           gimple_assign_rhs1 (stmt));
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
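/* For instance (a hypothetical statement, names invented): after CCP
   substitutes c_2 = 3 into

     x_4 = c_2 * 4;

   the GIMPLE_BINARY_RHS arm asks fold_binary to evaluate 3 * 4 and
   returns 12, a valid GIMPLE rhs, so the caller rewrites the statement
   as x_4 = 12.  */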
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary (gimple_cond_code (stmt),
                             boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
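/* For instance (SSA names and constants invented for illustration):
   once CCP has propagated j_2 = 5, the condition

     if (j_2 > 4)

   folds to a constant true value and the statement is rewritten as an
   always-true test.  The outgoing edges are left alone; removing the
   dead one is the job of CFG cleanup, since this function must not
   alter the cfg.  */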
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
fold_gimple_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  tree callee = gimple_call_fndecl (stmt);

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = ccp_fold_builtin (stmt);

      if (result)
        return update_call_from_tree (gsi, result);
    }
  else
    {
      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
         here are when we've propagated the address of a decl into the
         object slot.  */
      /* ??? Should perhaps do this in fold proper.  However, doing it
         there requires that we create a new CALL_EXPR, and that requires
         copying EH region info to the new node.  Easier to just do it
         here where we can just smash the call operand.  */
      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
      callee = gimple_call_fn (stmt);
      if (TREE_CODE (callee) == OBJ_TYPE_REF
          && lang_hooks.fold_obj_type_ref
          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
          && DECL_P (TREE_OPERAND
                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
        {
          tree t;

          /* ??? Caution: Broken ADDR_EXPR semantics means that
             looking at the type of the operand of the addr_expr
             can yield an array type.  See silly exception in
             check_pointer_types_r.  */
          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
          t = lang_hooks.fold_obj_type_ref (callee, t);
          if (t)
            {
              gimple_call_set_fn (stmt, t);
              return true;
            }
        }
    }

  return false;
}
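/* Devirtualization example (C++ flavored, names invented for
   illustration): once the address &b of a concrete object has been
   propagated into the object slot of

     OBJ_TYPE_REF (f_2; &b -> 2) (&b);

   the DECL is visible, lang_hooks.fold_obj_type_ref can look the
   method up in b's vtable, and the statement becomes a direct call.  */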
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;

  bool changed = false;
  bool inside_addr_expr = false;

  gimple stmt = gsi_stmt (*gsi);

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs != NULL_TREE)
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        stmt = gsi_stmt (*gsi);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;
    case GIMPLE_CALL:
      /* The entire statement may be replaced in this case.  */
      changed |= fold_gimple_call (gsi);
      break;

    default:
      break;
    }

  return changed;
}
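/* Sketch of typical usage (assuming a valid iterator obtained by a
   caller such as the substitute-and-fold phase):

     if (fold_stmt (&gsi))
       stmt = gsi_stmt (gsi);

   i.e. the caller must re-fetch the statement afterwards, because a
   GIMPLE_CALL may have been replaced wholesale.  */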
/* Perform the minimal folding on statement STMT.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.  */

bool
fold_stmt_inplace (gimple stmt)
{
  tree res;
  struct fold_stmt_r_data fold_stmt_r_data;
  struct walk_stmt_info wi;
  gimple_stmt_iterator si;

  bool changed = false;
  bool inside_addr_expr = false;

  fold_stmt_r_data.stmt = stmt;
  fold_stmt_r_data.changed_p = &changed;
  fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;

  memset (&wi, 0, sizeof (wi));
  wi.info = &fold_stmt_r_data;

  /* Fold the individual operands.
     For example, fold instances of *&VAR into VAR, etc.

     It appears that, at one time, maybe_fold_stmt_indirect
     would cause the walk to return non-null in order to
     signal that the entire statement should be replaced with
     a call to _builtin_trap.  This functionality is currently
     disabled, as noted in a FIXME, and cannot be supported here.  */
  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
  gcc_assert (!res);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops;
        tree new_rhs;
        old_num_ops = gimple_num_ops (stmt);
        si = gsi_for_stmt (stmt);
        new_rhs = fold_gimple_assign (&si);
        if (new_rhs != NULL_TREE
            && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
          {
            gimple_assign_set_rhs_from_tree (&si, new_rhs);
            changed = true;
          }
        gcc_assert (gsi_stmt (si) == stmt);
        break;
      }
    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    default:
      break;
    }

  return changed;
}
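/* The extra operand-count guard distinguishes this from fold_stmt:
   rewriting, say, x_3 = y_4 + 0 into x_3 = y_4 shrinks the statement
   and is safe in place, whereas any folding that would need to grow or
   replace the statement is rejected here.  (Statements shown for
   illustration only.)  */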
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee, rhs;
  gimple stmt, stack_save;
  gimple_stmt_iterator stack_save_gsi;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        break;
    }

  if (gsi_end_p (i)
      && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
    return NULL_TREE;

  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
  if (gimple_code (stack_save) != GIMPLE_CALL
      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
      || stmt_could_throw_p (stack_save)
      || !has_single_use (gimple_call_arg (call, 0)))
    return NULL_TREE;

  callee = gimple_call_fndecl (stack_save);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
      || gimple_call_num_args (stack_save) != 0)
    return NULL_TREE;

  stack_save_gsi = gsi_for_stmt (stack_save);
  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
  if (!update_call_from_tree (&stack_save_gsi, rhs))
    {
      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
      return NULL_TREE;
    }
  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
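/* Shape of the transformation (illustrative):

     p_1 = __builtin_stack_save ();
     ... no calls or asms ...
     __builtin_stack_restore (p_1);

   Here the pair is useless: the restore is replaced by a no-effect
   constant and later deleted, and the save is rewritten into the dead
   assignment p_1 = 0, which DCE can then remove.  */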
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
                             1, integer_zero_node);
      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref (lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert (TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
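/* On a target whose va_list really is a plain character pointer
   (illustrative), the rewrites are

     __builtin_va_start (&ap, 0);   ->   ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);     ->   d = s;
     __builtin_va_end (&ap);        ->   (deleted)  */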
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  */

static void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  tree tmp = NULL_TREE;  /* Silence warning.  */
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = gimple_seq_alloc ();
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  lhs = gimple_call_lhs (stmt);

  push_gimplify_context (&gctx);

  if (lhs == NULL_TREE)
    gimplify_and_add (expr, &stmts);
  else
    tmp = get_initialized_tmp_var (expr, &stmts, NULL);

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* The replacement can expose previously unreferenced variables.  */
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      find_new_referenced_vars (new_stmt);
      gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
      mark_symbols_for_renaming (new_stmt);
      gsi_next (si_p);
    }

  if (lhs == NULL_TREE)
    new_stmt = gimple_build_nop ();
  else
    {
      new_stmt = gimple_build_assign (lhs, tmp);
      copy_virtual_operands (new_stmt, stmt);
      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
    }

  gimple_set_location (new_stmt, gimple_location (stmt));
  gsi_replace (si_p, new_stmt, false);
}
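/* Example (illustrative, names and lengths invented): if
   ccp_fold_builtin rewrites

     x_1 = __builtin_strcpy (dst_2, src_3);

   into a memcpy-based expression that is not a valid GIMPLE rhs, the
   expression is first gimplified into a temporary

     t_4 = __builtin_memcpy (dst_2, src_3, 4);

   inserted before the call, and the call itself is then replaced by
   the assignment x_1 = t_4.  */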
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);

          result = ccp_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          push_stmt_changes (gsi_stmt_ptr (&i));

          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_rebuild_alias;
            }

          stmt = gsi_stmt (i);
          pop_stmt_changes (gsi_stmt_ptr (&i));

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
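/* The retry at the bottom of the inner loop matters for chains such as
   (illustrative)

     __builtin_strcpy_chk -> __builtin_strcpy -> __builtin_memcpy

   where each folding step may expose another builtin; the iterator
   only advances once the function code stops changing.  */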
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};