/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
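
/* A small worked example of the pass (an illustration added here, not
   part of the original sources):

	x_1 = 4;
	y_2 = x_1 + 5;
	if (y_2 > 10)
	  ...

   Visiting the first assignment sets x_1 to CONSTANT 4; propagating
   that into the second sets y_2 to CONSTANT 9; visiting the
   conditional folds the predicate to false, so only the false edge is
   marked executable and PHI nodes downstream may ignore arguments
   flowing in through the dead true edge.  */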
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
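
/* Worked example of the value/mask encoding (an illustration, not
   part of the original sources): the pair value == 0, mask == 0x7
   describes every X with X & ~0x7 == 0, i.e. "the low three bits are
   unknown, all higher bits are zero".  A mask of zero means the value
   is exactly known; a mask of all ones carries no bit information at
   all and corresponds to VARYING.  */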
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	{
	  fprintf (outf, "%sCONSTANT ", prefix);
	  print_generic_expr (outf, val.value, dump_flags);
	}
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
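
/* Example of the mask widening above (an illustration, not part of
   the original sources): if VAR was CONSTANT 4 with mask 0 and the
   new value is CONSTANT 6 with mask 0, then diff == 4 ^ 6 == 2 and
   the stored value becomes 6 with mask 2: only bit 1 is demoted to
   unknown while the remaining bits stay known, keeping the transition
   monotone on the bitwise lattice.  */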
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
			      &bitsize, &bitpos, &offset,
			      &mode, &align, &align, false);
  if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
			   TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
	   && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
	       > BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
		   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
		    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr),
			 shwi_to_double_int (bitpos / BITS_PER_UNIT),
			 double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
	val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
	val.value = NULL_TREE;
    }

  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr), value_to_double_int (oval),
			 oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	}
      else
	{
	  val.lattice_val = CONSTANT;
	  val.value = double_int_to_tree (TREE_TYPE (expr), value);
	}
    }

  return val;
}
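
/* Example (an illustration, not part of the original sources): for
   EXPR == &a with a known to be 8-byte aligned, the result above is
   value == 0 with mask == ~7 clipped to the pointer precision, i.e.
   the low three bits are known to be zero and all higher bits are
   unknown.  A constant BITPOS of 4 bytes is then added through
   bit_value_binop_1, yielding value == 4 with the same mask.  */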
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
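
/* Example (an illustration, not part of the original sources): for
   z_3 = x_1 + y_2 with x_1 CONSTANT and y_2 UNDEFINED, the PLUS_EXPR
   case above returns UNDEFINED -- the compiler may still choose a
   convenient value for y_2.  For z_3 = x_1 | y_2 with the same
   operands we return VARYING instead, because y_2 == -1 would fix
   the result regardless of x_1.  */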
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
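
/* Worked example for the alignment derivation above (an illustration,
   not part of the original sources): a pointer with value == 4 and
   mask == ~7 has its low three bits known.  TEM == mask.low then has
   8 as its least significant set bit, so ALIGN becomes 8 and MISALIGN
   becomes 4 & (8 - 1) == 4: the pointer is known to be 4 modulo 8.  */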
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	  = double_int_ior (double_int_ior (val1->mask,
					    val2->mask),
			    double_int_xor (tree_to_double_int (val1->value),
					    tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
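
/* Example of the INTEGER_CST meet above (an illustration, not part of
   the original sources): meeting CONSTANT 8 (mask 0) with CONSTANT 12
   (mask 0) computes 8 ^ 12 == 4, so the result is CONSTANT with value
   8 and mask 4 -- "8 or 12", only bit 2 unknown.  The result only
   drops to VARYING when the combined mask becomes all ones.  */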
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, Compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */
static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_constant_value (rhs);
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}
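
/* Example for the POINTER_PLUS_EXPR translation above (an
   illustration, not part of the original sources): once op0
   valueizes to &a and op1 to 4, the RHS is rewritten as the address
   of a MEM_REF of a at byte offset 4, an invariant that later rounds
   of propagation and the alignment machinery can look through.  */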
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset)
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      base = get_constant_value (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.

   We do only simple job of folding byte accesses.  */

static tree
fold_string_cst_ctor_reference (tree type, tree ctor,
				unsigned HOST_WIDE_INT offset,
				unsigned HOST_WIDE_INT size)
{
  if (INTEGRAL_TYPE_P (type)
      && (TYPE_MODE (type)
	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  == MODE_INT)
      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
      && size == BITS_PER_UNIT
      && !(offset % BITS_PER_UNIT))
    {
      offset /= BITS_PER_UNIT;
      if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
					  [offset]));
      /* Folding
	 const char a[20]="hello";
	 return a[10];

	 might lead to offset greater than string length.  In this case we
	 know value is either initialized to 0 or out of bounds.  Return 0
	 in both cases.  */
      return build_zero_cst (type);
    }
  return NULL_TREE;
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);

      /* Do we have match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
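
/* Worked example for the index computation above (an illustration,
   not part of the original sources): for a byte access (SIZE == 8)
   at bit OFFSET 72 into int a[4] (low_bound 0, elt_size 4), we get
   access_index == (72 / 8) / 4 == 2 and inner_offset == 72 % 32 == 8,
   so the constructor element for a[2] is consulted one byte in.  A
   32-bit access at the same offset would be rejected because
   8 + 32 > 32 spans two elements.  */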
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
				  double_int_mul (byte_offset_cst,
						  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = double_int_add (bitoffset,
					tree_to_double_int (field_size));
      else
	bitoffset_end = double_int_zero;

      /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)?  */
      if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
	  && (field_size == NULL_TREE
	      || double_int_cmp (uhwi_to_double_int (offset),
				 bitoffset_end, 0) < 0))
	{
	  double_int access_end = double_int_add (uhwi_to_double_int (offset),
						  uhwi_to_double_int (size));
	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
						    bitoffset);
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      double_int_to_uhwi (inner_offset), size);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (ctor);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (ctor);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  if (TREE_CODE (ctor) == STRING_CST)
    return fold_string_cst_ctor_reference (type, ctor, offset, size);
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use get_constant_value.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && (idx = get_constant_value (TREE_OPERAND (t, 1)))
	  && host_integerp (idx, 0))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       host_integerp (low_bound, 0))
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1)))
	    {
	      offset = TREE_INT_CST_LOW (idx);
	      offset -= TREE_INT_CST_LOW (low_bound);
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined, but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;
      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
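
/* Example (an illustration, not part of the original sources):
   BIT_NOT_EXPR applied to the pair value == 0, mask == 1 ("0 or 1")
   just complements the value bits and keeps the mask, yielding
   value == ~0 with mask == 1, i.e. "-1 or -2".  Conversions re-extend
   both value and mask, so sign-extending an unknown sign bit
   correctly makes all the new high bits unknown.  */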
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* If we know the most significant bits we know the values
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
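
/* Worked example for the PLUS_EXPR case above (an illustration, not
   part of the original sources): adding value 0 with mask 1 ("0 or
   1") to the constant 3 gives lo == 0 + 3 == 3 and hi == 1 + 3 == 4,
   so lo ^ hi == 7 and the result mask is 1 | 7 == 7.  The sum is
   "3 or 4": the possible carry out of bit 0 makes bits 1 and 2
   unknown as well, while all higher bits remain known zero.  */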
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
2047 /* Evaluate statement STMT.
2048 Valid only for assignments, calls, conditionals, and switches. */
2051 evaluate_stmt (gimple stmt)
2054 tree simplified = NULL_TREE;
2055 ccp_lattice_t likelyvalue = likely_value (stmt);
2056 bool is_constant = false;
2058 if (dump_file && (dump_flags & TDF_DETAILS))
2060 fprintf (dump_file, "which is likely ");
2061 switch (likelyvalue)
2064 fprintf (dump_file, "CONSTANT");
2067 fprintf (dump_file, "UNDEFINED");
2070 fprintf (dump_file, "VARYING");
2074 fprintf (dump_file, "\n");
2077 /* If the statement is likely to have a CONSTANT result, then try
2078 to fold the statement to determine the constant value. */
2079 /* FIXME. This is the only place that we call ccp_fold.
2080 Since likely_value never returns CONSTANT for calls, we will
2081 not attempt to fold them, including builtins that may profit. */
2082 if (likelyvalue == CONSTANT)
2084 fold_defer_overflow_warnings ();
2085 simplified = ccp_fold (stmt);
2086 is_constant = simplified && is_gimple_min_invariant (simplified);
2087 fold_undefer_overflow_warnings (is_constant, stmt, 0);
2090 /* The statement produced a constant value. */
2091 val.lattice_val = CONSTANT;
2092 val.value = simplified;
2093 val.mask = double_int_zero;
2096 /* If the statement is likely to have a VARYING result, then do not
2097 bother folding the statement. */
2098 else if (likelyvalue == VARYING)
2100 enum gimple_code code = gimple_code (stmt);
2101 if (code == GIMPLE_ASSIGN)
2103 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2105 /* Other cases cannot satisfy is_gimple_min_invariant
2107 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
2108 simplified = gimple_assign_rhs1 (stmt);
2110 else if (code == GIMPLE_SWITCH)
2111 simplified = gimple_switch_index (stmt);
2113 /* These cannot satisfy is_gimple_min_invariant without folding. */
2114 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
2115 is_constant = simplified && is_gimple_min_invariant (simplified);
2118 /* The statement produced a constant value. */
2119 val.lattice_val = CONSTANT;
2120 val.value = simplified;
2121 val.mask = double_int_zero;

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
        {
          enum tree_code subcode = gimple_assign_rhs_code (stmt);
          tree rhs1 = gimple_assign_rhs1 (stmt);
          switch (get_gimple_rhs_class (subcode))
            {
            case GIMPLE_SINGLE_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                val = get_value_for_expr (rhs1, true);
              break;

            case GIMPLE_UNARY_RHS:
              if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
                      || POINTER_TYPE_P (gimple_expr_type (stmt))))
                val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
              break;

            case GIMPLE_BINARY_RHS:
              if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
                {
                  tree lhs = gimple_assign_lhs (stmt);
                  tree rhs2 = gimple_assign_rhs2 (stmt);
                  val = bit_value_binop (subcode,
                                         TREE_TYPE (lhs), rhs1, rhs2);
                }
              break;

            default:
              break;
            }
        }
      else if (code == GIMPLE_COND)
        {
          enum tree_code code = gimple_cond_code (stmt);
          tree rhs1 = gimple_cond_lhs (stmt);
          tree rhs2 = gimple_cond_rhs (stmt);
          if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
              || POINTER_TYPE_P (TREE_TYPE (rhs1)))
            val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
        }
      else if (code == GIMPLE_CALL
               && (fndecl = gimple_call_fndecl (stmt))
               && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_MALLOC:
            case BUILT_IN_REALLOC:
            case BUILT_IN_CALLOC:
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
                           (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
                              / BITS_PER_UNIT - 1));
              break;

            case BUILT_IN_ALLOCA:
              val.lattice_val = CONSTANT;
              val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
              val.mask = shwi_to_double_int
                           (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
                              / BITS_PER_UNIT - 1));
              break;

            default:
              break;
            }
        }
      is_constant = (val.lattice_val == CONSTANT);
    }
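
  /* A note on the mask convention used above (values illustrative):
     zero mask bits are known, one bits are unknown.  For
       x_2 = y_1 | 3;
     with y_1 completely unknown, bit_value_binop yields value 3 and
     mask ~3, i.e. the low two bits of x_2 are known to be set even
     though x_2 as a whole is not constant.  Similarly, the
     BUILT_IN_MALLOC case records a zero value whose low bits, up to
     log2 of the allocator alignment in bytes, are known zero,
     encoding the alignment of the returned pointer.  */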

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
         had UNDEFINED operands, then the result of the statement
         should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
        {
          val.lattice_val = likelyvalue;
          val.mask = double_int_zero;
        }
      else
        {
          val.lattice_val = VARYING;
          val.mask = double_int_minus_one;
        }

      val.value = NULL_TREE;
    }

  return val;
}

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        prop_value_t val;
        /* Statement evaluation will handle type mismatches in constants
           more gracefully than the final propagation.  This allows us to
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
            || !double_int_zero_p (val.mask))
          return false;

        if (dump_file)
          {
            fprintf (dump_file, "Folding predicate ");
            print_gimple_expr (dump_file, stmt, 0, 0);
            fprintf (dump_file, " to ");
            print_generic_expr (dump_file, val.value, 0);
            fprintf (dump_file, "\n");
          }

        if (integer_zerop (val.value))
          gimple_cond_make_false (stmt);
        else
          gimple_cond_make_true (stmt);

        return true;
      }
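
      /* For example (hypothetical GIMPLE): if the lattice proved
         i_1 == 0, a conditional
           if (i_1 != 0) goto <bb 3>; else goto <bb 4>;
         evaluates to false here, and gimple_cond_make_false rewrites
         it to the trivially false test if (1 == 0), leaving CFG
         cleanup to remove the dead edge and its destination.  */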

    case GIMPLE_CALL:
      {
        tree lhs = gimple_call_lhs (stmt);
        tree val;
        tree argt;
        tree callee;
        bool changed = false;
        unsigned i;

        /* If the call was folded into a constant make sure it goes
           away even if we cannot propagate into all uses because of
           type issues.  */
        if (lhs
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree new_rhs = unshare_expr (val);
            bool res;
            if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                            TREE_TYPE (new_rhs)))
              new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
            res = update_call_from_tree (gsi, new_rhs);
            gcc_assert (res);
            return true;
          }

        /* Propagate into the call arguments.  Compared to replace_uses_in
           this can use the argument slot types for type verification
           instead of the current argument type.  We also can safely
           drop qualifiers here as we are dealing with constants anyway.  */
        argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
        for (i = 0; i < gimple_call_num_args (stmt) && argt;
             ++i, argt = TREE_CHAIN (argt))
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) == SSA_NAME
                && (val = get_constant_value (arg))
                && useless_type_conversion_p
                     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
                      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
              {
                gimple_call_set_arg (stmt, i, unshare_expr (val));
                changed = true;
              }
          }

        /* If the callee is an OBJ_TYPE_REF through an SSA name, try
           folding the call with the propagated constant value.  */
        callee = gimple_call_fn (stmt);
        if (TREE_CODE (callee) == OBJ_TYPE_REF
            && TREE_CODE (OBJ_TYPE_REF_EXPR (callee)) == SSA_NAME)
          {
            tree expr = OBJ_TYPE_REF_EXPR (callee);
            OBJ_TYPE_REF_EXPR (callee) = valueize_op (expr);
            if (gimple_fold_call (gsi, false))
              changed = true;
            else
              OBJ_TYPE_REF_EXPR (callee) = expr;
          }

        return changed;
      }

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);
        tree val;

        /* If we have a load that turned out to be constant replace it
           as we cannot propagate into all uses in all cases.  */
        if (gimple_assign_single_p (stmt)
            && TREE_CODE (lhs) == SSA_NAME
            && (val = get_constant_value (lhs)))
          {
            tree rhs = unshare_expr (val);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
              rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }

        return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */
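
/* For instance (illustrative SSA names): for the copy
     y_2 = x_1;
   the lattice value of x_1 is copied to y_2 wholesale, whereas for
     y_2 = x_1 + 1;
   evaluate_stmt folds the RHS using whatever is known about x_1.  */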

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
         value to set.  */
      if (set_lattice_value (lhs, val))
        {
          *output_p = lhs;
          if (val.lattice_val == VARYING)
            retval = SSA_PROP_VARYING;
          else
            retval = SSA_PROP_INTERESTING;
        }
    }

  return retval;
}

/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */
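
/* For instance (illustrative): if evaluate_stmt reduces the predicate of
     if (a_1 > 10) goto <bb 3>; else goto <bb 4>;
   to the constant 0, find_taken_edge returns the edge to <bb 4>, and
   only that edge is fed to the propagation engine.  */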

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
         output value, evaluate its RHS to see if the lattice value of
         its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
        return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
         which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
         computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};

/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
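
/* For example (hypothetical GIMPLE):
     p_1 = __builtin_stack_save ();
     ...  no intervening calls or asm statements ...
     __builtin_stack_restore (p_1);
     __builtin_stack_restore (p_2);
   The first restore is redundant: returning integer_zero_node below
   lets the caller replace it with a no-op, and when p_1 has exactly
   one use the matching __builtin_stack_save call is rewritten to a
   constant 0 the same way.  */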

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
          || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
          /* All regular builtins are ok, just obviously not alloca.  */
          || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
        return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
        goto second_stack_restore;
    }

  /* Allow a single outgoing edge to the exit block, or no outgoing
     edges at all.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
        return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
        {
          callee = gimple_call_fndecl (stack_save);
          if (callee
              && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
            {
              gimple_stmt_iterator stack_save_gsi;
              tree rhs;

              stack_save_gsi = gsi_for_stmt (stack_save);
              rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
              update_call_from_tree (&stack_save_gsi, rhs);
            }
        }
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}

/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   remove __builtin_va_end (&ap) as a no-op, and turn __builtin_va_copy
   into a simple pointer assignment.  */
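
/* For example, on a target whose va_list is a plain character pointer
   (target-dependent; shown for illustration):
     __builtin_va_start (&ap, 0)   becomes   ap = __builtin_next_arg (0)
     __builtin_va_copy (&d, s)     becomes   d = s
     __builtin_va_end (&ap)        is deleted as a no-op.  */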

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
                       && (TREE_TYPE (cfun_va_list) == void_type_node
                           || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
        return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
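
/* For example (illustrative): a call
     t_1 = __builtin_constant_p (n_2);
   that survives to this point is resolved to 0 below, on the grounds
   that if n_2 were ever going to look like a compile-time constant,
   it would have been folded during propagation already.  */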

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;

          stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }

          callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
              gsi_next (&i);
              continue;
            }

          fcode = DECL_FUNCTION_CODE (callee);

          result = gimple_fold_builtin (stmt);

          if (result)
            gimple_remove_stmt_histograms (cfun, stmt);

          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
              {
              case BUILT_IN_CONSTANT_P:
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
                result = integer_zero_node;
                break;

              case BUILT_IN_STACK_RESTORE:
                result = optimize_stack_restore (i);
                if (result)
                  break;
                gsi_next (&i);
                continue;

              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
                result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */

              default:
                gsi_next (&i);
                continue;
              }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
            {
              gimplify_and_update_call_from_tree (&i, result);
              todoflags |= TODO_update_address_taken;
            }

          stmt = gsi_stmt (i);
          update_stmt (stmt);

          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
              && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
              print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }

          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
            {
              gsi_next (&i);
              continue;
            }
          callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
            gsi_next (&i);
        }
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};