/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
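
/* Illustrative example of the value/mask encoding above (not used by
   the pass itself): a CONSTANT lattice value with value == 0x10 and
   mask == 0x3 describes a run-time quantity X with X & ~0x3 == 0x10,
   i.e. one of 0x10, 0x11, 0x12 or 0x13.  A fully known constant has
   mask == 0, while a mask of all ones carries no bit information.  */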
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL_TREE;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }
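
  /* Worked example for the widening above (illustrative): if the old
     value was 4 (binary 100, mask 0) and the new one is 6 (binary 110,
     mask 0), DIFF is 010, so the result is value 6 with mask 010 --
     only the disagreeing bit is demoted to unknown while the other
     bits stay known.  */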
  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  align = get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
			  ? double_int_mask (TYPE_PRECISION (type))
			  : double_int_minus_one,
			  uhwi_to_double_int (align / BITS_PER_UNIT - 1));
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
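
/* Illustrative example for the function above: for &a with A known to
   be 16-byte aligned and BITPOS zero, ALIGN is 128 bits, so VAL.MASK
   clears the low four bits and VAL.VALUE is 0 -- the address is known
   to be 0 mod 16 while all higher bits remain unknown.  */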
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* A control statement must be simulated at least once;
	     if it never is, its outgoing edges are never added to
	     the worklist.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
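
      /* E.g. (illustrative): a mask whose low bits are ...111000 gives
	 TEM & -TEM == 8, i.e. an 8-byte alignment; a value whose low
	 bits are 100 then yields misalign 4: the pointer is 4 mod 8.  */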
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

   		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	  = double_int_ior (double_int_ior (val1->mask,
					    val2->mask),
			    double_int_xor (tree_to_double_int (val1->value),
					    tree_to_double_int (val2->value)));
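      /* Illustrative example: meeting 4 (binary 100) with 6 (binary
	 110), both fully known, yields value 4 with mask 010 -- bit 1
	 disagrees and becomes unknown while bits 0 and 2 stay known.  */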
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;
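
      /* Worked example for the AND rule above (illustrative):
	 r1 = 11xx (value 1100, mask 0011) and r2 = 0110 (fully known)
	 give mask = 0011 & (1111 & 0110) = 0010 and val = 0100, i.e.
	 the result is 01x0 -- either 4 or 6.  */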
    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
	  	(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      /* ??? We can have sizetype related inconsistencies in
		 the IL.  */
	      if ((TREE_CODE (r1type) == INTEGER_TYPE
		   && (TYPE_IS_SIZETYPE (r1type)
		       ? 0 : TYPE_UNSIGNED (r1type))) != uns)
		break;

	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
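
      /* Worked example for the addition above (illustrative): adding
	 10x (value 100, mask 001) and the constant 001 gives LO = 101
	 and HI = 110, so LO ^ HI = 011 and the result is value 101
	 with mask 011 -- only bit 2 is still known.  */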
    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
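	/* Illustrative example: comparing 1x0 (value 100, mask 010)
	   with the constant 001, the known bits already differ, so
	   EQ_EXPR evaluates to a known false and NE_EXPR to a known
	   true.  */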
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = (TREE_CODE (r1type) == INTEGER_TYPE
	       && TYPE_IS_SIZETYPE (r1type) ? 0 : TYPE_UNSIGNED (r1type));
	/* ??? We can have sizetype related inconsistencies in the IL.  */
	if ((TREE_CODE (r2type) == INTEGER_TYPE
	     && TYPE_IS_SIZETYPE (r2type) ? 0 : TYPE_UNSIGNED (r2type)) != uns)
	  break;

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;
  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
	return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_double_int (ptrval), ptrval.mask,
		     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
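
/* Example for the function above (illustrative): for
   __builtin_assume_aligned (p, 16, 4) the pointer value is ANDed with
   -16 at the bit-lattice level and the misalignment 4 is ORed back
   into the value, yielding a pointer known to be 4 mod 16.  */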
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches. */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  tree fndecl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
		  	   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
		  	   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;
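
	    /* E.g. (illustrative): with a 128-bit alignment the masks
	       above are ~15, encoding a pointer whose low four bits
	       are known to be zero while all higher bits remain
	       unknown.  */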
	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt);
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
        /* If the statement is an assignment that produces a single
           output value, evaluate its RHS to see if the lattice value of
           its output has changed.  */
        return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
        /* A value-returning call also performs an assignment.  */
        if (gimple_call_lhs (stmt) != NULL_TREE)
          return visit_assignment (stmt, output_p);
        break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
        /* If STMT is a conditional branch, see if we can determine
           which branch will be taken.  */
        /* FIXME.  It appears that we should be able to optimize
           computed GOTOs here as well.  */
        return visit_cond_stmt (stmt, taken_edge_p);

      default:
        break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}


struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);
	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_ASSUME_ALIGNED:
		/* Remove __builtin_assume_aligned.  */
		result = gimple_call_arg (stmt, 0);
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_update_ssa			/* todo_flags_finish */
 }
};