/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      will be simulated again.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
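
/* As a purely illustrative example (hypothetical GIMPLE, not taken
   from any particular test case): given

	x_1 = 4;
	y_2 = x_1 + 2;
	if (y_2 > 10)
	  ...

   simulation assigns x_1 the lattice value CONSTANT 4, which puts the
   uses of x_1 on the worklist; y_2 then folds to CONSTANT 6, the
   predicate folds to false, and only the false edge is marked
   executable, so the guarded block is never simulated.  */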
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
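
/* For illustration: a prop_value_t with lattice_val == CONSTANT,
   value == 8 (binary 1000) and mask == 3 (binary 0011) encodes a
   value whose upper bits are known to be 10 but whose low two bits
   are unknown, i.e. any of 8, 9, 10 or 11.  A mask of zero means all
   bits, and thus the whole value, are known.  */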
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
          || double_int_zero_p (val.mask))
        print_generic_expr (outf, val.value, dump_flags);
      else
        {
          double_int cval = double_int_and_not (tree_to_double_int (val.value),
                                                val.mask);
          fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                   prefix, cval.high, cval.low);
          fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
                   val.mask.high, val.mask.low);
        }
      break;
    default:
      gcc_unreachable ();
    }
}
/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
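
/* For example, under the rules above an SSA name defined by a nop
   statement for a local 'int i;' starts out UNDEFINED, while one
   whose defining assignment reads from 'static const int n = 42;'
   starts out CONSTANT 42 via get_symbol_constant_value.  */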
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
     This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
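
/* For example, moving from CONSTANT &x (an ADDR_EXPR) to a partially
   known INTEGER_CST describing only the alignment bits of &x is a
   valid transition, whereas turning a fully known INTEGER_CST into
   one that disagrees in a bit that was previously known is not.  */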
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
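
/* For illustration, with hypothetical values: if the old value was
   CONSTANT 4 with mask 0 and the new value is CONSTANT 6 with mask 0,
   the bits that changed (4 ^ 6 == 2) are folded into the new mask, so
   the name becomes "CONSTANT 6 with bit 1 unknown" instead of
   oscillating between two incomparable constants.  */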
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
			      &bitsize, &bitpos, &offset,
			      &mode, &align, &align, false);
  if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
			   TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
	   && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
		> BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
		   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
		    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr),
			 shwi_to_double_int (bitpos / BITS_PER_UNIT),
			 double_int_minus_one);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
	val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
	val.value = NULL_TREE;
    }

  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr), value_to_double_int (oval),
			 oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	}
      else
	{
	  val.lattice_val = CONSTANT;
	  val.value = double_int_to_tree (TREE_TYPE (expr), value);
	}
    }

  return val;
}
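
/* Hypothetical example: for '&a[1]' where 'a' is a 16-byte aligned
   array of 4-byte ints, the base contributes value 0 with the low
   four bits known (zero), and the bitpos of 32 bits then adds 4,
   yielding CONSTANT 4 with only the low four bits known -- an
   address known to be congruent to 4 modulo 16.  */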
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
          const_val[i].value = NULL_TREE;
        }
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	  = double_int_ior (double_int_ior (val1->mask,
					    val2->mask),
			    double_int_xor (tree_to_double_int (val1->value),
					    tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
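
/* For illustration, with hypothetical lattice values: meeting
   CONSTANT 3 (binary 011) with CONSTANT 1 (binary 001) masks the
   differing bit (3 ^ 1 == 2), leaving a partial constant whose low
   bit is known to be 1 and whose bit 1 is unknown, rather than
   dropping straight to VARYING.  */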
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined by calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      new_val = *old_val;
      break;

    case UNDEFINED:
      new_val.lattice_val = UNDEFINED;
      new_val.value = NULL_TREE;
      break;

    default:
      gcc_unreachable ();
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP, or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return get_constant_value (rhs);
                }
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

              if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }
          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree lhs = gimple_assign_lhs (stmt);
              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

              /* Conversions are useless for CCP purposes if they are
                 value-preserving.  Thus the restrictions that
                 useless_type_conversion_p places for pointer type conversions
                 do not apply here.  Substitution later will only substitute to
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
                  && POINTER_TYPE_P (TREE_TYPE (op0)))
                {
                  tree tem;
                  /* Try to re-construct array references on-the-fly.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
                           (loc,
                            op0, integer_zero_node, TREE_TYPE (lhs)))
                          != NULL_TREE))
                    return tem;
                  return op0;
                }

              return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
            }

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
              tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
	         further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

              return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
              tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
              tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

              return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
        /* Handle comparison operators that can appear in GIMPLE form.  */
        tree op0 = valueize_op (gimple_cond_lhs (stmt));
        tree op1 = valueize_op (gimple_cond_rhs (stmt));
        enum tree_code code = gimple_cond_code (stmt);
        return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
        /* Return the constant switch index.  */
        return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}
/* See if we can find constructor defining value of BASE.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, tree *offset)
{
  *offset = NULL;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	*offset = TREE_OPERAND (base, 1);

      base = get_constant_value (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!TREE_READONLY (base)
	  || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !varpool_get_node (base)->const_value_known))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      return fold_const_aggregate_ref (base);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  tree ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      ctor = get_base_constructor (TREE_OPERAND (t, 0), &idx);
      if (idx)
	return NULL_TREE;

      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((tem = get_constant_value (idx))
	      && TREE_CODE (tem) == INTEGER_CST)
	    idx = tem;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  tree low_bound = array_ref_low_bound (t);
	  double_int low_bound_cst;
	  double_int index_cst;
	  double_int length_cst;
	  bool signed_p = TYPE_UNSIGNED (TREE_TYPE (idx));

	  if (TREE_CODE (idx) != INTEGER_CST
	      || !INTEGRAL_TYPE_P (TREE_TYPE (t))
	      || TREE_CODE (low_bound) != INTEGER_CST)
	    return NULL_TREE;
	  low_bound_cst = tree_to_double_int (low_bound);
	  index_cst = tree_to_double_int (idx);
	  length_cst = uhwi_to_double_int (TREE_STRING_LENGTH (ctor));
	  index_cst = double_int_sub (index_cst, low_bound_cst);
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && double_int_cmp (index_cst, length_cst, signed_p) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[double_int_to_uhwi (index_cst)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return canonicalize_constructor_val (cval);
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      ctor = get_base_constructor (TREE_OPERAND (t, 0), &idx);
      if (idx)
	return NULL_TREE;

      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR)
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return canonicalize_constructor_val (cval);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case MEM_REF:
      ctor = get_base_constructor (t, &idx);

      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));

      if (ctor && !AGGREGATE_TYPE_P (TREE_TYPE (ctor))
	  && !idx)
	{
	  if (ctor == NULL_TREE
	      || !useless_type_conversion_p
		   (TREE_TYPE (t), TREE_TYPE (ctor)))
	    ctor = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), ctor);
	  return ctor;
	}

      if (!idx)
	idx = integer_zero_node;

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST))
	return NULL_TREE;

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_MODE (TREE_TYPE (t))
		  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		cval = canonicalize_constructor_val (cval);
		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
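
    /* A worked example of the carry trick above, with hypothetical
       operands: adding a value known to be 10xx (value 8, mask 3) to
       the constant 4 gives lo == 8 + 4 == 12 and hi == 11 + 4 == 15;
       since lo ^ hi == 3, only the already unknown low bits differ,
       so the result is 11xx (value 12, mask 3) -- no carry escapes
       into the known bits.  */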
    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }
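
    /* For example (hypothetical operands): multiplying a value with
       two known trailing zero bits by one with a single known
       trailing zero bit yields a result with at least three trailing
       zero bits, i.e. value 0 with everything but the low three bits
       unknown.  */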
    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* If we know the most significant bits we know the values
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
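
/* To illustrate the cross comparison above, with hypothetical
   unsigned operands: r1 known to be 1xxx (value 8, mask 7) and r2
   known to be exactly 4 satisfy min(r1) == 8 > max(r2) == 4, so
   r1 < r2 folds to constant zero even though r1 itself is not fully
   known.  */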
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (rhs1), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}
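
/* For example, on a target where MALLOC_ABI_ALIGNMENT is 64 bits the
   result of malloc above is modelled as CONSTANT 0 with the low three
   bits known to be zero and all remaining bits unknown; ccp_finalize
   later turns such trailing known-zero bits into pointer alignment
   information.  */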
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
2257 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2258 if it can determine which edge will be taken. Otherwise, return
2259 SSA_PROP_VARYING. */
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
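
/* Illustrative sketch (hypothetical names): if i_3 is CONSTANT 10,
   the predicate "if (i_3 > 4)" evaluates to boolean true, so
   find_taken_edge returns the true edge of the block; only that edge
   is fed to the propagator and the false arm is never simulated.  */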
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */
static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
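
/* Illustrative sketch (hypothetical names): an inline asm such as

     __asm__ ("..." : "=r" (x_5));

   falls through to the default handling above, so x_5 (and any
   virtual definitions of the asm) is pessimistically marked VARYING
   rather than simulated.  */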
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
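
/* Illustrative sketch (hypothetical names): in

     p_1 = __builtin_stack_save ();
     ...
     __builtin_stack_restore (p_1);
     ...no calls or asms in between...
     __builtin_stack_restore (p_1);

   the first restore is redundant, because the second one restores the
   same saved stack pointer anyway; folding the first call to a no-op
   is therefore safe.  */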
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  /* Allow a single outgoing edge to the exit block, or no outgoing
     edges at all.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
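
/* Illustrative sketch (hypothetical names): when the restore above is
   the only use of p_1, its defining statement

     p_1 = __builtin_stack_save ();

   is rewritten by update_call_from_tree into the dead copy "p_1 = 0",
   which later dead-code elimination removes together with the folded
   restore.  */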
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
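
/* Illustrative sketch: on a target whose va_list ABI is a plain char
   pointer, the VA_COPY arm above returns a GENERIC tree of roughly
   the shape

     *&dst = (char *) src;

   i.e. a MODIFY_EXPR performing a simple pointer assignment, which
   the caller then gimplifies back into the statement stream.  */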
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
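
/* Illustrative sketch (hypothetical names): once constants have been
   propagated, calls such as strlen of a string literal become
   foldable, e.g.

     s_1 = "abc";
     n_2 = strlen (s_1);   =>   n_2 = 3;

   gimple_fold_builtin performs folds of this kind below.  */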
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);
	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }
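
	  /* Illustrative sketch (hypothetical names): a call
	     "b_1 = __builtin_constant_p (n_7)" that survived constant
	     propagation resolves to 0 here, on the grounds that if n_7
	     had been constant the call would already have been folded
	     to 1.  */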
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};