/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
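
/* As a small worked example of the propagation described above, given

     x_1 = 4;
     y_2 = x_1 + 6;
     if (y_2 > 5) ...

   visiting the first statement sets x_1 to CONSTANT 4, simulating the
   second statement then sets y_2 to CONSTANT 10, and the conditional
   folds to true, so only the true edge is marked executable.  */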
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
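
/* As an illustration of the (value, mask) encoding: value == 0x8 with
   mask == 0x7 describes a CONSTANT whose low three bits are unknown
   and whose remaining bits are those of 0x8, i.e. any of the values
   0x8 through 0xf.  A mask of zero denotes a fully known constant and
   a mask of all ones carries no bit information at all.  */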
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
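
/* For example, an SSA name for a local variable that is used before
   being initialized gets UNDEFINED as its default value, while an SSA
   name defined by "x_1 = c", where 'c' is a read-only symbol with a
   known constant initializer, gets CONSTANT per rule 1.  */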
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

   x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
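
/* For example, moving from the CONSTANT &x to the partial integer
   constant (value 0, mask ~3) is a valid transition: an address
   constant may be replaced by the bit information derived from its
   alignment.  Shrinking the mask of an INTEGER_CST without keeping
   the surviving known bits equal is not.  */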
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
			      &bitsize, &bitpos, &offset,
			      &mode, &align, &align, false);
  if (TREE_CODE (base) == MISALIGNED_INDIRECT_REF)
    val = get_value_for_expr (TREE_OPERAND (base, 0), true);
  else if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
			   TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
	   && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
	       > BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
		   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
		    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr),
			 shwi_to_double_int (bitpos / BITS_PER_UNIT),
			 double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
	val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
	val.value = NULL_TREE;
    }

  /* ??? We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr), value_to_double_int (oval),
			 oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	}
      else
	{
	  val.lattice_val = CONSTANT;
	  val.value = double_int_to_tree (TREE_TYPE (expr), value);
	}
    }

  return val;
}
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
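      /* For example, a lattice value whose low four mask bits are zero
	 and whose value ends in binary 0100 describes a pointer of the
	 form 16*N + 4: align becomes 16 and misalign becomes 4.  */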
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj          = Ci         if (i == j)
		Ci   M Cj          = VARYING    if (i != j)
   */
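
/* For INTEGER_CSTs the meet is refined bitwise; meeting 9 and 11, for
   instance, does not drop to VARYING: 9 ^ 11 == 2, so only bit 1
   becomes unknown and the result is the partial constant (value 9,
   mask 2), which still records that the low bit is set.  */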
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  new_val.mask = double_int_zero;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_constant_value (rhs);
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case MEM_REF:
	  /* ??? We could handle this case.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  base = get_base_address (base);
	  if (!base
	      || TREE_CODE (base) != VAR_DECL)
	    return NULL_TREE;

	  /* Fallthru.  */
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((tem = get_constant_value (idx))
	      && TREE_CODE (tem) == INTEGER_CST)
	    idx = tem;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  {
	    STRIP_NOPS (cval);
	    if (TREE_CODE (cval) == ADDR_EXPR)
	      {
		tree base = get_base_address (TREE_OPERAND (cval, 0));
		if (base && TREE_CODE (base) == VAR_DECL)
		  add_referenced_var (base);
	      }
	    return cval;
	  }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
	  && (tem = get_constant_value (base)))
	base = tem;
      if (TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (DECL_P (base)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (base))
	      && integer_zerop (TREE_OPERAND (t, 1)))
	    {
	      tree res = get_symbol_constant_value (base);
	      if (res
		  && !useless_type_conversion_p
			(TREE_TYPE (t), TREE_TYPE (res)))
		res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
	      return res;
	    }

	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || !targetm.binds_local_p (base))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ??? Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		STRIP_NOPS (cval);
		if (TREE_CODE (cval) == ADDR_EXPR)
		  {
		    tree base = get_base_address (TREE_OPERAND (cval, 0));
		    if (base && TREE_CODE (base) == VAR_DECL)
		      add_referenced_var (base);
		  }
		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
		else
		  return NULL_TREE;
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing a values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
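
    /* For instance, adding the constant 1 to a value known to be either
       0 or 1 (value 0, mask 1) gives lo == 1 and hi == 2, so lo ^ hi
       == 3: the unknown carry out of bit 0 makes bit 1 unknown as
       well, and the result is (value 1, mask 3).  */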

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }
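
    /* For instance, a multiple of 4 (two trailing zero bits) times a
       multiple of 2 (one trailing zero bit) is known to be a multiple
       of 8: the three low bits of the result are known zero even when
       nothing else about the operands is known.  */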

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* If we know the most significant bits we know the values
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches. */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (rhs1), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;
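
	      /* On a target where BIGGEST_ALIGNMENT is 128 bits, for
		 example, the recorded mask is ~(HOST_WIDE_INT) 15: the
		 pointer returned by alloca has its low four bits known
		 to be zero even though its value is otherwise unknown.  */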
	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
2274 /* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2275 if it can determine which edge will be taken. Otherwise, return
2276 SSA_PROP_VARYING. */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
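
/* Schematically (an illustration, not from the original sources):

     if (x_1 > 3) goto <bb 3>; else goto <bb 4>;

   with x_1 CONSTANT 10 and an all-zero mask evaluates to true, so
   find_taken_edge hands back the edge to <bb 3> and only that edge
   enters the simulation worklist.  */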

/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
        /* If the statement is an assignment that produces a single
           output value, evaluate its RHS to see if the lattice value of
           its output has changed.  */
        return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
        /* A value-returning call also performs an assignment.  */
        if (gimple_call_lhs (stmt) != NULL_TREE)
          return visit_assignment (stmt, output_p);
        break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
        /* If STMT is a conditional branch, see if we can determine
           which branch will be taken.  */
        /* FIXME.  It appears that we should be able to optimize
           computed GOTOs here as well.  */
        return visit_cond_stmt (stmt, taken_edge_p);

      default:
        break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
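
/* As a hedged example of the fallback above: an SSA name defined by a
   statement CCP does not model, e.g. the output of an inline asm

     __asm__ ("..." : "=r" (x_5));

   is dropped straight to VARYING, and returning SSA_PROP_VARYING
   tells the propagator to consider all outgoing edges of the block
   executable.  */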

/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  return 0;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect /* todo_flags_finish */
 }
};

/* Try to optimize out __builtin_stack_restore.  Remove it if another
   __builtin_stack_restore appears later in the same basic block with
   no calls or ASM_EXPRs in between, or if this block's only outgoing
   edge leads to EXIT_BLOCK and no calls or ASM_EXPRs follow this
   __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow a single outgoing edge whose destination is the exit block,
     or no successors at all.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
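
/* A hedged before/after sketch (names are illustrative only):

     p_1 = __builtin_stack_save ();
     ...                              <-- no calls or asms
     __builtin_stack_restore (p_1);   <-- removable
     __builtin_stack_restore (p_1);

   The earlier restore is made redundant by the later one, so
   returning integer_zero_node lets the caller delete it; a
   single-use __builtin_stack_save is rewritten to a constant here so
   it can be cleaned up as dead code as well.  */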

/* If the va_list type is a simple pointer and nothing special is
   needed, optimize __builtin_va_start (&ap, 0) into
   ap = __builtin_next_arg (0), fold __builtin_va_end (&ap) away as a
   no-op, and turn __builtin_va_copy into a simple pointer
   assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
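
/* For targets where va_list is a plain pointer, the cases above
   amount to (a hedged illustration):

     __builtin_va_start (&ap, 0);  =>  ap = __builtin_next_arg (0);
     __builtin_va_copy (&d, s);    =>  d = s;
     __builtin_va_end (&ap);       =>  (deleted)

   Each result is handed back as a MODIFY_EXPR (or integer_zero_node)
   for the caller to gimplify into the statement stream.  */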

/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin; there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
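
/* A hedged example of the loop above: a call

     b_1 = __builtin_constant_p (n_2);

   that survived constant propagation is resolved to 0 here, while a
   builtin that folds outright, e.g. strlen ("abc"), is replaced with
   its constant value; the rewritten statement is then retried in
   case folding exposed another foldable builtin.  */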

struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};