/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.

   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
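
/* As a small source-level illustration of what the pass achieves (an
   invented example, not taken from the references above): given

     int f (void)
     {
       int x = 4;
       if (x > 3)
	 return x + 1;
       return 0;
     }

   CCP discovers x_1 = 4 (CONSTANT), folds the predicate x_1 > 3 to
   true, marks the else edge not executable, and the final
   substitution reduces the body to "return 5;".  */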

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
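
/* For instance (an illustrative reading of the mask definition above,
   not original text): lattice_val == CONSTANT with value == 0 and a
   mask whose only set bits are the low three describes a value known
   to be a multiple of 8 whose remaining bits are unknown, while a
   mask of zero describes a fully known constant.  */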

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}

/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
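
/* For example (illustration only): an SSA name defined by reading
   "static const int n = 42;" starts out CONSTANT 42 (rule 1), an
   uninitialized local scalar starts out UNDEFINED (rule 2), and a
   name defined by a statement this pass cannot evaluate starts out
   VARYING (rule 3).  */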

static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}

/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  prop_value_t *val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}

/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0.0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */

static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
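
/* For example (illustration only): UNDEFINED -> CONSTANT 3 -> VARYING
   is a valid sequence of transitions, while CONSTANT 3 -> CONSTANT 4
   or VARYING -> CONSTANT is rejected by the checks above; for
   INTEGER_CSTs the bits still claimed known (those outside NEW_VAL's
   mask) must keep their old values.  */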

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ???  This doesn't seem to be the best place to enforce this.  */
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }

  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ???  We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}

static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);

/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);
  else
    return double_int_zero;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  prop_value_t val;
  HOST_WIDE_INT bitsize, bitpos;
  tree base, offset;
  enum machine_mode mode;
  int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  base = get_inner_reference (TREE_OPERAND (expr, 0),
			      &bitsize, &bitpos, &offset,
			      &mode, &align, &align, false);
  if (TREE_CODE (base) == MEM_REF)
    val = bit_value_binop (PLUS_EXPR, TREE_TYPE (expr),
			   TREE_OPERAND (base, 0), TREE_OPERAND (base, 1));
  else if (base
	   && ((align = get_object_alignment (base, BIGGEST_ALIGNMENT))
	       > BITS_PER_UNIT))
    {
      val.lattice_val = CONSTANT;
      /* We assume pointers are zero-extended.  */
      val.mask = double_int_and_not
		   (double_int_mask (TYPE_PRECISION (TREE_TYPE (expr))),
		    uhwi_to_double_int (align / BITS_PER_UNIT - 1));
      val.value = build_int_cst (TREE_TYPE (expr), 0);
    }
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }

  if (bitpos != 0)
    {
      double_int value, mask;
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr),
			 shwi_to_double_int (bitpos / BITS_PER_UNIT),
			 double_int_zero);
      val.lattice_val = double_int_minus_one_p (mask) ? VARYING : CONSTANT;
      val.mask = mask;
      if (val.lattice_val == CONSTANT)
	val.value = double_int_to_tree (TREE_TYPE (expr), value);
      else
	val.value = NULL_TREE;
    }

  /* ???  We should handle i * 4 and more complex expressions from
     the offset, possibly by just expanding get_value_for_expr.  */
  if (offset != NULL_TREE)
    {
      double_int value, mask;
      prop_value_t oval = get_value_for_expr (offset, true);
      bit_value_binop_1 (PLUS_EXPR, TREE_TYPE (expr), &value, &mask,
			 TREE_TYPE (expr), value_to_double_int (val), val.mask,
			 TREE_TYPE (expr), value_to_double_int (oval),
			 oval.mask);
      val.mask = mask;
      if (double_int_minus_one_p (mask))
	{
	  val.lattice_val = VARYING;
	  val.value = NULL_TREE;
	}
      else
	{
	  val.lattice_val = CONSTANT;
	  val.value = double_int_to_tree (TREE_TYPE (expr), value);
	}
    }

  return val;
}
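
/* As an illustration (hypothetical numbers, not original text): for
   &a[1] where 'a' is a 16-byte aligned array of 4-byte ints, the base
   contributes value 0 with the low four bits known zero, and BITPOS
   adds 4, so the result is value 4 with only the low four bits known:
   the pointer is 4 modulo 16.  */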

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
	      || code == GIMPLE_CALL
	      || code == GIMPLE_COND
	      || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  ;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to VARYING even if there were CONSTANT operands.  */
  if (has_undefined_operand)
    return VARYING;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}

/* Debug count support.  Reset the values of ssa names
   VARYING when the total number ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      struct ptr_info_def *pi;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
      tem = val->mask.low;
      align = (tem & -tem);
      if (align == 1)
	continue;

      pi = get_ptr_info (name);
      pi->align = align;
      pi->misalign = TREE_INT_CST_LOW (val->value) & (align - 1);
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;
  return something_changed;
}

/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any M UNDEFINED	  = any
		any M VARYING	  = VARYING
		Ci  M Cj	  = Ci		if (i == j)
		Ci  M Cj	  = VARYING	if (i != j)
   */

static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
	 Nothing to do.  VAL1 already contains the value we want.  */
    }
  else if (val1->lattice_val == VARYING
	   || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

	 For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci if (i == j)
	 Ci M Cj = VARYING if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
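
/* A worked example of the INTEGER_CST case above (illustration only):
   meeting CONSTANT 3 (mask 0) with CONSTANT 7 (mask 0) sets the mask
   to 3 ^ 7 == 4, so bit 2 becomes unknown while the other bits stay
   known; the result describes "some X with X & ~4 == 3".  Meeting
   anything with a mask of all ones drops to VARYING.  */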

/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
    case UNDEFINED:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
		   "\n    Argument #%d (%d -> %d %sexecutable)\n",
		   i, e->src->index, e->dest->index,
		   (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}

/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}

/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return get_constant_value (rhs);
		}
	      /* Handle propagating invariant addresses into address operations.
		 The folding we do here matches that in tree-ssa-forwprop.c.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR)
		{
		  tree *base;
		  base = &TREE_OPERAND (rhs, 0);
		  while (handled_component_p (*base))
		    base = &TREE_OPERAND (*base, 0);
		  if (TREE_CODE (*base) == MEM_REF
		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree ret, save = *base;
			  tree new_base;
			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
						  unshare_expr (val),
						  TREE_OPERAND (*base, 1));
			  /* We need to return a new tree, not modify the IL
			     or share parts of it.  So play some tricks to
			     avoid manually building it.  */
			  *base = new_base;
			  ret = unshare_expr (rhs);
			  recompute_tree_invariant_for_addr_expr (ret);
			  *base = save;
			  return ret;
			}
		    }
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, list;

		  list = NULL_TREE;
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = valueize_op (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			list = tree_cons (NULL_TREE, val, list);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), nreverse (list));
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val)
			return fold_unary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
		      if (val
			  && TREE_CODE (val) == ADDR_EXPR)
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref (rhs);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for pointer type conversions
		 do not apply here.  Substitution later will only substitute to
		 allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0)))
		{
		  tree tem;
		  /* Try to re-construct array references on-the-fly.  */
		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
						  TREE_TYPE (op0))
		      && ((tem = maybe_fold_offset_to_address
			   (loc,
			    op0, integer_zero_node, TREE_TYPE (lhs)))
			  != NULL_TREE))
		    return tem;
		  return op0;
		}

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr
			   (fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}
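
	      /* E.g. (an illustration, not original text): with op0 = &a
		 and op1 = 4, the code above rewrites the POINTER_PLUS_EXPR
		 into the invariant address &MEM[(void *)&a + 4], which can
		 be propagated further.  */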
	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
	      tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
	      tree op2 = valueize_op (gimple_assign_rhs3 (stmt));

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
      break;

    case GIMPLE_CALL:
      {
	tree fn = valueize_op (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = valueize_op (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    default:
      gcc_unreachable ();
    }
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  tree base, ctor, idx, field;
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  tree tem;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case MEM_REF:
	  /* ???  We could handle this case.  */
	  if (!integer_zerop (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  base = get_base_address (base);
	  if (!base
	      || TREE_CODE (base) != VAR_DECL)
	    return NULL_TREE;

	  /* Fallthru.  */
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
		  && !varpool_get_node (base)->const_value_known))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the index.  If we have an SSA_NAME, try to resolve it
	 with the current lattice value for the SSA_NAME.  */
      idx = TREE_OPERAND (t, 1);
      switch (TREE_CODE (idx))
	{
	case SSA_NAME:
	  if ((tem = get_constant_value (idx))
	      && TREE_CODE (tem) == INTEGER_CST)
	    idx = tem;
	  else
	    return NULL_TREE;
	  break;

	case INTEGER_CST:
	  break;

	default:
	  return NULL_TREE;
	}

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  tree low_bound = array_ref_low_bound (t);
	  double_int low_bound_cst;
	  double_int index_cst;
	  double_int length_cst;
	  bool signed_p = TYPE_UNSIGNED (TREE_TYPE (idx));

	  if (TREE_CODE (idx) != INTEGER_CST
	      || !INTEGRAL_TYPE_P (TREE_TYPE (t))
	      || TREE_CODE (low_bound) != INTEGER_CST)
	    return NULL_TREE;
	  low_bound_cst = tree_to_double_int (low_bound);
	  index_cst = tree_to_double_int (idx);
	  length_cst = uhwi_to_double_int (TREE_STRING_LENGTH (ctor));
	  index_cst = double_int_sub (index_cst, low_bound_cst);
	  if ((TYPE_MODE (TREE_TYPE (t))
	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && double_int_cmp (index_cst, length_cst, signed_p) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[double_int_to_uhwi (index_cst)]));
	  return NULL_TREE;
	}

      /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (tree_int_cst_equal (cfield, idx))
	  return canonicalize_constructor_val (cval);
      break;

    case COMPONENT_REF:
      /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
	 DECL_INITIAL.  If BASE is a nested reference into another
	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
	 the inner reference.  */
      base = TREE_OPERAND (t, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
	      || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
		  && !varpool_get_node (base)->const_value_known))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case ARRAY_REF:
	case COMPONENT_REF:
	  ctor = fold_const_aggregate_ref (base);
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || TREE_CODE (ctor) != CONSTRUCTOR
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      field = TREE_OPERAND (t, 1);

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	if (cfield == field
	    /* FIXME: Handle bit-fields.  */
	    && ! DECL_BIT_FIELD (cfield))
	  return canonicalize_constructor_val (cval);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    case MEM_REF:
      /* Get the base object we are accessing.  */
      base = TREE_OPERAND (t, 0);
      if (TREE_CODE (base) == SSA_NAME
	  && (tem = get_constant_value (base)))
	base = tem;
      if (TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
      switch (TREE_CODE (base))
	{
	case VAR_DECL:
	  if (DECL_P (base)
	      && !AGGREGATE_TYPE_P (TREE_TYPE (base))
	      && integer_zerop (TREE_OPERAND (t, 1)))
	    {
	      tree res = get_symbol_constant_value (base);
	      if (res
		  && !useless_type_conversion_p
			(TREE_TYPE (t), TREE_TYPE (res)))
		res = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), res);
	      return res;
	    }

	  if (!TREE_READONLY (base)
	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
	      || ((TREE_STATIC (base) || DECL_EXTERNAL (base))
		  && !varpool_get_node (base)->const_value_known))
	    return NULL_TREE;

	  ctor = DECL_INITIAL (base);
	  break;

	case STRING_CST:
	case CONSTRUCTOR:
	  ctor = base;
	  break;

	default:
	  return NULL_TREE;
	}

      if (ctor == NULL_TREE
	  || (TREE_CODE (ctor) != CONSTRUCTOR
	      && TREE_CODE (ctor) != STRING_CST)
	  || !TREE_STATIC (ctor))
	return NULL_TREE;

      /* Get the byte offset.  */
      idx = TREE_OPERAND (t, 1);

      /* Fold read from constant string.  */
      if (TREE_CODE (ctor) == STRING_CST)
	{
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_MODE (TREE_TYPE (t))
		  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
		  == MODE_INT)
	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
	    return build_int_cst_type (TREE_TYPE (t),
				       (TREE_STRING_POINTER (ctor)
					[TREE_INT_CST_LOW (idx)]));
	  return NULL_TREE;
	}

      /* ???  Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  && (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
	  && integer_zerop
	       (int_const_binop
		  (TRUNC_MOD_EXPR, idx,
		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
	{
	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
				 size_int (GET_MODE_SIZE
					     (TYPE_MODE (TREE_TYPE (t)))), 0);
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
	    if (tree_int_cst_equal (cfield, idx))
	      {
		cval = canonicalize_constructor_val (cval);
		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
		  return cval;
		else if (CONSTANT_CLASS_P (cval))
		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
	      }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = (TREE_CODE (rtype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (rtype)
	       ? 0 : TYPE_UNSIGNED (rtype));
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	       ? 0 : TYPE_UNSIGNED (type));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
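
/* Conversion example for the CASE_CONVERT arm above (illustration
   only): narrowing value 0x1f0 with mask 0x00f from a 16-bit to an
   8-bit unsigned type keeps only the low eight bits, yielding value
   0xf0 with mask 0x0f -- the high nibble of the result is known, the
   low one is not.  */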

/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = (TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_IS_SIZETYPE (type) ? 0 : TYPE_UNSIGNED (type));
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;
  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ???  We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;

    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
	*val = lo;
	break;
      }
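
      /* Worked example (an illustration, not original text): r1val = 8
	 with r1mask = 3 (low two bits unknown) plus a fully known 4
	 gives lo = 8 + 4 = 12 and hi = 11 + 4 = 15, so
	 *mask = 3 | (12 ^ 15) = 3: bits 2 and 3 of the sum are known,
	 the low two bits remain unknown.  */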

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }

    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fallthru.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;
	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* If we know the most significant bits we know the value
	   ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}

/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ???  Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;
  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ???  Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}

/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }

  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }

  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && likelyvalue == CONSTANT
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (rhs1), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) BIGGEST_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}

      val.value = NULL_TREE;
    }

  return val;
}
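
/* For the builtin cases above this means, e.g., that the result of
   malloc is modelled as value 0 with all bits below the ABI alignment
   known to be zero (illustration: with a MALLOC_ABI_ALIGNMENT of 128
   bits the mask is ~15, i.e. a 16-byte aligned but otherwise unknown
   pointer).  */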

/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (gimple_call_fn (stmt))));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}

/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */
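/* For example (hypothetical): given

       if (x_1 > 10)

   with x_1 known to be CONSTANT 3, the predicate evaluates to constant
   zero, find_taken_edge returns the false edge, and only that edge is
   fed back to the propagation engine.  */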
static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */
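/* For instance (hypothetical SSA names): visiting x_4 = y_3 + 1 with
   y_3 CONSTANT 2 sets x_4 to CONSTANT 3 and returns
   SSA_PROP_INTERESTING so statements using x_4 are re-simulated, while
   visiting a GIMPLE_ASM marks every definition it makes VARYING.  */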
static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* If the statement is an assignment that produces a single
	 output value, evaluate its RHS to see if the lattice value of
	 its output has changed.  */
      return visit_assignment (stmt, output_p);

    case GIMPLE_CALL:
      /* A value-returning call also performs an assignment.  */
      if (gimple_call_lhs (stmt) != NULL_TREE)
	return visit_assignment (stmt, output_p);
      break;

    case GIMPLE_COND:
    case GIMPLE_SWITCH:
      /* If STMT is a conditional branch, see if we can determine
	 which branch will be taken.  */
      /* FIXME.  It appears that we should be able to optimize
	 computed GOTOs here as well.  */
      return visit_cond_stmt (stmt, taken_edge_p);

    default:
      break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    return (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  else
    return 0;
}

static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}

struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect	/* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */
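/* A sketch of the transformation (hypothetical GIMPLE):

       saved_1 = __builtin_stack_save ();
       ...				<- no calls or asms here
       __builtin_stack_restore (saved_1);
       ...				<- no calls or asms here
       __builtin_stack_restore (saved_2);

   The first restore is redundant because the second restore pops at
   least as much stack; it is therefore deleted, and if saved_1 has no
   other use the dominating __builtin_stack_save is turned into a
   no-op assignment as well.  */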
static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }

 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If the va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   optimize __builtin_va_end (&ap) out as a NOP, and turn
   __builtin_va_copy into a simple pointer assignment.  */
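/* E.g. (hypothetical, on a target whose va_list is a plain character
   pointer):

       __builtin_va_start (&ap, 0);   becomes   ap = __builtin_next_arg (0);
       __builtin_va_copy (&d, s);     becomes   d = s;
       __builtin_va_end (&ap);        is deleted outright.  */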
static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, built_in_decls[BUILT_IN_NEXT_ARG],
				 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */
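/* For example, gimple_fold_builtin can replace strlen ("hello") with 5
   once constant propagation has exposed the constant argument.  The
   cases handled inline below (__builtin_constant_p, the stack
   save/restore pair and the stdarg builtins) are those that
   gimple_fold_builtin does not handle itself.  */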
static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
		result = integer_zero_node;
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

	  old_stmt = stmt;
	  if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func
    | TODO_verify_ssa
    | TODO_update_ssa			/* todo_flags_finish */
 }
};