1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
48 if (x) goto ... else goto ...
50 Will be transformed into:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
74 if (x) goto ... else goto ...
76 Will be transformed into:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
89 if (x) goto ... else goto ...
91 Will be transformed into:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
123 ptr = (type1*)&type2var;
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
133 ptr2 = ptr + <constant>;
137 ptr2 = &x[constant/elementsize];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
154 Provided that decl has known alignment >= 2, will get turned into
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
162 This will (of course) be extended as other needs arise. */
/* Forward declaration: propagates ADDR_EXPR RHS into all uses of NAME;
   defined later in this file.  */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
/* Forward declaration: flattens a gimple_assign RHS into a single tree of
   type TYPE; defined later in this file.  */
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
/* Walk forward from NAME along a chain of trivial SSA-name copies to the
   first non-copy use statement.  Each step requires NAME to have exactly
   one immediate use (single_imm_use), so only single-use chains are
   followed.  On success *FINAL_NAME_P (if non-NULL) receives the SSA name
   that is actually used by the returned statement.
   NOTE(review): the loop construct around the steps below is elided in
   this view — presumably a loop re-testing each copy destination.  */
178 get_prop_dest_stmt (tree name, tree *final_name_p)
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
/* Report the SSA name at the end of the copy chain to the caller.  */
198 *final_name_p = name;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
/* Walk backward from NAME through trivial copies (and certain pointer
   conversions) to the statement that defines the propagation source.
   SINGLE_USE_ONLY restricts the search to single-use chains;
   *SINGLE_USE_P (when non-NULL and SINGLE_USE_ONLY is false) records
   whether the whole chain up to NAME was single-use.  */
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
/* Each iteration inspects the defining statement of the current NAME.  */
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
/* Only report single-use information when the caller asked for it and
   did not already require single-use chains.  */
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
/* Decide whether the RHS of assignment DEF_STMT is safe to forward
   propagate into uses of its LHS.  Rejects volatile operands, memory
   loads, SSA names appearing in abnormal PHIs, and pointer-to-function
   conversions (whose canonicalization some targets require).  */
261 can_propagate_from (gimple def_stmt)
266 gcc_assert (is_gimple_assign (def_stmt));
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
277 /* Constants can be always propagated. */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
287 /* If the definition is a conversion of a pointer to a function type,
288 then we can not apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
302 /* Remove a copy chain ending in NAME along the defs.
303 If NAME was replaced in its only use then this function can be used
304 to clean up dead stmts. Returns true if cleanup-cfg has to run. */
/* Delete the now-dead copy chain that ends at NAME, walking backwards
   through its defs as long as each statement became unused.  Removing a
   statement may purge dead EH edges; the return value (true) signals that
   cleanup_cfg must run.  Note the local CFG_CHANGED shadows the file-scope
   flag of the same name on purpose — only this chain's effect is returned.  */
307 remove_prop_source_from_use (tree name)
309 gimple_stmt_iterator gsi;
311 bool cfg_changed = false;
/* Stop as soon as the current name still has uses — its def is live.  */
316 if (!has_zero_uses (name))
319 stmt = SSA_NAME_DEF_STMT (name);
320 gsi = gsi_for_stmt (stmt);
321 bb = gimple_bb (stmt);
323 gsi_remove (&gsi, true);
324 cfg_changed |= gimple_purge_dead_eh_edges (bb);
/* If the removed statement was itself a copy, continue with its source.  */
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
337 routines that deal with gimple exclusively . */
/* Flatten the RHS of gimple_assign STMT into a single GENERIC tree of type
   TYPE so it can be fed to the fold() machinery.  Dispatches on the RHS
   class: ternary and binary RHSes are rebuilt via fold_buildN_loc, a unary
   RHS via build1, and a single-operand RHS is returned as-is.  */
340 rhs_to_tree (tree type, gimple stmt)
342 location_t loc = gimple_location (stmt);
343 enum tree_code code = gimple_assign_rhs_code (stmt);
344 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
345 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt),
347 gimple_assign_rhs3 (stmt));
348 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
349 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
350 gimple_assign_rhs2 (stmt));
351 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
352 return build1 (code, type, gimple_assign_rhs1 (stmt));
353 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
354 return gimple_assign_rhs1 (stmt);
359 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
360 the folded result in a form suitable for COND_EXPR_COND or
361 NULL_TREE, if there is no suitable simplified form. If
362 INVARIANT_ONLY is true only gimple_min_invariant results are
363 considered simplified. */
/* Fold the comparison OP0 CODE OP1 of result type TYPE at LOC and
   canonicalize it for use as a COND_EXPR condition.  Returns NULL_TREE
   when folding produced nothing usable, or when INVARIANT_ONLY is set and
   the result is not a gimple_min_invariant.  */
366 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
367 tree op0, tree op1, bool invariant_only)
/* Only comparison codes are meaningful here.  */
371 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
373 t = fold_binary_loc (loc, code, type, op0, op1);
377 /* Require that we got a boolean type out if we put one in. */
378 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
380 /* Canonicalize the combined condition for use in a COND_EXPR. */
381 t = canonicalize_cond_expr_cond (t);
383 /* Bail out if we required an invariant but didn't get one. */
384 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
390 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
391 of its operand. Return a new comparison tree or NULL_TREE if there
392 were no simplifying combines. */
/* Try to simplify the comparison OP0 CODE OP1 by substituting the defining
   statements of its SSA-name operands.  Tries op0 first (more likely to
   fold against a constant op1), then op1, then both together.  Non-single-
   use sources must fold to an invariant (the !single_use*_p argument to
   combine_cond_expr_cond) so no computation gets duplicated.  Returns the
   simplified tree or NULL_TREE.  */
395 forward_propagate_into_comparison (location_t loc,
396 enum tree_code code, tree type,
399 tree tmp = NULL_TREE;
400 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
401 bool single_use0_p = false, single_use1_p = false;
403 /* For comparisons use the first operand, that is likely to
404 simplify comparisons against constants. */
405 if (TREE_CODE (op0) == SSA_NAME)
407 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
408 if (def_stmt && can_propagate_from (def_stmt))
/* Flatten op0's definition to op1's type so fold can combine them.  */
410 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
411 tmp = combine_cond_expr_cond (loc, code, type,
412 rhs0, op1, !single_use0_p);
418 /* If that wasn't successful, try the second operand. */
419 if (TREE_CODE (op1) == SSA_NAME)
421 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
422 if (def_stmt && can_propagate_from (def_stmt))
424 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
425 tmp = combine_cond_expr_cond (loc, code, type,
426 op0, rhs1, !single_use1_p);
432 /* If that wasn't successful either, try both operands. */
433 if (rhs0 != NULL_TREE
434 && rhs1 != NULL_TREE)
435 tmp = combine_cond_expr_cond (loc, code, type,
437 !(single_use0_p && single_use1_p));
442 /* Forward propagate the comparison defined in STMT like
443 cond_1 = x CMP y to uses of the form
447 Returns 1 if a transformation was done and 2 if the CFG should
448 be cleaned up. Else returns 0. */
/* Forward propagate the comparison computed by assignment STMT
   (cond = x CMP y) — first simplifying the comparison itself from its
   operands' defs, then substituting it into a single-use consumer: an
   integral conversion, an EQ/NE test against a constant, or a logical
   negation (TRUTH_NOT_EXPR).  Returns 0 (no change), 1 (changed) or
   2 (changed and cfg cleanup needed).  */
451 forward_propagate_comparison (gimple stmt)
453 tree name = gimple_assign_lhs (stmt);
455 tree tmp = NULL_TREE;
456 int did_something = 0;
458 /* Combine the comparison with defining statements. */
461 tmp = forward_propagate_into_comparison (gimple_location (stmt),
462 gimple_assign_rhs_code (stmt),
464 gimple_assign_rhs1 (stmt),
465 gimple_assign_rhs2 (stmt));
/* A successful simplification replaces STMT's RHS in place; folding to an
   invariant scores 2 since it may expose dead control flow.  */
468 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
469 bool inv = is_gimple_min_invariant (tmp);
470 gimple_assign_set_rhs_from_tree (&gsi, tmp);
471 gcc_assert (gsi_stmt (gsi) == stmt);
473 did_something = MAX (did_something, inv ? 2 : 1);
/* If simplification turned STMT into a non-comparison there is nothing
   more to propagate into consumers.  */
474 if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) != tcc_comparison)
475 return did_something;
480 /* Don't propagate ssa names that occur in abnormal phis. */
481 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
482 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
483 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
484 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
485 return did_something;
487 /* Do not un-cse comparisons. But propagate through copies. */
488 use_stmt = get_prop_dest_stmt (name, &name);
490 return did_something;
492 /* Conversion of the condition result to another integral type. */
493 if (is_gimple_assign (use_stmt)
494 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
495 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
497 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
498 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
500 tree lhs = gimple_assign_lhs (use_stmt);
502 /* We can propagate the condition into a conversion. */
503 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
505 /* Avoid using fold here as that may create a COND_EXPR with
506 non-boolean condition as canonical form. */
507 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
508 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
510 /* We can propagate the condition into X op CST where op
511 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
512 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
514 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
515 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
517 enum tree_code code = gimple_assign_rhs_code (use_stmt);
518 tree cst = gimple_assign_rhs2 (use_stmt);
/* Rebuild the original comparison, then fold it against the EQ/NE test.  */
521 cond = build2 (gimple_assign_rhs_code (stmt),
523 gimple_assign_rhs1 (stmt),
524 gimple_assign_rhs2 (stmt));
526 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
527 code, TREE_TYPE (lhs),
529 if (tmp == NULL_TREE)
530 return did_something;
532 /* We can propagate the condition into a statement that
533 computes the logical negation of the comparison result. */
534 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
536 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* Floating-point comparisons cannot always be inverted when NaNs are
   honored — invert_tree_comparison reports that with ERROR_MARK.  */
537 bool nans = HONOR_NANS (TYPE_MODE (type));
539 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
540 if (code == ERROR_MARK)
541 return did_something;
543 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
544 gimple_assign_rhs2 (stmt));
547 return did_something;
/* Install the combined expression as the consumer's new RHS.  */
550 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
551 bool inv = is_gimple_min_invariant (tmp);
552 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
553 did_something = MAX (did_something, inv ? 2 : 1);
554 use_stmt = gsi_stmt (gsi);
555 update_stmt (use_stmt);
558 if (dump_file && (dump_flags & TDF_DETAILS))
560 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
562 fprintf (dump_file, " Replaced '");
563 print_generic_expr (dump_file, old_rhs, dump_flags);
564 fprintf (dump_file, "' with '");
565 print_generic_expr (dump_file, tmp, dump_flags);
566 fprintf (dump_file, "'\n");
569 /* Remove defining statements. */
570 if (remove_prop_source_from_use (name))
573 did_something = MAX (did_something, 1);
576 return did_something;
579 /* Propagate from the ssa name definition statements of COND_EXPR
580 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
581 Returns zero if no statement was changed, one if there were
582 changes and two if cfg_cleanup needs to run.
584 This must be kept in sync with forward_propagate_into_cond. */
/* Simplify the condition of GIMPLE_COND statement STMT by propagating the
   defining statements of its operands into the comparison.  Returns 0/1/2
   (no change / changed / changed and cfg cleanup needed).  Kept in sync
   with forward_propagate_into_cond, which handles COND_EXPR on assignment
   RHSes.  */
587 forward_propagate_into_gimple_cond (gimple stmt)
589 int did_something = 0;
590 location_t loc = gimple_location (stmt);
593 tree tmp = NULL_TREE;
594 enum tree_code code = gimple_cond_code (stmt);
596 /* We can do tree combining on SSA_NAME and comparison expressions. */
597 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
598 tmp = forward_propagate_into_comparison (loc, code,
600 gimple_cond_lhs (stmt),
601 gimple_cond_rhs (stmt));
605 if (dump_file && tmp)
/* Rebuild the old condition purely for the dump output.  */
607 tree cond = build2 (gimple_cond_code (stmt),
609 gimple_cond_lhs (stmt),
610 gimple_cond_rhs (stmt));
611 fprintf (dump_file, " Replaced '");
612 print_generic_expr (dump_file, cond, 0);
613 fprintf (dump_file, "' with '");
614 print_generic_expr (dump_file, tmp, 0);
615 fprintf (dump_file, "'\n");
618 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
621 /* Remove defining statements. */
/* A condition folded to an invariant may make an edge dead, hence the
   stronger return value.  */
622 if (is_gimple_min_invariant (tmp))
624 else if (did_something == 0)
627 /* Continue combining. */
634 return did_something;
638 /* Propagate from the ssa name definition statements of COND_EXPR
639 in the rhs of statement STMT into the conditional if that simplifies it.
640 Returns zero if no statement was changed, one if there were
641 changes and two if cfg_cleanup needs to run.
643 This must be kept in sync with forward_propagate_into_gimple_cond. */
/* Simplify the COND_EXPR condition on the RHS of the assignment at *GSI_P
   by propagating from the defs of its operands.  A bare SSA_NAME condition
   is handled by folding "name != 0" against its (single-use) definition.
   Returns 0/1/2 as in forward_propagate_into_gimple_cond, with which this
   must be kept in sync.  */
646 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
648 gimple stmt = gsi_stmt (*gsi_p);
649 location_t loc = gimple_location (stmt);
650 int did_something = 0;
653 tree tmp = NULL_TREE;
654 tree cond = gimple_assign_rhs1 (stmt);
656 /* We can do tree combining on SSA_NAME and comparison expressions. */
657 if (COMPARISON_CLASS_P (cond))
658 tmp = forward_propagate_into_comparison (loc, TREE_CODE (cond),
660 TREE_OPERAND (cond, 0),
661 TREE_OPERAND (cond, 1));
662 else if (TREE_CODE (cond) == SSA_NAME)
664 tree name = cond, rhs0;
/* Require a single-use source so we never duplicate a computation.  */
665 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
666 if (!def_stmt || !can_propagate_from (def_stmt))
667 return did_something;
/* Treat the bare SSA name as the test "rhs0 != 0" and try to fold it.  */
669 rhs0 = gimple_assign_rhs1 (def_stmt);
670 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
671 build_int_cst (TREE_TYPE (rhs0), 0),
677 if (dump_file && tmp)
679 fprintf (dump_file, " Replaced '");
680 print_generic_expr (dump_file, cond, 0);
681 fprintf (dump_file, "' with '");
682 print_generic_expr (dump_file, tmp, 0);
683 fprintf (dump_file, "'\n");
686 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
/* The iterator may now point at a replacement statement; refresh STMT.  */
687 stmt = gsi_stmt (*gsi_p);
690 /* Remove defining statements. */
691 if (is_gimple_min_invariant (tmp))
693 else if (did_something == 0)
696 /* Continue combining. */
703 return did_something;
706 /* We've just substituted an ADDR_EXPR into stmt. Update all the
707 relevant data structures to match. */
/* Post-substitution bookkeeping after an ADDR_EXPR was propagated into
   STMT: purge EH edges if the statement can no longer trap (recording that
   the CFG changed), and recompute invariant-ness of a new ADDR_EXPR RHS.  */
710 tidy_after_forward_propagate_addr (gimple stmt)
712 /* We may have turned a trapping insn into a non-trapping insn. */
713 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
714 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
717 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
718 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
721 /* DEF_RHS contains the address of the 0th element in an array.
722 USE_STMT uses type of DEF_RHS to compute the address of an
723 arbitrary element within the array. The (variable) byte offset
724 of the element is contained in OFFSET.
726 We walk back through the use-def chains of OFFSET to verify that
727 it is indeed computing the offset of an element within the array
728 and extract the index corresponding to the given byte offset.
730 We then try to fold the entire address expression into a form
733 If we are successful, we replace the right hand side of USE_STMT
734 with the new address computation. */
/* Turn "ptr = &array[0]; ptr2 = ptr p+ OFFSET" into array indexing
   "ptr2 = &array[index]" by walking OFFSET's def chain to recover INDEX.
   Accepts either index * element_size (when element size > 1), a plain
   index (element size 1, but explicitly NOT a multiplication, see
   PR 37861), or the re-associated form idx * tunit +/- CST * tunit.  */
737 forward_propagate_addr_into_variable_array_index (tree offset,
739 gimple_stmt_iterator *use_stmt_gsi)
742 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
/* TUNIT is the element size in bytes, taken from whichever form of the
   address-of-element-zero DEF_RHS we were given.  */
745 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
746 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
747 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
748 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
751 if (!host_integerp (tunit, 1))
754 /* Get the offset's defining statement. */
755 offset_def = SSA_NAME_DEF_STMT (offset);
757 /* Try to find an expression for a proper index. This is either a
758 multiplication expression by the element size or just the ssa name we came
759 along in case the element size is one. In that case, however, we do not
760 allow multiplications because they can be computing index to a higher
761 level dimension (PR 37861). */
762 if (integer_onep (tunit))
764 if (is_gimple_assign (offset_def)
765 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
772 /* The statement which defines OFFSET before type conversion
773 must be a simple GIMPLE_ASSIGN. */
774 if (!is_gimple_assign (offset_def))
777 /* The RHS of the statement which defines OFFSET must be a
778 multiplication of an object by the size of the array elements.
779 This implicitly verifies that the size of the array elements
781 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
782 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
783 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit)
785 /* The first operand to the MULT_EXPR is the desired index. */
786 index = gimple_assign_rhs1 (offset_def);
788 /* If we have idx * tunit + CST * tunit re-associate that. */
789 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
790 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
791 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
792 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
/* CST must divide evenly by the element size for re-association.  */
793 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
794 gimple_assign_rhs2 (offset_def),
795 tunit)) != NULL_TREE)
797 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
798 if (is_gimple_assign (offset_def2)
799 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
800 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
801 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
803 index = fold_build2 (gimple_assign_rhs_code (offset_def),
805 gimple_assign_rhs1 (offset_def2), tmp);
814 /* Replace the pointer addition with array indexing. */
815 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
816 true, GSI_SAME_STMT);
817 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
/* DEF_RHS is already &array[...]; just swap in the new index.  */
819 new_rhs = unshare_expr (def_rhs);
820 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
/* Otherwise build a fresh ARRAY_REF into the array object.  */
824 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
825 unshare_expr (TREE_OPERAND (def_rhs, 0)),
826 index, integer_zero_node, NULL_TREE);
827 new_rhs = build_fold_addr_expr (new_rhs);
828 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
829 TREE_TYPE (new_rhs)))
831 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
832 NULL_TREE, true, GSI_SAME_STMT);
833 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
837 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
838 use_stmt = gsi_stmt (*use_stmt_gsi);
840 /* That should have created gimple, so there is no need to
841 record information to undo the propagation. */
842 fold_stmt_inplace (use_stmt);
843 tidy_after_forward_propagate_addr (use_stmt);
847 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
848 ADDR_EXPR <whatever>.
850 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
851 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
852 node or for recovery of array indexing from pointer arithmetic.
854 Return true if the propagation was successful (the propagation can
855 be not totally successful, yet things may have been changed). */
/* Try to propagate DEF_RHS (an ADDR_EXPR defining NAME) into the single
   use statement at *USE_STMT_GSI.  Handles, in order: trivial copies and
   conversions (recursing via forward_propagate_addr_expr), constant
   pointer adjustments, MEM_REFs of NAME on the LHS and on the RHS, and
   finally pointer arithmetic that can become array indexing.  Returns
   whether propagation succeeded (per the header comment, success may be
   partial).  */
858 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
859 gimple_stmt_iterator *use_stmt_gsi,
862 tree lhs, rhs, rhs2, array_ref;
863 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
864 enum tree_code rhs_code;
867 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
869 lhs = gimple_assign_lhs (use_stmt);
870 rhs_code = gimple_assign_rhs_code (use_stmt);
871 rhs = gimple_assign_rhs1 (use_stmt);
873 /* Trivial cases. The use statement could be a trivial copy or a
874 useless conversion. Recurse to the uses of the lhs as copyprop does
875 not copy through different variant pointers and FRE does not catch
876 all useless conversions. Treat the case of a single-use name and
877 a conversion to def_rhs type separate, though. */
878 if (TREE_CODE (lhs) == SSA_NAME
879 && ((rhs_code == SSA_NAME && rhs == name)
880 || CONVERT_EXPR_CODE_P (rhs_code)))
882 /* Only recurse if we don't deal with a single use or we cannot
883 do the propagation to the current statement. In particular
884 we can end up with a conversion needed for a non-invariant
885 address which we cannot do in a single statement. */
887 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
888 && (!is_gimple_min_invariant (def_rhs)
889 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
890 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
891 && (TYPE_PRECISION (TREE_TYPE (lhs))
892 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
893 return forward_propagate_addr_expr (lhs, def_rhs);
/* Single-use case: rewrite the copy/conversion in place, keeping a
   NOP_EXPR when the types are not trivially convertible.  */
895 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
896 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
897 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
899 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
903 /* Propagate through constant pointer adjustments. */
904 if (TREE_CODE (lhs) == SSA_NAME
905 && rhs_code == POINTER_PLUS_EXPR
907 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
910 /* As we come here with non-invariant addresses in def_rhs we need
911 to make sure we can build a valid constant offsetted address
912 for further propagation. Simply rely on fold building that
913 and check after the fact. */
914 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
916 fold_convert (ptr_type_node,
917 gimple_assign_rhs2 (use_stmt)));
918 if (TREE_CODE (new_def_rhs) == MEM_REF
919 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
921 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
924 /* Recurse. If we could propagate into all uses of lhs do not
925 bother to replace into the current use but just pretend we did. */
926 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
927 && forward_propagate_addr_expr (lhs, new_def_rhs))
930 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
931 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
932 new_def_rhs, NULL_TREE);
933 else if (is_gimple_min_invariant (new_def_rhs))
934 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
935 new_def_rhs, NULL_TREE);
938 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
939 update_stmt (use_stmt);
943 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
944 ADDR_EXPR will not appear on the LHS. */
945 lhs = gimple_assign_lhs (use_stmt);
946 while (handled_component_p (lhs))
947 lhs = TREE_OPERAND (lhs, 0);
949 /* Now see if the LHS node is a MEM_REF using NAME. If so,
950 propagate the ADDR_EXPR into the use of NAME and fold the result. */
951 if (TREE_CODE (lhs) == MEM_REF
952 && TREE_OPERAND (lhs, 0) == name)
955 HOST_WIDE_INT def_rhs_offset;
956 /* If the address is invariant we can always fold it. */
957 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
/* Accumulate the MEM_REF's own offset with the offset of DEF_RHS.  */
960 double_int off = mem_ref_offset (lhs);
962 off = double_int_add (off,
963 shwi_to_double_int (def_rhs_offset));
964 if (TREE_CODE (def_rhs_base) == MEM_REF)
966 off = double_int_add (off, mem_ref_offset (def_rhs_base));
967 new_ptr = TREE_OPERAND (def_rhs_base, 0);
970 new_ptr = build_fold_addr_expr (def_rhs_base);
971 TREE_OPERAND (lhs, 0) = new_ptr;
972 TREE_OPERAND (lhs, 1)
973 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
974 tidy_after_forward_propagate_addr (use_stmt);
975 /* Continue propagating into the RHS if this was not the only use. */
979 /* If the LHS is a plain dereference and the value type is the same as
980 that of the pointed-to type of the address we can put the
981 dereferenced address on the LHS preserving the original alias-type. */
982 else if (gimple_assign_lhs (use_stmt) == lhs
983 && useless_type_conversion_p
984 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
985 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
987 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
988 tree new_offset, new_base, saved;
989 while (handled_component_p (*def_rhs_basep))
990 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Temporarily splice a combined MEM_REF into DEF_RHS, copy the result
   onto the LHS, then restore DEF_RHS (SAVED) — DEF_RHS is shared.  */
991 saved = *def_rhs_basep;
992 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
994 new_base = TREE_OPERAND (*def_rhs_basep, 0);
996 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
997 TREE_OPERAND (*def_rhs_basep, 1));
1001 new_base = build_fold_addr_expr (*def_rhs_basep);
1002 new_offset = TREE_OPERAND (lhs, 1);
1004 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
1005 new_base, new_offset);
1006 gimple_assign_set_lhs (use_stmt,
1007 unshare_expr (TREE_OPERAND (def_rhs, 0)));
1008 *def_rhs_basep = saved;
1009 tidy_after_forward_propagate_addr (use_stmt);
1010 /* Continue propagating into the RHS if this was not the
1016 /* We can have a struct assignment dereferencing our name twice.
1017 Note that we didn't propagate into the lhs to not falsely
1018 claim we did when propagating into the rhs. */
1022 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
1023 nodes from the RHS. */
1024 rhs = gimple_assign_rhs1 (use_stmt);
1025 if (TREE_CODE (rhs) == ADDR_EXPR)
1026 rhs = TREE_OPERAND (rhs, 0);
1027 while (handled_component_p (rhs))
1028 rhs = TREE_OPERAND (rhs, 0);
1030 /* Now see if the RHS node is a MEM_REF using NAME. If so,
1031 propagate the ADDR_EXPR into the use of NAME and fold the result. */
1032 if (TREE_CODE (rhs) == MEM_REF
1033 && TREE_OPERAND (rhs, 0) == name)
1036 HOST_WIDE_INT def_rhs_offset;
1037 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
1040 double_int off = mem_ref_offset (rhs);
1042 off = double_int_add (off,
1043 shwi_to_double_int (def_rhs_offset));
1044 if (TREE_CODE (def_rhs_base) == MEM_REF)
1046 off = double_int_add (off, mem_ref_offset (def_rhs_base));
1047 new_ptr = TREE_OPERAND (def_rhs_base, 0);
1050 new_ptr = build_fold_addr_expr (def_rhs_base);
1051 TREE_OPERAND (rhs, 0) = new_ptr;
1052 TREE_OPERAND (rhs, 1)
1053 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
1054 fold_stmt_inplace (use_stmt);
1055 tidy_after_forward_propagate_addr (use_stmt);
1058 /* If the LHS is a plain dereference and the value type is the same as
1059 that of the pointed-to type of the address we can put the
1060 dereferenced address on the LHS preserving the original alias-type. */
1061 else if (gimple_assign_rhs1 (use_stmt) == rhs
1062 && useless_type_conversion_p
1063 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
1064 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
1066 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
1067 tree new_offset, new_base, saved;
1068 while (handled_component_p (*def_rhs_basep))
1069 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Same splice-copy-restore trick as the LHS case above, mirrored for
   the RHS dereference.  */
1070 saved = *def_rhs_basep;
1071 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
1073 new_base = TREE_OPERAND (*def_rhs_basep, 0);
1075 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
1076 TREE_OPERAND (*def_rhs_basep, 1));
1080 new_base = build_fold_addr_expr (*def_rhs_basep);
1081 new_offset = TREE_OPERAND (rhs, 1);
1083 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
1084 new_base, new_offset);
1085 gimple_assign_set_rhs1 (use_stmt,
1086 unshare_expr (TREE_OPERAND (def_rhs, 0)));
1087 *def_rhs_basep = saved;
1088 fold_stmt_inplace (use_stmt);
1089 tidy_after_forward_propagate_addr (use_stmt);
1094 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
1095 is nothing to do. */
1096 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
1097 || gimple_assign_rhs1 (use_stmt) != name)
1100 /* The remaining cases are all for turning pointer arithmetic into
1101 array indexing. They only apply when we have the address of
1102 element zero in an array. If that is not the case then there
1103 is nothing to do. */
1104 array_ref = TREE_OPERAND (def_rhs, 0);
1105 if ((TREE_CODE (array_ref) != ARRAY_REF
1106 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
1107 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
1108 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
1111 rhs2 = gimple_assign_rhs2 (use_stmt);
1112 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
1113 of the elements in X into &x[C1 + C2/element size]. */
1114 if (TREE_CODE (rhs2) == INTEGER_CST)
1116 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1117 TREE_TYPE (def_rhs),
1121 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1122 new_rhs = unshare_expr (new_rhs);
1123 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1125 if (!is_gimple_min_invariant (new_rhs))
1126 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1128 true, GSI_SAME_STMT);
1129 new_rhs = fold_convert (type, new_rhs);
1131 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1132 use_stmt = gsi_stmt (*use_stmt_gsi);
1133 update_stmt (use_stmt);
1134 tidy_after_forward_propagate_addr (use_stmt);
1139 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1140 converting a multiplication of an index by the size of the
1141 array elements, then the result is converted into the proper
1142 type for the arithmetic. */
1143 if (TREE_CODE (rhs2) == SSA_NAME
1144 && (TREE_CODE (array_ref) != ARRAY_REF
1145 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1146 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1147 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1148 different type than their operands. */
1149 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1150 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1155 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1157 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1158 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1159 node or for recovery of array indexing from pointer arithmetic.
1160 Returns true, if all uses have been propagated into. */
1163 forward_propagate_addr_expr (tree name, tree rhs)
1165 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1166 imm_use_iterator iter;
1169 bool single_use_p = has_single_use (name);
1171 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1176 /* If the use is not in a simple assignment statement, then
1177 there is nothing we can do. */
1178 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
1180 if (!is_gimple_debug (use_stmt))
1185 /* If the use is in a deeper loop nest, then we do not want
1186 to propagate non-invariant ADDR_EXPRs into the loop as that
1187 is likely adding expression evaluations into the loop. */
1188 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1189 && !is_gimple_min_invariant (rhs))
1196 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1197 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1199 /* If the use has moved to a different statement adjust
1200 the update machinery for the old statement too. */
1201 if (use_stmt != gsi_stmt (gsi))
1203 update_stmt (use_stmt);
1204 use_stmt = gsi_stmt (gsi);
1207 update_stmt (use_stmt);
1211 /* Remove intermediate now unused copy and conversion chains. */
1212 use_rhs = gimple_assign_rhs1 (use_stmt);
1214 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1215 && TREE_CODE (use_rhs) == SSA_NAME
1216 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1218 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1219 release_defs (use_stmt);
1220 gsi_remove (&gsi, true);
1224 return all && has_zero_uses (name);
1227 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1228 If so, we can change STMT into lhs = y which can later be copy
1229 propagated. Similarly for negation.
1231 This could trivially be formulated as a forward propagation
1232 to immediate uses. However, we already had an implementation
1233 from DOM which used backward propagation via the use-def links.
1235 It turns out that backward propagation is actually faster as
1236 there's less work to do for each NOT/NEG expression we find.
1237 Backwards propagation needs to look at the statement in a single
1238 backlink. Forward propagation needs to look at potentially more
1239 than one forward link. */
1242 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1244 gimple stmt = gsi_stmt (*gsi_p);
1245 tree rhs = gimple_assign_rhs1 (stmt);
1246 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1248 /* See if the RHS_DEF_STMT has the same form as our statement. */
1249 if (is_gimple_assign (rhs_def_stmt)
1250 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1252 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1254 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1255 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1256 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
1258 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1259 stmt = gsi_stmt (*gsi_p);
1265 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1266 the condition which we may be able to optimize better. */
1269 simplify_gimple_switch (gimple stmt)
1271 tree cond = gimple_switch_index (stmt);
1275 /* The optimization that we really care about is removing unnecessary
1276 casts. That will let us do much better in propagating the inferred
1277 constant at the switch target. */
1278 if (TREE_CODE (cond) == SSA_NAME)
1280 def_stmt = SSA_NAME_DEF_STMT (cond);
1281 if (is_gimple_assign (def_stmt))
1283 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1288 def = gimple_assign_rhs1 (def_stmt);
1290 /* ??? Why was Jeff testing this? We are gimple... */
1291 gcc_checking_assert (is_gimple_val (def));
1293 to = TREE_TYPE (cond);
1294 ti = TREE_TYPE (def);
1296 /* If we have an extension that preserves value, then we
1297 can copy the source value into the switch. */
1299 need_precision = TYPE_PRECISION (ti);
1301 if (! INTEGRAL_TYPE_P (ti))
1303 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1305 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1306 need_precision += 1;
1307 if (TYPE_PRECISION (to) < need_precision)
1312 gimple_switch_set_index (stmt, def);
1320 /* For pointers p2 and p1 return p2 - p1 if the
1321 difference is known and constant, otherwise return NULL. */
1324 constant_pointer_difference (tree p1, tree p2)
1327 #define CPD_ITERATIONS 5
1328 tree exps[2][CPD_ITERATIONS];
1329 tree offs[2][CPD_ITERATIONS];
1332 for (i = 0; i < 2; i++)
1334 tree p = i ? p1 : p2;
1335 tree off = size_zero_node;
1337 enum tree_code code;
1339 /* For each of p1 and p2 we need to iterate at least
1340 twice, to handle ADDR_EXPR directly in p1/p2,
1341 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1342 on definition's stmt RHS. Iterate a few extra times. */
1346 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1348 if (TREE_CODE (p) == ADDR_EXPR)
1350 tree q = TREE_OPERAND (p, 0);
1351 HOST_WIDE_INT offset;
1352 tree base = get_addr_base_and_unit_offset (q, &offset);
1357 off = size_binop (PLUS_EXPR, off, size_int (offset));
1359 if (TREE_CODE (q) == MEM_REF
1360 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1362 p = TREE_OPERAND (q, 0);
1363 off = size_binop (PLUS_EXPR, off,
1364 double_int_to_tree (sizetype,
1365 mem_ref_offset (q)));
1374 if (TREE_CODE (p) != SSA_NAME)
1378 if (j == CPD_ITERATIONS)
1380 stmt = SSA_NAME_DEF_STMT (p);
1381 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1383 code = gimple_assign_rhs_code (stmt);
1384 if (code == POINTER_PLUS_EXPR)
1386 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1388 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1389 p = gimple_assign_rhs1 (stmt);
1391 else if (code == ADDR_EXPR || code == NOP_EXPR)
1392 p = gimple_assign_rhs1 (stmt);
1400 for (i = 0; i < cnt[0]; i++)
1401 for (j = 0; j < cnt[1]; j++)
1402 if (exps[0][i] == exps[1][j])
1403 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1408 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1410 memcpy (p, "abcd", 4);
1411 memset (p + 4, ' ', 3);
1413 memcpy (p, "abcd ", 7);
1414 call if the latter can be stored by pieces during expansion. */
1417 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1419 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
1420 tree vuse = gimple_vuse (stmt2);
1423 stmt1 = SSA_NAME_DEF_STMT (vuse);
1425 switch (DECL_FUNCTION_CODE (callee2))
1427 case BUILT_IN_MEMSET:
1428 if (gimple_call_num_args (stmt2) != 3
1429 || gimple_call_lhs (stmt2)
1431 || BITS_PER_UNIT != 8)
1436 tree ptr1, src1, str1, off1, len1, lhs1;
1437 tree ptr2 = gimple_call_arg (stmt2, 0);
1438 tree val2 = gimple_call_arg (stmt2, 1);
1439 tree len2 = gimple_call_arg (stmt2, 2);
1440 tree diff, vdef, new_str_cst;
1442 unsigned int ptr1_align;
1443 unsigned HOST_WIDE_INT src_len;
1445 use_operand_p use_p;
1447 if (!host_integerp (val2, 0)
1448 || !host_integerp (len2, 1))
1450 if (is_gimple_call (stmt1))
1452 /* If first stmt is a call, it needs to be memcpy
1453 or mempcpy, with string literal as second argument and
1455 callee1 = gimple_call_fndecl (stmt1);
1456 if (callee1 == NULL_TREE
1457 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1458 || gimple_call_num_args (stmt1) != 3)
1460 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1461 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1463 ptr1 = gimple_call_arg (stmt1, 0);
1464 src1 = gimple_call_arg (stmt1, 1);
1465 len1 = gimple_call_arg (stmt1, 2);
1466 lhs1 = gimple_call_lhs (stmt1);
1467 if (!host_integerp (len1, 1))
1469 str1 = string_constant (src1, &off1);
1470 if (str1 == NULL_TREE)
1472 if (!host_integerp (off1, 1)
1473 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1474 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1475 - tree_low_cst (off1, 1)) > 0
1476 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1477 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1478 != TYPE_MODE (char_type_node))
1481 else if (gimple_assign_single_p (stmt1))
1483 /* Otherwise look for length 1 memcpy optimized into
1485 ptr1 = gimple_assign_lhs (stmt1);
1486 src1 = gimple_assign_rhs1 (stmt1);
1487 if (TREE_CODE (ptr1) != MEM_REF
1488 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1489 || !host_integerp (src1, 0))
1491 ptr1 = build_fold_addr_expr (ptr1);
1492 callee1 = NULL_TREE;
1493 len1 = size_one_node;
1495 off1 = size_zero_node;
1501 diff = constant_pointer_difference (ptr1, ptr2);
1502 if (diff == NULL && lhs1 != NULL)
1504 diff = constant_pointer_difference (lhs1, ptr2);
1505 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1507 diff = size_binop (PLUS_EXPR, diff,
1508 fold_convert (sizetype, len1));
1510 /* If the difference between the second and first destination pointer
1511 is not constant, or is bigger than memcpy length, bail out. */
1513 || !host_integerp (diff, 1)
1514 || tree_int_cst_lt (len1, diff))
1517 /* Use maximum of difference plus memset length and memcpy length
1518 as the new memcpy length, if it is too big, bail out. */
1519 src_len = tree_low_cst (diff, 1);
1520 src_len += tree_low_cst (len2, 1);
1521 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1522 src_len = tree_low_cst (len1, 1);
1526 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1527 with bigger length will return different result. */
1528 if (lhs1 != NULL_TREE
1529 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1530 && (TREE_CODE (lhs1) != SSA_NAME
1531 || !single_imm_use (lhs1, &use_p, &use_stmt)
1532 || use_stmt != stmt2))
1535 /* If anything reads memory in between memcpy and memset
1536 call, the modified memcpy call might change it. */
1537 vdef = gimple_vdef (stmt1);
1539 && (!single_imm_use (vdef, &use_p, &use_stmt)
1540 || use_stmt != stmt2))
1543 ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
1544 /* Construct the new source string literal. */
1545 src_buf = XALLOCAVEC (char, src_len + 1);
1548 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1549 tree_low_cst (len1, 1));
1551 src_buf[0] = tree_low_cst (src1, 0);
1552 memset (src_buf + tree_low_cst (diff, 1),
1553 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1554 src_buf[src_len] = '\0';
1555 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1556 handle embedded '\0's. */
1557 if (strlen (src_buf) != src_len)
1559 rtl_profile_for_bb (gimple_bb (stmt2));
1560 /* If the new memcpy wouldn't be emitted by storing the literal
1561 by pieces, this optimization might enlarge .rodata too much,
1562 as commonly used string literals couldn't be shared any
1564 if (!can_store_by_pieces (src_len,
1565 builtin_strncpy_read_str,
1566 src_buf, ptr1_align, false))
1569 new_str_cst = build_string_literal (src_len, src_buf);
1572 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1574 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1575 gimple_call_set_lhs (stmt1, NULL_TREE);
1576 gimple_call_set_arg (stmt1, 1, new_str_cst);
1577 gimple_call_set_arg (stmt1, 2,
1578 build_int_cst (TREE_TYPE (len1), src_len));
1579 update_stmt (stmt1);
1580 unlink_stmt_vdef (stmt2);
1581 gsi_remove (gsi_p, true);
1582 release_defs (stmt2);
1583 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1584 release_ssa_name (lhs1);
1589 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1590 assignment, remove STMT1 and change memset call into
1592 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1594 if (!is_gimple_val (ptr1))
1595 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1596 true, GSI_SAME_STMT);
1597 gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
1598 gimple_call_set_arg (stmt2, 0, ptr1);
1599 gimple_call_set_arg (stmt2, 1, new_str_cst);
1600 gimple_call_set_arg (stmt2, 2,
1601 build_int_cst (TREE_TYPE (len2), src_len));
1602 unlink_stmt_vdef (stmt1);
1603 gsi_remove (&gsi, true);
1604 release_defs (stmt1);
1605 update_stmt (stmt2);
1616 /* Simplify bitwise binary operations.
1617 Return true if a transformation applied, otherwise return false. */
1620 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1622 gimple stmt = gsi_stmt (*gsi);
1623 tree arg1 = gimple_assign_rhs1 (stmt);
1624 tree arg2 = gimple_assign_rhs2 (stmt);
1625 enum tree_code code = gimple_assign_rhs_code (stmt);
1627 gimple def1 = NULL, def2 = NULL;
1628 tree def1_arg1, def2_arg1;
1629 enum tree_code def1_code, def2_code;
1631 /* If the first argument is an SSA name that is itself a result of a
1632 typecast of an ADDR_EXPR to an integer, feed the ADDR_EXPR to the
1633 folder rather than the ssa name. */
1634 if (code == BIT_AND_EXPR
1635 && TREE_CODE (arg2) == INTEGER_CST
1636 && TREE_CODE (arg1) == SSA_NAME)
1638 gimple def = SSA_NAME_DEF_STMT (arg1);
1641 /* ??? This looks bogus - the conversion could be truncating. */
1642 if (is_gimple_assign (def)
1643 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def))
1644 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1646 tree opp = gimple_assign_rhs1 (def);
1647 if (TREE_CODE (opp) == ADDR_EXPR)
1651 res = fold_binary_loc (gimple_location (stmt),
1652 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
1654 if (res && is_gimple_min_invariant (res))
1656 gimple_assign_set_rhs_from_tree (gsi, res);
1662 def1_code = TREE_CODE (arg1);
1664 if (TREE_CODE (arg1) == SSA_NAME)
1666 def1 = SSA_NAME_DEF_STMT (arg1);
1667 if (is_gimple_assign (def1))
1669 def1_code = gimple_assign_rhs_code (def1);
1670 def1_arg1 = gimple_assign_rhs1 (def1);
1674 def2_code = TREE_CODE (arg2);
1676 if (TREE_CODE (arg2) == SSA_NAME)
1678 def2 = SSA_NAME_DEF_STMT (arg2);
1679 if (is_gimple_assign (def2))
1681 def2_code = gimple_assign_rhs_code (def2);
1682 def2_arg1 = gimple_assign_rhs1 (def2);
1686 /* For bitwise binary operations apply operand conversions to the
1687 binary operation result instead of to the operands. This allows
1688 to combine successive conversions and bitwise binary operations. */
1689 if (CONVERT_EXPR_CODE_P (def1_code)
1690 && CONVERT_EXPR_CODE_P (def2_code)
1691 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1692 /* Make sure that the conversion widens the operands or that it
1693 changes the operation to a bitfield precision. */
1694 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1695 < TYPE_PRECISION (TREE_TYPE (arg1)))
1696 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1698 || (TYPE_PRECISION (TREE_TYPE (arg1))
1699 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1702 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1704 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1705 tem = make_ssa_name (tem, newop);
1706 gimple_assign_set_lhs (newop, tem);
1707 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1708 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1709 tem, NULL_TREE, NULL_TREE);
1710 update_stmt (gsi_stmt (*gsi));
1714 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1715 if (code == BIT_AND_EXPR
1716 && def1_code == BIT_IOR_EXPR
1717 && TREE_CODE (arg2) == INTEGER_CST
1718 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1720 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1721 arg2, gimple_assign_rhs2 (def1));
1724 if (integer_zerop (cst))
1726 gimple_assign_set_rhs1 (stmt, def1_arg1);
1730 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1731 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1732 tem, def1_arg1, arg2);
1733 tem = make_ssa_name (tem, newop);
1734 gimple_assign_set_lhs (newop, tem);
1735 /* Make sure to re-process the new stmt as it's walking upwards. */
1736 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1737 gimple_assign_set_rhs1 (stmt, tem);
1738 gimple_assign_set_rhs2 (stmt, cst);
1739 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1744 /* Combine successive equal operations with constants. */
1745 if ((code == BIT_AND_EXPR
1746 || code == BIT_IOR_EXPR
1747 || code == BIT_XOR_EXPR)
1748 && def1_code == code
1749 && TREE_CODE (arg2) == INTEGER_CST
1750 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1752 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1753 arg2, gimple_assign_rhs2 (def1));
1754 gimple_assign_set_rhs1 (stmt, def1_arg1);
1755 gimple_assign_set_rhs2 (stmt, cst);
1764 /* Perform re-associations of the plus or minus statement STMT that are
1765 always permitted. Returns true if the CFG was changed. */
1768 associate_plusminus (gimple stmt)
1770 tree rhs1 = gimple_assign_rhs1 (stmt);
1771 tree rhs2 = gimple_assign_rhs2 (stmt);
1772 enum tree_code code = gimple_assign_rhs_code (stmt);
1773 gimple_stmt_iterator gsi;
1776 /* We can't reassociate at all for saturating types. */
1777 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1780 /* First contract negates. */
1785 /* A +- (-B) -> A -+ B. */
1786 if (TREE_CODE (rhs2) == SSA_NAME)
1788 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1789 if (is_gimple_assign (def_stmt)
1790 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
1792 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1793 gimple_assign_set_rhs_code (stmt, code);
1794 rhs2 = gimple_assign_rhs1 (def_stmt);
1795 gimple_assign_set_rhs2 (stmt, rhs2);
1796 gimple_set_modified (stmt, true);
1801 /* (-A) + B -> B - A. */
1802 if (TREE_CODE (rhs1) == SSA_NAME
1803 && code == PLUS_EXPR)
1805 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1806 if (is_gimple_assign (def_stmt)
1807 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
1810 gimple_assign_set_rhs_code (stmt, code);
1812 gimple_assign_set_rhs1 (stmt, rhs1);
1813 rhs2 = gimple_assign_rhs1 (def_stmt);
1814 gimple_assign_set_rhs2 (stmt, rhs2);
1815 gimple_set_modified (stmt, true);
1822 /* We can't reassociate floating-point or fixed-point plus or minus
1823 because of saturation to +-Inf. */
1824 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1825 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1828 /* Second match patterns that allow contracting a plus-minus pair
1829 irrespective of overflow issues.
1831 (A +- B) - A -> +- B
1833 (CST +- A) +- CST -> CST +- A
1834 (A + CST) +- CST -> A + CST
1837 A - (A +- B) -> -+ B
1838 A +- (B +- A) -> +- B
1839 CST +- (CST +- A) -> CST +- A
1840 CST +- (A +- CST) -> CST +- A
1843 via commutating the addition and contracting operations to zero
1844 by reassociation. */
1846 gsi = gsi_for_stmt (stmt);
1847 if (TREE_CODE (rhs1) == SSA_NAME)
1849 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1850 if (is_gimple_assign (def_stmt))
1852 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1853 if (def_code == PLUS_EXPR
1854 || def_code == MINUS_EXPR)
1856 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1857 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1858 if (operand_equal_p (def_rhs1, rhs2, 0)
1859 && code == MINUS_EXPR)
1861 /* (A +- B) - A -> +- B. */
1862 code = ((def_code == PLUS_EXPR)
1863 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1866 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1867 gcc_assert (gsi_stmt (gsi) == stmt);
1868 gimple_set_modified (stmt, true);
1870 else if (operand_equal_p (def_rhs2, rhs2, 0)
1871 && code != def_code)
1873 /* (A +- B) -+ B -> A. */
1874 code = TREE_CODE (def_rhs1);
1877 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1878 gcc_assert (gsi_stmt (gsi) == stmt);
1879 gimple_set_modified (stmt, true);
1881 else if (TREE_CODE (rhs2) == INTEGER_CST
1882 && TREE_CODE (def_rhs1) == INTEGER_CST)
1884 /* (CST +- A) +- CST -> CST +- A. */
1885 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1887 if (cst && !TREE_OVERFLOW (cst))
1890 gimple_assign_set_rhs_code (stmt, code);
1892 gimple_assign_set_rhs1 (stmt, rhs1);
1894 gimple_assign_set_rhs2 (stmt, rhs2);
1895 gimple_set_modified (stmt, true);
1898 else if (TREE_CODE (rhs2) == INTEGER_CST
1899 && TREE_CODE (def_rhs2) == INTEGER_CST
1900 && def_code == PLUS_EXPR)
1902 /* (A + CST) +- CST -> A + CST. */
1903 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1905 if (cst && !TREE_OVERFLOW (cst))
1908 gimple_assign_set_rhs_code (stmt, code);
1910 gimple_assign_set_rhs1 (stmt, rhs1);
1912 gimple_assign_set_rhs2 (stmt, rhs2);
1913 gimple_set_modified (stmt, true);
1917 else if (def_code == BIT_NOT_EXPR
1918 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
1920 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1921 if (code == PLUS_EXPR
1922 && operand_equal_p (def_rhs1, rhs2, 0))
1926 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
1928 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1929 gcc_assert (gsi_stmt (gsi) == stmt);
1930 gimple_set_modified (stmt, true);
1932 else if (code == PLUS_EXPR
1933 && integer_onep (rhs1))
1939 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1940 gcc_assert (gsi_stmt (gsi) == stmt);
1941 gimple_set_modified (stmt, true);
1947 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
1949 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1950 if (is_gimple_assign (def_stmt))
1952 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1953 if (def_code == PLUS_EXPR
1954 || def_code == MINUS_EXPR)
1956 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1957 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1958 if (operand_equal_p (def_rhs1, rhs1, 0)
1959 && code == MINUS_EXPR)
1961 /* A - (A +- B) -> -+ B. */
1962 code = ((def_code == PLUS_EXPR)
1963 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
1966 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1967 gcc_assert (gsi_stmt (gsi) == stmt);
1968 gimple_set_modified (stmt, true);
1970 else if (operand_equal_p (def_rhs2, rhs1, 0)
1971 && code != def_code)
1973 /* A +- (B +- A) -> +- B. */
1974 code = ((code == PLUS_EXPR)
1975 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
1978 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1979 gcc_assert (gsi_stmt (gsi) == stmt);
1980 gimple_set_modified (stmt, true);
1982 else if (TREE_CODE (rhs1) == INTEGER_CST
1983 && TREE_CODE (def_rhs1) == INTEGER_CST)
1985 /* CST +- (CST +- A) -> CST +- A. */
1986 tree cst = fold_binary (code, TREE_TYPE (rhs2),
1988 if (cst && !TREE_OVERFLOW (cst))
1990 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
1991 gimple_assign_set_rhs_code (stmt, code);
1993 gimple_assign_set_rhs1 (stmt, rhs1);
1995 gimple_assign_set_rhs2 (stmt, rhs2);
1996 gimple_set_modified (stmt, true);
1999 else if (TREE_CODE (rhs1) == INTEGER_CST
2000 && TREE_CODE (def_rhs2) == INTEGER_CST)
2002 /* CST +- (A +- CST) -> CST +- A. */
2003 tree cst = fold_binary (def_code == code
2004 ? PLUS_EXPR : MINUS_EXPR,
2007 if (cst && !TREE_OVERFLOW (cst))
2010 gimple_assign_set_rhs1 (stmt, rhs1);
2012 gimple_assign_set_rhs2 (stmt, rhs2);
2013 gimple_set_modified (stmt, true);
2017 else if (def_code == BIT_NOT_EXPR
2018 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2020 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2021 if (code == PLUS_EXPR
2022 && operand_equal_p (def_rhs1, rhs1, 0))
2026 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2028 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2029 gcc_assert (gsi_stmt (gsi) == stmt);
2030 gimple_set_modified (stmt, true);
2037 if (gimple_modified_p (stmt))
2039 fold_stmt_inplace (stmt);
2041 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2042 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2049 /* Combine two conversions in a row for the second conversion at *GSI.
2050 Returns true if there were any changes made. */
2053 combine_conversions (gimple_stmt_iterator *gsi)
2055 gimple stmt = gsi_stmt (*gsi);
2058 enum tree_code code = gimple_assign_rhs_code (stmt);
2060 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2061 || code == FLOAT_EXPR
2062 || code == FIX_TRUNC_EXPR);
2064 lhs = gimple_assign_lhs (stmt);
2065 op0 = gimple_assign_rhs1 (stmt);
2066 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2068 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2072 if (TREE_CODE (op0) != SSA_NAME)
2075 def_stmt = SSA_NAME_DEF_STMT (op0);
2076 if (!is_gimple_assign (def_stmt))
2079 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2081 tree defop0 = gimple_assign_rhs1 (def_stmt);
2082 tree type = TREE_TYPE (lhs);
2083 tree inside_type = TREE_TYPE (defop0);
2084 tree inter_type = TREE_TYPE (op0);
2085 int inside_int = INTEGRAL_TYPE_P (inside_type);
2086 int inside_ptr = POINTER_TYPE_P (inside_type);
2087 int inside_float = FLOAT_TYPE_P (inside_type);
2088 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2089 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2090 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2091 int inter_int = INTEGRAL_TYPE_P (inter_type);
2092 int inter_ptr = POINTER_TYPE_P (inter_type);
2093 int inter_float = FLOAT_TYPE_P (inter_type);
2094 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2095 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2096 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2097 int final_int = INTEGRAL_TYPE_P (type);
2098 int final_ptr = POINTER_TYPE_P (type);
2099 int final_float = FLOAT_TYPE_P (type);
2100 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2101 unsigned int final_prec = TYPE_PRECISION (type);
2102 int final_unsignedp = TYPE_UNSIGNED (type);
2104 /* In addition to the cases of two conversions in a row
2105 handled below, if we are converting something to its own
2106 type via an object of identical or wider precision, neither
2107 conversion is needed. */
2108 if (useless_type_conversion_p (type, inside_type)
2109 && (((inter_int || inter_ptr) && final_int)
2110 || (inter_float && final_float))
2111 && inter_prec >= final_prec)
2113 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2114 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2119 /* Likewise, if the intermediate and initial types are either both
2120 float or both integer, we don't need the middle conversion if the
2121 former is wider than the latter and doesn't change the signedness
2122 (for integers). Avoid this if the final type is a pointer since
2123 then we sometimes need the middle conversion. Likewise if the
2124 final type has a precision not equal to the size of its mode. */
2125 if (((inter_int && inside_int)
2126 || (inter_float && inside_float)
2127 || (inter_vec && inside_vec))
2128 && inter_prec >= inside_prec
2129 && (inter_float || inter_vec
2130 || inter_unsignedp == inside_unsignedp)
2131 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2132 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2134 && (! final_vec || inter_prec == inside_prec))
2136 gimple_assign_set_rhs1 (stmt, defop0);
2141 /* If we have a sign-extension of a zero-extended value, we can
2142 replace that by a single zero-extension. */
2143 if (inside_int && inter_int && final_int
2144 && inside_prec < inter_prec && inter_prec < final_prec
2145 && inside_unsignedp && !inter_unsignedp)
2147 gimple_assign_set_rhs1 (stmt, defop0);
2152 /* Two conversions in a row are not needed unless:
2153 - some conversion is floating-point (overstrict for now), or
2154 - some conversion is a vector (overstrict for now), or
2155 - the intermediate type is narrower than both initial and
2157 - the intermediate type and innermost type differ in signedness,
2158 and the outermost type is wider than the intermediate, or
2159 - the initial type is a pointer type and the precisions of the
2160 intermediate and final types differ, or
2161 - the final type is a pointer type and the precisions of the
2162 initial and intermediate types differ. */
2163 if (! inside_float && ! inter_float && ! final_float
2164 && ! inside_vec && ! inter_vec && ! final_vec
2165 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2166 && ! (inside_int && inter_int
2167 && inter_unsignedp != inside_unsignedp
2168 && inter_prec < final_prec)
2169 && ((inter_unsignedp && inter_prec > inside_prec)
2170 == (final_unsignedp && final_prec > inter_prec))
2171 && ! (inside_ptr && inter_prec != final_prec)
2172 && ! (final_ptr && inside_prec != inter_prec)
2173 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2174 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2176 gimple_assign_set_rhs1 (stmt, defop0);
2181 /* A truncation to an unsigned type should be canonicalized as
2182 bitwise and of a mask. */
2183 if (final_int && inter_int && inside_int
2184 && final_prec == inside_prec
2185 && final_prec > inter_prec
2189 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2192 (inside_type, double_int_mask (inter_prec)));
2193 if (!useless_type_conversion_p (type, inside_type))
2195 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2197 gimple_assign_set_rhs1 (stmt, tem);
2200 gimple_assign_set_rhs_from_tree (gsi, tem);
2201 update_stmt (gsi_stmt (*gsi));
2209 /* Main entry point for the forward propagation optimizer. */
2212 tree_ssa_forward_propagate_single_use_vars (void)
/* TODO_* flags accumulated during the walk and returned to the pass
   manager (NOTE(review): the "return todoflags;" line is elided from
   this listing — confirm against the full file).  */
2215 unsigned int todoflags = 0;
/* File-scope flag set when an EH/abnormal edge cleanup makes the CFG
   change; translated into TODO_cleanup_cfg at the end (line 2382).  */
2217 cfg_changed = false;
/* NOTE(review): the enclosing per-basic-block loop header (presumably
   FOR_EACH_BB) is elided here; GSI walks the statements of each BB.  */
2221 gimple_stmt_iterator gsi;
2223 /* Note we update GSI within the loop as necessary. */
2224 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2226 gimple stmt = gsi_stmt (gsi);
2228 /* If this statement sets an SSA_NAME to an address,
2229 try to propagate the address into the uses of the SSA_NAME. */
2230 if (is_gimple_assign (stmt))
2232 tree lhs = gimple_assign_lhs (stmt);
2233 tree rhs = gimple_assign_rhs1 (stmt);
2234 enum tree_code code = gimple_assign_rhs_code (stmt);
/* Nothing to propagate unless the LHS is an SSA name with uses.  */
2236 if (TREE_CODE (lhs) != SSA_NAME
2237 || has_zero_uses (lhs))
/* Case 1: x = &a (or a pointer conversion of an invariant address).
   Try to fold the address straight into x's use sites.  */
2243 if (code == ADDR_EXPR
2244 /* Handle pointer conversions on invariant addresses
2245 as well, as this is valid gimple. */
2246 || (CONVERT_EXPR_CODE_P (code)
2247 && TREE_CODE (rhs) == ADDR_EXPR
2248 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2250 tree base = get_base_address (TREE_OPERAND (rhs, 0));
/* Only propagate when the address is invariant and the statement
   does not touch abnormal SSA names (those must stay put).
   NOTE(review): the first arm of this condition (lines 2251-2252)
   is elided from this listing.  */
2253 || decl_address_invariant_p (base))
2254 && !stmt_references_abnormal_ssa_name (stmt)
2255 && forward_propagate_addr_expr (lhs, rhs))
/* Propagation succeeded into every use: the def is dead.  */
2257 release_defs (stmt);
2258 todoflags |= TODO_remove_unused_locals;
2259 gsi_remove (&gsi, true);
/* Case 2: x = ptr + CST.  Rewrite as a MEM_REF address so the same
   address-propagation machinery applies.  */
2264 else if (code == POINTER_PLUS_EXPR
2265 && can_propagate_from (stmt))
2267 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
2268 /* ??? Better adjust the interface to that function
2269 instead of building new trees here. */
2270 && forward_propagate_addr_expr
/* Build &MEM[rhs, offset] as the tree to propagate.
   NOTE(review): intermediate argument lines (2271-2273, 2276-2278)
   are elided from this listing.  */
2274 fold_build2 (MEM_REF,
2275 TREE_TYPE (TREE_TYPE (rhs)),
2279 gimple_assign_rhs2 (stmt))))))
2281 release_defs (stmt);
2282 todoflags |= TODO_remove_unused_locals;
2283 gsi_remove (&gsi, true);
2285 else if (is_gimple_min_invariant (rhs))
2287 /* Make sure to fold &a[0] + off_1 here. */
2288 fold_stmt_inplace (stmt);
/* If folding did not collapse the POINTER_PLUS_EXPR, move on.  */
2290 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
/* Case 3: x = ~a or x = -a — try to cancel double negation etc.  */
2296 else if ((code == BIT_NOT_EXPR
2297 || code == NEGATE_EXPR)
2298 && TREE_CODE (rhs) == SSA_NAME)
2300 simplify_not_neg_expr (&gsi);
/* Case 4: x = a COND b ? ... — propagate defs into the condition.
   Overflow warnings are deferred so we only warn when the transform
   actually relied on strict-overflow semantics (did_something == 2).  */
2303 else if (code == COND_EXPR)
2305 /* In this case the entire COND_EXPR is in rhs1. */
2307 fold_defer_overflow_warnings ();
2308 did_something = forward_propagate_into_cond (&gsi);
/* forward_propagate_into_cond may replace the statement; refetch.  */
2309 stmt = gsi_stmt (gsi);
2310 if (did_something == 2)
2312 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
2313 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Case 5: x = a OP b where OP is a comparison.  */
2316 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2318 bool no_warning = gimple_no_warning_p (stmt);
2320 fold_defer_overflow_warnings ();
2321 did_something = forward_propagate_comparison (stmt);
2322 if (did_something == 2)
2324 fold_undefer_overflow_warnings
2325 (!no_warning && did_something,
2326 stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Case 6: bitwise binary ops — combine with their defining stmts.  */
2329 else if (code == BIT_AND_EXPR
2330 || code == BIT_IOR_EXPR
2331 || code == BIT_XOR_EXPR)
2333 if (!simplify_bitwise_binary (&gsi))
/* Case 7: reassociate x = a +/- b chains; may change the CFG.  */
2336 else if (code == PLUS_EXPR
2337 || code == MINUS_EXPR)
2339 cfg_changed |= associate_plusminus (stmt);
/* Case 8: conversion chains (casts, int<->float) — collapse pairs.  */
2342 else if (CONVERT_EXPR_CODE_P (code)
2343 || code == FLOAT_EXPR
2344 || code == FIX_TRUNC_EXPR)
2346 if (!combine_conversions (&gsi))
/* Non-assignment statements: switches, conditions, calls.  */
2352 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2354 simplify_gimple_switch (stmt);
2357 else if (gimple_code (stmt) == GIMPLE_COND)
2360 fold_defer_overflow_warnings ();
2361 did_something = forward_propagate_into_gimple_cond (stmt);
2362 if (did_something == 2)
2364 fold_undefer_overflow_warnings (did_something, stmt,
2365 WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Calls: try builtin-specific simplifications (memcpy, etc.).  */
2368 else if (is_gimple_call (stmt))
2370 tree callee = gimple_call_fndecl (stmt);
2371 if (callee == NULL_TREE
2372 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2373 || !simplify_builtin_call (&gsi, callee))
/* A transform above changed the CFG; ask for a cleanup pass.  */
2382 todoflags |= TODO_cleanup_cfg;
/* Gate function for the forwprop pass: run it only when the
   -ftree-forwprop flag is enabled (the default at -O1 and above).
   NOTE(review): the return-type line and braces are elided from this
   listing — presumably "static bool"; confirm against the full file.  */
2388 gate_forwprop (void)
2390 return flag_tree_forwprop;
2393 struct gimple_opt_pass pass_forwprop =
2397 "forwprop", /* name */
2398 gate_forwprop, /* gate */
2399 tree_ssa_forward_propagate_single_use_vars, /* execute */
2402 0, /* static_pass_number */
2403 TV_TREE_FORWPROP, /* tv_id */
2404 PROP_cfg | PROP_ssa, /* properties_required */
2405 0, /* properties_provided */
2406 0, /* properties_destroyed */
2407 0, /* todo_flags_start */
2411 | TODO_verify_ssa /* todo_flags_finish */