1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
29 #include "tree-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
48 if (x) goto ... else goto ...
50 Will be transformed into:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
74 if (x) goto ... else goto ...
76 Will be transformed into:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
89 if (x) goto ... else goto ...
91 Will be transformed into:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
123 ptr = (type1*)&type2var;
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
133 ptr2 = ptr + <constant>;
137 ptr2 = &x[constant/elementsize];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
154 Provided that decl has known alignment >= 2, will get turned into
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
162 This will (of course) be extended as other needs arise. */
/* Forward declarations for helpers that are used before their
   definitions below (forward_propagate_addr_expr recurses through
   forward_propagate_addr_expr_1).  */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
178 get_prop_dest_stmt (tree name, tree *final_name_p)
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
/* Walk forward through single-use SSA-name copies: each iteration
   replaces NAME with the copy's lhs and inspects its single use.
   NOTE(review): the enclosing loop/return lines are elided in this
   listing.  */
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
198 *final_name_p = name;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
/* Walk backward through trivial SSA-name copies, starting from the
   statement that defines NAME.  */
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
/* Predicate used by the propagation routines below before they
   substitute DEF_STMT's rhs into a use site.  */
261 can_propagate_from (gimple def_stmt)
266 gcc_assert (is_gimple_assign (def_stmt));
268 /* If the rhs has side-effects we cannot propagate from it. */
269 if (gimple_has_volatile_ops (def_stmt))
272 /* If the rhs is a load we cannot propagate from it. */
273 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
274 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
277 /* Constants can be always propagated. */
278 if (gimple_assign_single_p (def_stmt)
279 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
282 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
283 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
284 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
287 /* If the definition is a conversion of a pointer to a function type,
288 then we can not apply optimizations as some targets require
289 function pointers to be canonicalized and in this case this
290 optimization could eliminate a necessary canonicalization. */
291 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
293 tree rhs = gimple_assign_rhs1 (def_stmt);
294 if (POINTER_TYPE_P (TREE_TYPE (rhs))
295 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
302 /* Remove a copy chain ending in NAME along the defs but not
303 further or including UP_TO_STMT. If NAME was replaced in
304 its only use then this function can be used to clean up
305 dead stmts. Returns true if UP_TO_STMT can be removed
306 as well, otherwise false. */
309 remove_prop_source_from_use (tree name, gimple up_to_stmt)
311 gimple_stmt_iterator gsi;
315 if (!has_zero_uses (name))
318 stmt = SSA_NAME_DEF_STMT (name);
319 if (stmt == up_to_stmt)
322 gsi = gsi_for_stmt (stmt);
/* Remove the now-dead defining statement, then step to the next
   element of the copy chain (the rhs of the removed copy, or NULL
   if the def was not a copy, which terminates the loop).  */
324 gsi_remove (&gsi, true);
326 name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
327 } while (name && TREE_CODE (name) == SSA_NAME);
332 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
333 converted to type TYPE.
335 This should disappear, but is needed so we can combine expressions and use
336 the fold() interfaces. Long term, we need to develop folding and combine
337 routines that deal with gimple exclusively. */
340 rhs_to_tree (tree type, gimple stmt)
342 location_t loc = gimple_location (stmt);
343 enum tree_code code = gimple_assign_rhs_code (stmt);
/* Dispatch on the rhs class: a binary rhs is folded into a
   two-operand tree, a unary rhs is rebuilt unfolded, and a single
   rhs is returned as-is.  */
344 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
345 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
346 gimple_assign_rhs2 (stmt));
347 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
348 return build1 (code, type, gimple_assign_rhs1 (stmt));
349 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
350 return gimple_assign_rhs1 (stmt);
355 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
356 the folded result in a form suitable for COND_EXPR_COND or
357 NULL_TREE, if there is no suitable simplified form. If
358 INVARIANT_ONLY is true only gimple_min_invariant results are
359 considered simplified. */
362 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
363 tree op0, tree op1, bool invariant_only)
/* CODE must be a comparison; this helper is only called for
   condition folding.  */
367 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
369 t = fold_binary_loc (loc, code, type, op0, op1);
373 /* Require that we got a boolean type out if we put one in. */
374 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
376 /* Canonicalize the combined condition for use in a COND_EXPR. */
377 t = canonicalize_cond_expr_cond (t);
379 /* Bail out if we required an invariant but didn't get one. */
380 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
386 /* Propagate from the ssa name definition statements of COND_EXPR
387 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
388 Returns zero if no statement was changed, one if there were
389 changes and two if cfg_cleanup needs to run.
391 This must be kept in sync with forward_propagate_into_cond. */
394 forward_propagate_into_gimple_cond (gimple stmt)
396 int did_something = 0;
397 location_t loc = gimple_location (stmt);
400 tree tmp = NULL_TREE;
401 tree name = NULL_TREE, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
403 bool single_use0_p = false, single_use1_p = false;
404 enum tree_code code = gimple_cond_code (stmt);
406 /* We can do tree combining on SSA_NAME and comparison expressions. */
407 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison)
409 /* For comparisons use the first operand, that is likely to
410 simplify comparisons against constants. */
411 if (TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
413 name = gimple_cond_lhs (stmt);
414 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
415 if (def_stmt && can_propagate_from (def_stmt))
417 tree op1 = gimple_cond_rhs (stmt);
/* Substitute the def's rhs for the lhs operand and try to fold
   the resulting comparison; require an invariant result unless
   the whole chain is single-use.  */
418 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
419 tmp = combine_cond_expr_cond (loc, code, boolean_type_node,
420 rhs0, op1, !single_use0_p);
423 /* If that wasn't successful, try the second operand. */
425 && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
427 tree op0 = gimple_cond_lhs (stmt);
428 name = gimple_cond_rhs (stmt);
429 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
430 if (!def_stmt || !can_propagate_from (def_stmt))
431 return did_something;
433 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
434 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, op0,
435 rhs1, !single_use1_p);
437 /* If that wasn't successful either, try both operands. */
440 && rhs1 != NULL_TREE)
441 tmp = combine_cond_expr_cond (loc, code, boolean_type_node, rhs0,
442 fold_convert_loc (loc,
445 !(single_use0_p && single_use1_p));
450 if (dump_file && tmp)
/* Dump the replacement; the condition tree is rebuilt only for
   the dump output.  */
452 tree cond = build2 (gimple_cond_code (stmt),
454 gimple_cond_lhs (stmt),
455 gimple_cond_rhs (stmt));
456 fprintf (dump_file, " Replaced '");
457 print_generic_expr (dump_file, cond, 0);
458 fprintf (dump_file, "' with '");
459 print_generic_expr (dump_file, tmp, 0);
460 fprintf (dump_file, "'\n");
463 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
466 /* Remove defining statements. */
467 remove_prop_source_from_use (name, NULL);
469 if (is_gimple_min_invariant (tmp))
471 else if (did_something == 0)
474 /* Continue combining. */
481 return did_something;
485 /* Propagate from the ssa name definition statements of COND_EXPR
486 in the rhs of statement STMT into the conditional if that simplifies it.
487 Returns zero if no statement was changed, one if there were
488 changes and two if cfg_cleanup needs to run.
490 This must be kept in sync with forward_propagate_into_gimple_cond. */
493 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
495 gimple stmt = gsi_stmt (*gsi_p);
496 location_t loc = gimple_location (stmt);
497 int did_something = 0;
500 tree tmp = NULL_TREE;
501 tree cond = gimple_assign_rhs1 (stmt);
502 tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
504 bool single_use0_p = false, single_use1_p = false;
506 /* We can do tree combining on SSA_NAME and comparison expressions. */
507 if (COMPARISON_CLASS_P (cond)
508 && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME)
510 /* For comparisons use the first operand, that is likely to
511 simplify comparisons against constants. */
512 name = TREE_OPERAND (cond, 0);
513 def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
514 if (def_stmt && can_propagate_from (def_stmt))
516 tree op1 = TREE_OPERAND (cond, 1);
517 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
518 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
520 rhs0, op1, !single_use0_p);
522 /* If that wasn't successful, try the second operand. */
524 && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
526 tree op0 = TREE_OPERAND (cond, 0);
527 name = TREE_OPERAND (cond, 1);
528 def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
529 if (!def_stmt || !can_propagate_from (def_stmt))
530 return did_something;
532 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
533 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
535 op0, rhs1, !single_use1_p);
537 /* If that wasn't successful either, try both operands. */
540 && rhs1 != NULL_TREE)
541 tmp = combine_cond_expr_cond (loc, TREE_CODE (cond),
544 fold_convert_loc (loc,
547 !(single_use0_p && single_use1_p));
549 else if (TREE_CODE (cond) == SSA_NAME)
/* For a bare SSA_NAME condition the name to look through is the
   condition itself; without this NAME is used uninitialized in the
   remove_prop_source_from_use call below.  */
name = cond;
552 def_stmt = get_prop_source_stmt (name, true, NULL);
/* BUGFIX: bail out when NO source statement was found (the guard
   previously read "if (def_stmt || ...)", which inverted the test
   and passed a NULL def_stmt to can_propagate_from).  This now
   matches the comparison case above and the mirror code in
   forward_propagate_into_gimple_cond.  */
553 if (!def_stmt || !can_propagate_from (def_stmt))
554 return did_something;
/* Canonicalize the bare name into a NE 0 comparison and fold.  */
556 rhs0 = gimple_assign_rhs1 (def_stmt);
557 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
558 build_int_cst (TREE_TYPE (rhs0), 0),
564 if (dump_file && tmp)
566 fprintf (dump_file, " Replaced '");
567 print_generic_expr (dump_file, cond, 0);
568 fprintf (dump_file, "' with '");
569 print_generic_expr (dump_file, tmp, 0);
570 fprintf (dump_file, "'\n");
573 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
574 stmt = gsi_stmt (*gsi_p);
577 /* Remove defining statements. */
578 remove_prop_source_from_use (name, NULL);
580 if (is_gimple_min_invariant (tmp))
582 else if (did_something == 0)
585 /* Continue combining. */
592 return did_something;
595 /* We've just substituted an ADDR_EXPR into stmt. Update all the
596 relevant data structures to match. */
599 tidy_after_forward_propagate_addr (gimple stmt)
601 /* We may have turned a trapping insn into a non-trapping insn. */
602 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
603 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
/* NOTE(review): the then-branch is elided in this listing;
   presumably it sets cfg_changed (declared above as "set to true
   if we delete EH edges") — confirm against the full source.  */
606 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
607 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
610 /* DEF_RHS contains the address of the 0th element in an array.
611 USE_STMT uses type of DEF_RHS to compute the address of an
612 arbitrary element within the array. The (variable) byte offset
613 of the element is contained in OFFSET.
615 We walk back through the use-def chains of OFFSET to verify that
616 it is indeed computing the offset of an element within the array
617 and extract the index corresponding to the given byte offset.
619 We then try to fold the entire address expression into a form
622 If we are successful, we replace the right hand side of USE_STMT
623 with the new address computation. */
626 forward_propagate_addr_into_variable_array_index (tree offset,
628 gimple_stmt_iterator *use_stmt_gsi)
631 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
/* Determine the element size (TUNIT) from the array type behind
   DEF_RHS; bail out unless it is a known host integer.  */
634 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
635 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
636 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
637 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
640 if (!host_integerp (tunit, 1))
643 /* Get the offset's defining statement. */
644 offset_def = SSA_NAME_DEF_STMT (offset);
646 /* Try to find an expression for a proper index. This is either a
647 multiplication expression by the element size or just the ssa name we came
648 along in case the element size is one. In that case, however, we do not
649 allow multiplications because they can be computing index to a higher
650 level dimension (PR 37861). */
651 if (integer_onep (tunit))
653 if (is_gimple_assign (offset_def)
654 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
661 /* The statement which defines OFFSET before type conversion
662 must be a simple GIMPLE_ASSIGN. */
663 if (!is_gimple_assign (offset_def))
666 /* The RHS of the statement which defines OFFSET must be a
667 multiplication of an object by the size of the array elements.
668 This implicitly verifies that the size of the array elements
670 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
671 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
672 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
674 /* The first operand to the MULT_EXPR is the desired index. */
675 index = gimple_assign_rhs1 (offset_def);
677 /* If we have idx * tunit + CST * tunit re-associate that. */
678 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
679 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
680 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
681 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
682 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
683 gimple_assign_rhs2 (offset_def),
684 tunit)) != NULL_TREE)
686 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
687 if (is_gimple_assign (offset_def2)
688 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
689 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
690 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
/* Rebuild the index as idx PLUS/MINUS (CST / tunit), preserving
   the original operation's code.  */
692 index = fold_build2 (gimple_assign_rhs_code (offset_def),
694 gimple_assign_rhs1 (offset_def2), tmp);
703 /* Replace the pointer addition with array indexing. */
704 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
705 true, GSI_SAME_STMT);
706 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
708 new_rhs = unshare_expr (def_rhs);
709 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
713 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
714 unshare_expr (TREE_OPERAND (def_rhs, 0)),
715 index, integer_zero_node, NULL_TREE);
716 new_rhs = build_fold_addr_expr (new_rhs);
717 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
718 TREE_TYPE (new_rhs)))
720 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
721 NULL_TREE, true, GSI_SAME_STMT);
722 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
726 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
727 use_stmt = gsi_stmt (*use_stmt_gsi);
729 /* That should have created gimple, so there is no need to
730 record information to undo the propagation. */
731 fold_stmt_inplace (use_stmt);
732 tidy_after_forward_propagate_addr (use_stmt);
736 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
737 ADDR_EXPR <whatever>.
739 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
740 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
741 node or for recovery of array indexing from pointer arithmetic.
743 Return true if the propagation was successful (the propagation can
744 be not totally successful, yet things may have been changed). */
747 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
748 gimple_stmt_iterator *use_stmt_gsi,
751 tree lhs, rhs, rhs2, array_ref;
752 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
753 enum tree_code rhs_code;
756 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
758 lhs = gimple_assign_lhs (use_stmt);
759 rhs_code = gimple_assign_rhs_code (use_stmt);
760 rhs = gimple_assign_rhs1 (use_stmt);
762 /* Trivial cases. The use statement could be a trivial copy or a
763 useless conversion. Recurse to the uses of the lhs as copyprop does
764 not copy through different variant pointers and FRE does not catch
765 all useless conversions. Treat the case of a single-use name and
766 a conversion to def_rhs type separate, though. */
767 if (TREE_CODE (lhs) == SSA_NAME
768 && ((rhs_code == SSA_NAME && rhs == name)
769 || CONVERT_EXPR_CODE_P (rhs_code)))
771 /* Only recurse if we don't deal with a single use or we cannot
772 do the propagation to the current statement. In particular
773 we can end up with a conversion needed for a non-invariant
774 address which we cannot do in a single statement. */
776 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
777 && (!is_gimple_min_invariant (def_rhs)
778 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
779 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
780 && (TYPE_PRECISION (TREE_TYPE (lhs))
781 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
782 return forward_propagate_addr_expr (lhs, def_rhs);
/* Otherwise propagate into this very statement: install the address
   as the new rhs and pick the matching rhs code.  */
784 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
785 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
786 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
788 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
792 /* Propagate through constant pointer adjustments. */
793 if (TREE_CODE (lhs) == SSA_NAME
794 && rhs_code == POINTER_PLUS_EXPR
796 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST
799 /* As we come here with non-invariant addresses in def_rhs we need
800 to make sure we can build a valid constant offsetted address
801 for further propagation. Simply rely on fold building that
802 and check after the fact. */
803 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
805 fold_convert (ptr_type_node,
806 gimple_assign_rhs2 (use_stmt)));
807 if (TREE_CODE (new_def_rhs) == MEM_REF
808 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
810 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
813 /* Recurse. If we could propagate into all uses of lhs do not
814 bother to replace into the current use but just pretend we did. */
815 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
816 && forward_propagate_addr_expr (lhs, new_def_rhs))
819 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
820 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
821 new_def_rhs, NULL_TREE);
822 else if (is_gimple_min_invariant (new_def_rhs))
823 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
824 new_def_rhs, NULL_TREE);
827 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
828 update_stmt (use_stmt);
832 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
833 ADDR_EXPR will not appear on the LHS. */
834 lhs = gimple_assign_lhs (use_stmt);
835 while (handled_component_p (lhs))
836 lhs = TREE_OPERAND (lhs, 0);
838 /* Now see if the LHS node is a MEM_REF using NAME. If so,
839 propagate the ADDR_EXPR into the use of NAME and fold the result. */
840 if (TREE_CODE (lhs) == MEM_REF
841 && TREE_OPERAND (lhs, 0) == name)
844 HOST_WIDE_INT def_rhs_offset;
845 /* If the address is invariant we can always fold it. */
846 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
849 double_int off = mem_ref_offset (lhs);
851 off = double_int_add (off,
852 shwi_to_double_int (def_rhs_offset));
853 if (TREE_CODE (def_rhs_base) == MEM_REF)
855 off = double_int_add (off, mem_ref_offset (def_rhs_base));
856 new_ptr = TREE_OPERAND (def_rhs_base, 0);
859 new_ptr = build_fold_addr_expr (def_rhs_base);
860 TREE_OPERAND (lhs, 0) = new_ptr;
861 TREE_OPERAND (lhs, 1)
862 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
863 tidy_after_forward_propagate_addr (use_stmt);
864 /* Continue propagating into the RHS if this was not the only use. */
868 /* If the LHS is a plain dereference and the value type is the same as
869 that of the pointed-to type of the address we can put the
870 dereferenced address on the LHS preserving the original alias-type. */
871 else if (gimple_assign_lhs (use_stmt) == lhs
872 && useless_type_conversion_p
873 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
874 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
876 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
877 tree new_offset, new_base, saved;
878 while (handled_component_p (*def_rhs_basep))
879 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Temporarily rewrite the base of DEF_RHS in place to build the new
   lhs, then restore it (SAVED) so DEF_RHS is unchanged for other
   uses.  */
880 saved = *def_rhs_basep;
881 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
883 new_base = TREE_OPERAND (*def_rhs_basep, 0);
885 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
886 TREE_OPERAND (*def_rhs_basep, 1), 0);
890 new_base = build_fold_addr_expr (*def_rhs_basep);
891 new_offset = TREE_OPERAND (lhs, 1);
893 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
894 new_base, new_offset);
895 gimple_assign_set_lhs (use_stmt,
896 unshare_expr (TREE_OPERAND (def_rhs, 0)));
897 *def_rhs_basep = saved;
898 tidy_after_forward_propagate_addr (use_stmt);
899 /* Continue propagating into the RHS if this was not the
905 /* We can have a struct assignment dereferencing our name twice.
906 Note that we didn't propagate into the lhs to not falsely
907 claim we did when propagating into the rhs. */
911 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
912 nodes from the RHS. */
913 rhs = gimple_assign_rhs1 (use_stmt);
914 if (TREE_CODE (rhs) == ADDR_EXPR)
915 rhs = TREE_OPERAND (rhs, 0);
916 while (handled_component_p (rhs))
917 rhs = TREE_OPERAND (rhs, 0);
919 /* Now see if the RHS node is a MEM_REF using NAME. If so,
920 propagate the ADDR_EXPR into the use of NAME and fold the result. */
921 if (TREE_CODE (rhs) == MEM_REF
922 && TREE_OPERAND (rhs, 0) == name)
925 HOST_WIDE_INT def_rhs_offset;
926 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
929 double_int off = mem_ref_offset (rhs);
931 off = double_int_add (off,
932 shwi_to_double_int (def_rhs_offset));
933 if (TREE_CODE (def_rhs_base) == MEM_REF)
935 off = double_int_add (off, mem_ref_offset (def_rhs_base));
936 new_ptr = TREE_OPERAND (def_rhs_base, 0);
939 new_ptr = build_fold_addr_expr (def_rhs_base);
940 TREE_OPERAND (rhs, 0) = new_ptr;
941 TREE_OPERAND (rhs, 1)
942 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
943 fold_stmt_inplace (use_stmt);
944 tidy_after_forward_propagate_addr (use_stmt);
947 /* If the RHS is a plain dereference and the value type is the same as
948 that of the pointed-to type of the address we can put the
949 dereferenced address on the RHS preserving the original alias-type. */
950 else if (gimple_assign_rhs1 (use_stmt) == rhs
951 && useless_type_conversion_p
952 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
953 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
955 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
956 tree new_offset, new_base, saved;
957 while (handled_component_p (*def_rhs_basep))
958 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
959 saved = *def_rhs_basep;
960 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
962 new_base = TREE_OPERAND (*def_rhs_basep, 0);
964 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
965 TREE_OPERAND (*def_rhs_basep, 1), 0);
969 new_base = build_fold_addr_expr (*def_rhs_basep);
970 new_offset = TREE_OPERAND (rhs, 1);
972 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
973 new_base, new_offset);
974 gimple_assign_set_rhs1 (use_stmt,
975 unshare_expr (TREE_OPERAND (def_rhs, 0)));
976 *def_rhs_basep = saved;
977 fold_stmt_inplace (use_stmt);
978 tidy_after_forward_propagate_addr (use_stmt);
983 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
985 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
986 || gimple_assign_rhs1 (use_stmt) != name)
989 /* The remaining cases are all for turning pointer arithmetic into
990 array indexing. They only apply when we have the address of
991 element zero in an array. If that is not the case then there
993 array_ref = TREE_OPERAND (def_rhs, 0);
994 if ((TREE_CODE (array_ref) != ARRAY_REF
995 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
996 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
997 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
1000 rhs2 = gimple_assign_rhs2 (use_stmt);
1001 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
1002 of the elements in X into &x[C1 + C2/element size]. */
1003 if (TREE_CODE (rhs2) == INTEGER_CST)
1005 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1006 TREE_TYPE (def_rhs),
1010 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1011 new_rhs = unshare_expr (new_rhs);
1012 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1014 if (!is_gimple_min_invariant (new_rhs))
1015 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1017 true, GSI_SAME_STMT);
1018 new_rhs = fold_convert (type, new_rhs);
1020 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1021 use_stmt = gsi_stmt (*use_stmt_gsi);
1022 update_stmt (use_stmt);
1023 tidy_after_forward_propagate_addr (use_stmt);
1028 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1029 converting a multiplication of an index by the size of the
1030 array elements, then the result is converted into the proper
1031 type for the arithmetic. */
1032 if (TREE_CODE (rhs2) == SSA_NAME
1033 && (TREE_CODE (array_ref) != ARRAY_REF
1034 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1035 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1036 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1037 different type than their operands. */
1038 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1039 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1044 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1046 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1047 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1048 node or for recovery of array indexing from pointer arithmetic.
1049 Returns true, if all uses have been propagated into. */
1052 forward_propagate_addr_expr (tree name, tree rhs)
1054 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1055 imm_use_iterator iter;
1058 bool single_use_p = has_single_use (name);
/* Visit every immediate use of NAME and try to propagate RHS into
   it via forward_propagate_addr_expr_1.  */
1060 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1065 /* If the use is not in a simple assignment statement, then
1066 there is nothing we can do. */
1067 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
1069 if (!is_gimple_debug (use_stmt))
1074 /* If the use is in a deeper loop nest, then we do not want
1075 to propagate non-invariant ADDR_EXPRs into the loop as that
1076 is likely adding expression evaluations into the loop. */
1077 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1078 && !is_gimple_min_invariant (rhs))
1085 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1086 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1088 /* If the use has moved to a different statement adjust
1089 the update machinery for the old statement too. */
1090 if (use_stmt != gsi_stmt (gsi))
1092 update_stmt (use_stmt);
1093 use_stmt = gsi_stmt (gsi);
1096 update_stmt (use_stmt);
1100 /* Remove intermediate now unused copy and conversion chains. */
1101 use_rhs = gimple_assign_rhs1 (use_stmt);
1103 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1104 && TREE_CODE (use_rhs) == SSA_NAME
1105 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1107 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1108 release_defs (use_stmt);
1109 gsi_remove (&gsi, true);
1116 /* Forward propagate the comparison defined in STMT like
1117 cond_1 = x CMP y to uses of the form
1121 Returns true if stmt is now unused. */
1124 forward_propagate_comparison (gimple stmt)
1126 tree name = gimple_assign_lhs (stmt);
1128 tree tmp = NULL_TREE;
1130 /* Don't propagate ssa names that occur in abnormal phis. */
1131 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1132 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1133 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1134 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1137 /* Do not un-cse comparisons. But propagate through copies. */
1138 use_stmt = get_prop_dest_stmt (name, &name)
1142 /* Conversion of the condition result to another integral type. */
1143 if (is_gimple_assign (use_stmt)
1144 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
1145 || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1147 || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1148 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
1150 tree lhs = gimple_assign_lhs (use_stmt);
1152 /* We can propagate the condition into a conversion. */
1153 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
1155 /* Avoid using fold here as that may create a COND_EXPR with
1156 non-boolean condition as canonical form. */
1157 tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
1158 gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
1160 /* We can propagate the condition into X op CST where op
1161 is EQ_EXPR or NE_EXPR and CST is either one or zero. */
1162 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
1164 && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
1165 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
1167 enum tree_code code = gimple_assign_rhs_code (use_stmt);
1168 tree cst = gimple_assign_rhs2 (use_stmt);
/* Rebuild the original comparison so it can be combined with the
   use's EQ/NE test against CST below.  */
1171 cond = build2 (gimple_assign_rhs_code (stmt),
1173 gimple_assign_rhs1 (stmt),
1174 gimple_assign_rhs2 (stmt));
1176 tmp = combine_cond_expr_cond (gimple_location (use_stmt),
1177 code, TREE_TYPE (lhs),
/* combine_cond_expr_cond returns NULL_TREE when no simplification
   was possible; bail out in that case.  */
1179 if (tmp == NULL_TREE)
1182 /* We can propagate the condition into a statement that
1183 computes the logical negation of the comparison result. */
1184 else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
1186 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
1187 bool nans = HONOR_NANS (TYPE_MODE (type));
1188 enum tree_code code;
/* invert_tree_comparison yields ERROR_MARK when the comparison
   cannot be safely inverted (e.g. in the presence of NaNs).  */
1189 code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1190 if (code == ERROR_MARK)
1193 tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1194 gimple_assign_rhs2 (stmt));
/* Install the combined/converted comparison as USE_STMT's new RHS.  */
1200 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1201 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1202 use_stmt = gsi_stmt (gsi);
1203 update_stmt (use_stmt);
1206 /* Remove defining statements. */
1207 remove_prop_source_from_use (name, stmt);
/* Dump the replacement for -fdump-tree-* -details.  */
1209 if (dump_file && (dump_flags & TDF_DETAILS))
1211 tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
1213 fprintf (dump_file, " Replaced '");
1214 print_generic_expr (dump_file, old_rhs, dump_flags);
1215 fprintf (dump_file, "' with '");
1216 print_generic_expr (dump_file, tmp, dump_flags);
1217 fprintf (dump_file, "'\n");
1226 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1227 If so, we can change STMT into lhs = y which can later be copy
1228 propagated. Similarly for negation.
1230 This could trivially be formulated as a forward propagation
1231 to immediate uses. However, we already had an implementation
1232 from DOM which used backward propagation via the use-def links.
1234 It turns out that backward propagation is actually faster as
1235 there's less work to do for each NOT/NEG expression we find.
1236 Backwards propagation needs to look at the statement in a single
1237 backlink. Forward propagation needs to look at potentially more
1238 than one forward link. */
1241 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1243 gimple stmt = gsi_stmt (*gsi_p);
1244 tree rhs = gimple_assign_rhs1 (stmt);
1245 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1247 /* See if the RHS_DEF_STMT has the same form as our statement. */
1248 if (is_gimple_assign (rhs_def_stmt)
1249 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1251 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1253 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1254 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1255 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y (or -(-y)) collapses to y: rewrite STMT's RHS to the inner
   operand directly.  */
1257 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1258 stmt = gsi_stmt (*gsi_p);
1264 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1265 the condition which we may be able to optimize better. */
1268 simplify_gimple_switch (gimple stmt)
1270 tree cond = gimple_switch_index (stmt);
1274 /* The optimization that we really care about is removing unnecessary
1275 casts. That will let us do much better in propagating the inferred
1276 constant at the switch target. */
1277 if (TREE_CODE (cond) == SSA_NAME)
1279 def_stmt = SSA_NAME_DEF_STMT (cond);
1280 if (is_gimple_assign (def_stmt))
1282 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1287 def = gimple_assign_rhs1 (def_stmt);
1289 #ifdef ENABLE_CHECKING
1290 /* ??? Why was Jeff testing this? We are gimple... */
1291 gcc_assert (is_gimple_val (def));
1294 to = TREE_TYPE (cond);
1295 ti = TREE_TYPE (def);
1297 /* If we have an extension that preserves value, then we
1298 can copy the source value into the switch. */
/* Compute the precision required to represent every value of the
   source type TI in the destination type TO.  An unsigned source
   feeding a signed destination needs one extra bit for the sign.  */
1300 need_precision = TYPE_PRECISION (ti);
1302 if (! INTEGRAL_TYPE_P (ti))
1304 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
1306 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1307 need_precision += 1;
1308 if (TYPE_PRECISION (to) < need_precision)
/* The cast is value-preserving: switch directly on the uncast
   operand.  */
1313 gimple_switch_set_index (stmt, def);
1321 /* For pointers p2 and p1 return p2 - p1 if the
1322 difference is known and constant, otherwise return NULL. */
1325 constant_pointer_difference (tree p1, tree p2)
/* Bound on how many definition links we chase per pointer.  */
1328 #define CPD_ITERATIONS 5
/* exps[i][j]/offs[i][j] record, for pointer i, the j-th base
   expression seen while peeling and its accumulated byte offset.  */
1329 tree exps[2][CPD_ITERATIONS];
1330 tree offs[2][CPD_ITERATIONS];
1333 for (i = 0; i < 2; i++)
1335 tree p = i ? p1 : p2;
1336 tree off = size_zero_node;
1338 enum tree_code code;
1340 /* For each of p1 and p2 we need to iterate at least
1341 twice, to handle ADDR_EXPR directly in p1/p2,
1342 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1343 on definition's stmt RHS. Iterate a few extra times. */
1347 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1349 if (TREE_CODE (p) == ADDR_EXPR)
1351 tree q = TREE_OPERAND (p, 0);
1352 HOST_WIDE_INT offset;
1353 tree base = get_addr_base_and_unit_offset (q, &offset);
1358 off = size_binop (PLUS_EXPR, off, size_int (offset));
/* &MEM_REF[ssa + c]: peel to the SSA pointer and fold the
   MEM_REF's constant offset into OFF.  */
1360 if (TREE_CODE (q) == MEM_REF
1361 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1363 p = TREE_OPERAND (q, 0);
1364 off = size_binop (PLUS_EXPR, off,
1365 double_int_to_tree (sizetype,
1366 mem_ref_offset (q)));
1375 if (TREE_CODE (p) != SSA_NAME)
1379 if (j == CPD_ITERATIONS)
/* Follow the SSA definition: POINTER_PLUS with a constant adds to
   OFF; ADDR_EXPR/NOP just forward the operand.  */
1381 stmt = SSA_NAME_DEF_STMT (p);
1382 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1384 code = gimple_assign_rhs_code (stmt);
1385 if (code == POINTER_PLUS_EXPR)
1387 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1389 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1390 p = gimple_assign_rhs1 (stmt);
1392 else if (code == ADDR_EXPR || code == NOP_EXPR)
1393 p = gimple_assign_rhs1 (stmt);
/* If both chains reached a common base expression, the difference
   of the accumulated offsets is the pointer difference.  */
1401 for (i = 0; i < cnt[0]; i++)
1402 for (j = 0; j < cnt[1]; j++)
1403 if (exps[0][i] == exps[1][j])
1404 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1409 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1411 memcpy (p, "abcd", 4);
1412 memset (p + 4, ' ', 3);
1414 memcpy (p, "abcd ", 7);
1415 call if the latter can be stored by pieces during expansion. */
1418 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1420 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
/* STMT1 is found through the virtual use chain: the store that
   dominates STMT2's memory state.  */
1421 tree vuse = gimple_vuse (stmt2);
1424 stmt1 = SSA_NAME_DEF_STMT (vuse);
1426 switch (DECL_FUNCTION_CODE (callee2))
1428 case BUILT_IN_MEMSET:
/* Only handle a plain 3-argument memset whose result is unused;
   the byte-buffer construction below assumes 8-bit units.  */
1429 if (gimple_call_num_args (stmt2) != 3
1430 || gimple_call_lhs (stmt2)
1432 || BITS_PER_UNIT != 8)
1437 tree ptr1, src1, str1, off1, len1, lhs1;
1438 tree ptr2 = gimple_call_arg (stmt2, 0);
1439 tree val2 = gimple_call_arg (stmt2, 1);
1440 tree len2 = gimple_call_arg (stmt2, 2);
1441 tree diff, vdef, new_str_cst;
1443 unsigned int ptr1_align;
1444 unsigned HOST_WIDE_INT src_len;
1446 use_operand_p use_p;
/* The memset value and length must be compile-time constants.  */
1448 if (!host_integerp (val2, 0)
1449 || !host_integerp (len2, 1))
1451 if (is_gimple_call (stmt1))
1453 /* If first stmt is a call, it needs to be memcpy
1454 or mempcpy, with string literal as second argument and
1456 callee1 = gimple_call_fndecl (stmt1);
1457 if (callee1 == NULL_TREE
1458 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1459 || gimple_call_num_args (stmt1) != 3)
1461 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1462 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1464 ptr1 = gimple_call_arg (stmt1, 0);
1465 src1 = gimple_call_arg (stmt1, 1);
1466 len1 = gimple_call_arg (stmt1, 2);
1467 lhs1 = gimple_call_lhs (stmt1);
1468 if (!host_integerp (len1, 1))
1470 str1 = string_constant (src1, &off1);
1471 if (str1 == NULL_TREE)
/* Validate that OFF1/LEN1 stay within the string literal and
   that the literal is a plain char array.  */
1473 if (!host_integerp (off1, 1)
1474 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1475 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1476 - tree_low_cst (off1, 1)) > 0
1477 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1478 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1479 != TYPE_MODE (char_type_node))
1482 else if (gimple_assign_single_p (stmt1))
1484 /* Otherwise look for length 1 memcpy optimized into
1486 ptr1 = gimple_assign_lhs (stmt1);
1487 src1 = gimple_assign_rhs1 (stmt1);
1488 if (TREE_CODE (ptr1) != MEM_REF
1489 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1490 || !host_integerp (src1, 0))
/* Normalize the single-char store so the code below can treat it
   exactly like a length-1 memcpy: destination address, length 1,
   offset 0, no builtin callee.  */
1492 ptr1 = build_fold_addr_expr (ptr1);
1493 callee1 = NULL_TREE;
1494 len1 = size_one_node;
1496 off1 = size_zero_node;
/* DIFF is the constant distance ptr2 - ptr1 (or ptr2 - lhs1 for
   mempcpy, whose result points past the copied region).  */
1502 diff = constant_pointer_difference (ptr1, ptr2);
1503 if (diff == NULL && lhs1 != NULL)
1505 diff = constant_pointer_difference (lhs1, ptr2);
1506 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1508 diff = size_binop (PLUS_EXPR, diff,
1509 fold_convert (sizetype, len1));
1511 /* If the difference between the second and first destination pointer
1512 is not constant, or is bigger than memcpy length, bail out. */
1514 || !host_integerp (diff, 1)
1515 || tree_int_cst_lt (len1, diff))
1518 /* Use maximum of difference plus memset length and memcpy length
1519 as the new memcpy length, if it is too big, bail out. */
1520 src_len = tree_low_cst (diff, 1);
1521 src_len += tree_low_cst (len2, 1);
1522 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1523 src_len = tree_low_cst (len1, 1);
1527 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1528 with bigger length will return different result. */
1529 if (lhs1 != NULL_TREE
1530 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1531 && (TREE_CODE (lhs1) != SSA_NAME
1532 || !single_imm_use (lhs1, &use_p, &use_stmt)
1533 || use_stmt != stmt2))
1536 /* If anything reads memory in between memcpy and memset
1537 call, the modified memcpy call might change it. */
1538 vdef = gimple_vdef (stmt1);
1540 && (!single_imm_use (vdef, &use_p, &use_stmt)
1541 || use_stmt != stmt2))
1544 ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
1545 /* Construct the new source string literal. */
1546 src_buf = XALLOCAVEC (char, src_len + 1);
1549 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1550 tree_low_cst (len1, 1));
1552 src_buf[0] = tree_low_cst (src1, 0);
/* Overlay the memset bytes at offset DIFF inside the merged buffer.  */
1553 memset (src_buf + tree_low_cst (diff, 1),
1554 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1555 src_buf[src_len] = '\0';
1556 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1557 handle embedded '\0's. */
1558 if (strlen (src_buf) != src_len)
1560 rtl_profile_for_bb (gimple_bb (stmt2));
1561 /* If the new memcpy wouldn't be emitted by storing the literal
1562 by pieces, this optimization might enlarge .rodata too much,
1563 as commonly used string literals couldn't be shared any
1565 if (!can_store_by_pieces (src_len,
1566 builtin_strncpy_read_str,
1567 src_buf, ptr1_align, false))
1570 new_str_cst = build_string_literal (src_len, src_buf);
1573 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1575 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1576 gimple_call_set_lhs (stmt1, NULL_TREE);
1577 gimple_call_set_arg (stmt1, 1, new_str_cst);
1578 gimple_call_set_arg (stmt1, 2,
1579 build_int_cst (TREE_TYPE (len1), src_len));
1580 update_stmt (stmt1);
/* The memset (STMT2) is fully subsumed by the widened memcpy.  */
1581 unlink_stmt_vdef (stmt2);
1582 gsi_remove (gsi_p, true);
1583 release_defs (stmt2);
1584 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1585 release_ssa_name (lhs1);
1590 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1591 assignment, remove STMT1 and change memset call into
1593 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1595 gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
1596 gimple_call_set_arg (stmt2, 0, ptr1);
1597 gimple_call_set_arg (stmt2, 1, new_str_cst);
1598 gimple_call_set_arg (stmt2, 2,
1599 build_int_cst (TREE_TYPE (len2), src_len));
1600 unlink_stmt_vdef (stmt1);
1601 gsi_remove (&gsi, true);
1602 release_defs (stmt1);
1603 update_stmt (stmt2);
1614 /* Run bitwise and assignments throug the folder. If the first argument is an
1615 ssa name that is itself a result of a typecast of an ADDR_EXPR to an
1616 integer, feed the ADDR_EXPR to the folder rather than the ssa name.
1620 simplify_bitwise_and (gimple_stmt_iterator *gsi, gimple stmt)
1623 tree arg1 = gimple_assign_rhs1 (stmt);
1624 tree arg2 = gimple_assign_rhs2 (stmt);
/* Only masks against an integer constant are interesting here.  */
1626 if (TREE_CODE (arg2) != INTEGER_CST)
1629 if (TREE_CODE (arg1) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (arg1))
1631 gimple def = SSA_NAME_DEF_STMT (arg1)
/* Look through an integral cast of an ADDR_EXPR so the folder can
   use known address alignment to simplify the mask.  */
1633 if (gimple_assign_cast_p (def)
1634 && INTEGRAL_TYPE_P (gimple_expr_type (def)))
1636 tree op = gimple_assign_rhs1 (def);
1638 if (TREE_CODE (op) == ADDR_EXPR)
1643 res = fold_binary_loc (gimple_location (stmt),
1644 BIT_AND_EXPR, TREE_TYPE (gimple_assign_lhs (stmt)),
/* Only commit the fold when it produced an invariant value.  */
1646 if (res && is_gimple_min_invariant (res))
1648 gimple_assign_set_rhs_from_tree (gsi, res);
1655 /* Perform re-associations of the plus or minus statement STMT that are
1656 always permitted. */
1659 associate_plusminus (gimple stmt)
1661 tree rhs1 = gimple_assign_rhs1 (stmt);
1662 tree rhs2 = gimple_assign_rhs2 (stmt);
1663 enum tree_code code = gimple_assign_rhs_code (stmt);
1664 gimple_stmt_iterator gsi;
1667 /* We can't reassociate at all for saturating types. */
1668 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1671 /* First contract negates. */
1676 /* A +- (-B) -> A -+ B. */
1677 if (TREE_CODE (rhs2) == SSA_NAME)
1679 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1680 if (is_gimple_assign (def_stmt)
1681 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
/* Flip PLUS<->MINUS and replace the negated operand by its input.  */
1683 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1684 gimple_assign_set_rhs_code (stmt, code);
1685 rhs2 = gimple_assign_rhs1 (def_stmt);
1686 gimple_assign_set_rhs2 (stmt, rhs2);
1687 gimple_set_modified (stmt, true);
1692 /* (-A) + B -> B - A. */
1693 if (TREE_CODE (rhs1) == SSA_NAME
1694 && code == PLUS_EXPR)
1696 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1697 if (is_gimple_assign (def_stmt)
1698 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR)
1701 gimple_assign_set_rhs_code (stmt, code);
1703 gimple_assign_set_rhs1 (stmt, rhs1);
1704 rhs2 = gimple_assign_rhs1 (def_stmt);
1705 gimple_assign_set_rhs2 (stmt, rhs2);
1706 gimple_set_modified (stmt, true);
1713 /* We can't reassociate floating-point or fixed-point plus or minus
1714 because of saturation to +-Inf. */
1715 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1716 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1719 /* Second match patterns that allow contracting a plus-minus pair
1720 irrespective of overflow issues.
1722 (A +- B) - A -> +- B
1724 (CST +- A) +- CST -> CST +- A
1725 (A + CST) +- CST -> A + CST
1728 A - (A +- B) -> -+ B
1729 A +- (B +- A) -> +- B
1730 CST +- (CST +- A) -> CST +- A
1731 CST +- (A +- CST) -> CST +- A
1734 via commutating the addition and contracting operations to zero
1735 by reassociation. */
1737 gsi = gsi_for_stmt (stmt);
/* Patterns keyed on the definition of RHS1.  */
1738 if (TREE_CODE (rhs1) == SSA_NAME)
1740 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1741 if (is_gimple_assign (def_stmt))
1743 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1744 if (def_code == PLUS_EXPR
1745 || def_code == MINUS_EXPR)
1747 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1748 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1749 if (operand_equal_p (def_rhs1, rhs2, 0)
1750 && code == MINUS_EXPR)
1752 /* (A +- B) - A -> +- B. */
1753 code = ((def_code == PLUS_EXPR)
1754 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1757 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1758 gcc_assert (gsi_stmt (gsi) == stmt);
1759 gimple_set_modified (stmt, true);
1761 else if (operand_equal_p (def_rhs2, rhs2, 0)
1762 && code != def_code)
1764 /* (A +- B) -+ B -> A. */
1765 code = TREE_CODE (def_rhs1);
1768 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1769 gcc_assert (gsi_stmt (gsi) == stmt);
1770 gimple_set_modified (stmt, true);
1772 else if (TREE_CODE (rhs2) == INTEGER_CST
1773 && TREE_CODE (def_rhs1) == INTEGER_CST)
1775 /* (CST +- A) +- CST -> CST +- A. */
/* fold_binary combines the two constants; only commit if the
   fold succeeded without overflow.  */
1776 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1778 if (cst && !TREE_OVERFLOW (cst))
1781 gimple_assign_set_rhs_code (stmt, code);
1783 gimple_assign_set_rhs1 (stmt, rhs1);
1785 gimple_assign_set_rhs2 (stmt, rhs2);
1786 gimple_set_modified (stmt, true);
1789 else if (TREE_CODE (rhs2) == INTEGER_CST
1790 && TREE_CODE (def_rhs2) == INTEGER_CST
1791 && def_code == PLUS_EXPR)
1793 /* (A + CST) +- CST -> A + CST. */
1794 tree cst = fold_binary (code, TREE_TYPE (rhs1),
1796 if (cst && !TREE_OVERFLOW (cst))
1799 gimple_assign_set_rhs_code (stmt, code);
1801 gimple_assign_set_rhs1 (stmt, rhs1);
1803 gimple_assign_set_rhs2 (stmt, rhs2);
1804 gimple_set_modified (stmt, true);
/* ~A + A and ~A + 1 simplifications (integral types only).  */
1808 else if (def_code == BIT_NOT_EXPR
1809 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
1811 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1812 if (code == PLUS_EXPR
1813 && operand_equal_p (def_rhs1, rhs2, 0))
1817 rhs1 = build_int_cst (TREE_TYPE (rhs2), -1);
1819 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1820 gcc_assert (gsi_stmt (gsi) == stmt);
1821 gimple_set_modified (stmt, true);
1823 else if (code == PLUS_EXPR
1824 && integer_onep (rhs1))
1830 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1831 gcc_assert (gsi_stmt (gsi) == stmt);
1832 gimple_set_modified (stmt, true);
/* Mirror patterns keyed on the definition of RHS2.  */
1838 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
1840 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1841 if (is_gimple_assign (def_stmt))
1843 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1844 if (def_code == PLUS_EXPR
1845 || def_code == MINUS_EXPR)
1847 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1848 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1849 if (operand_equal_p (def_rhs1, rhs1, 0)
1850 && code == MINUS_EXPR)
1852 /* A - (A +- B) -> -+ B. */
1853 code = ((def_code == PLUS_EXPR)
1854 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
1857 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1858 gcc_assert (gsi_stmt (gsi) == stmt);
1859 gimple_set_modified (stmt, true);
1861 else if (operand_equal_p (def_rhs2, rhs1, 0)
1862 && code != def_code)
1864 /* A +- (B +- A) -> +- B. */
1865 code = ((code == PLUS_EXPR)
1866 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
1869 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1870 gcc_assert (gsi_stmt (gsi) == stmt);
1871 gimple_set_modified (stmt, true);
1873 else if (TREE_CODE (rhs1) == INTEGER_CST
1874 && TREE_CODE (def_rhs1) == INTEGER_CST)
1876 /* CST +- (CST +- A) -> CST +- A. */
1877 tree cst = fold_binary (code, TREE_TYPE (rhs2),
1879 if (cst && !TREE_OVERFLOW (cst))
1881 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
1882 gimple_assign_set_rhs_code (stmt, code);
1884 gimple_assign_set_rhs1 (stmt, rhs1);
1886 gimple_assign_set_rhs2 (stmt, rhs2);
1887 gimple_set_modified (stmt, true);
1890 else if (TREE_CODE (rhs1) == INTEGER_CST
1891 && TREE_CODE (def_rhs2) == INTEGER_CST)
1893 /* CST +- (A +- CST) -> CST +- A. */
1894 tree cst = fold_binary (def_code == code
1895 ? PLUS_EXPR : MINUS_EXPR,
1898 if (cst && !TREE_OVERFLOW (cst))
1901 gimple_assign_set_rhs1 (stmt, rhs1);
1903 gimple_assign_set_rhs2 (stmt, rhs2);
1904 gimple_set_modified (stmt, true);
1908 else if (def_code == BIT_NOT_EXPR
1909 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
1911 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1912 if (code == PLUS_EXPR
1913 && operand_equal_p (def_rhs1, rhs1, 0))
1917 rhs1 = build_int_cst (TREE_TYPE (rhs1), -1);
1919 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1920 gcc_assert (gsi_stmt (gsi) == stmt);
1921 gimple_set_modified (stmt, true);
/* If any pattern fired, refold the statement in place.  */
1928 if (gimple_modified_p (stmt))
1930 fold_stmt_inplace (stmt);
1935 /* Main entry point for the forward propagation optimizer. */
1938 tree_ssa_forward_propagate_single_use_vars (void)
1941 unsigned int todoflags = 0;
1943 cfg_changed = false;
1947 gimple_stmt_iterator gsi;
1949 /* Note we update GSI within the loop as necessary. */
1950 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
1952 gimple stmt = gsi_stmt (gsi);
1954 /* If this statement sets an SSA_NAME to an address,
1955 try to propagate the address into the uses of the SSA_NAME. */
1956 if (is_gimple_assign (stmt))
1958 tree lhs = gimple_assign_lhs (stmt);
1959 tree rhs = gimple_assign_rhs1 (stmt);
1961 if (TREE_CODE (lhs) != SSA_NAME)
/* Dispatch on the RHS code: each arm hands the statement to the
   matching simplifier defined above.  */
1967 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
1968 /* Handle pointer conversions on invariant addresses
1969 as well, as this is valid gimple. */
1970 || (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1971 && TREE_CODE (rhs) == ADDR_EXPR
1972 && POINTER_TYPE_P (TREE_TYPE (lhs))))
1974 tree base = get_base_address (TREE_OPERAND (rhs, 0));
1977 || decl_address_invariant_p (base))
1978 && !stmt_references_abnormal_ssa_name (stmt)
1979 && forward_propagate_addr_expr (lhs, rhs))
/* All uses were propagated into; the definition is dead.  */
1981 release_defs (stmt);
1982 todoflags |= TODO_remove_unused_locals;
1983 gsi_remove (&gsi, true);
1988 else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1990 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
1991 /* ??? Better adjust the interface to that function
1992 instead of building new trees here. */
1993 && forward_propagate_addr_expr
1997 fold_build2 (MEM_REF,
1998 TREE_TYPE (TREE_TYPE (rhs)),
2002 gimple_assign_rhs2 (stmt))))))
2004 release_defs (stmt);
2005 todoflags |= TODO_remove_unused_locals;
2006 gsi_remove (&gsi, true);
2008 else if (is_gimple_min_invariant (rhs))
2010 /* Make sure to fold &a[0] + off_1 here. */
2011 fold_stmt_inplace (stmt);
2013 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
2019 else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
2020 || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
2021 && TREE_CODE (rhs) == SSA_NAME)
2023 simplify_not_neg_expr (&gsi);
2026 else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
2028 /* In this case the entire COND_EXPR is in rhs1. */
/* Defer overflow warnings so they are only emitted when the
   propagation actually changed the condition.  */
2030 fold_defer_overflow_warnings ();
2031 did_something = forward_propagate_into_cond (&gsi);
2032 stmt = gsi_stmt (gsi);
2033 if (did_something == 2)
2035 fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
2036 && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2039 else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
2042 if (forward_propagate_comparison (stmt))
2044 release_defs (stmt);
2045 todoflags |= TODO_remove_unused_locals;
2046 gsi_remove (&gsi, true);
2051 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
2053 simplify_bitwise_and (&gsi, stmt);
2056 else if (gimple_assign_rhs_code (stmt) == PLUS_EXPR
2057 || gimple_assign_rhs_code (stmt) == MINUS_EXPR)
2059 associate_plusminus (stmt);
2065 else if (gimple_code (stmt) == GIMPLE_SWITCH)
2067 simplify_gimple_switch (stmt);
2070 else if (gimple_code (stmt) == GIMPLE_COND)
2073 fold_defer_overflow_warnings ();
2074 did_something = forward_propagate_into_gimple_cond (stmt);
2075 if (did_something == 2)
2077 fold_undefer_overflow_warnings (did_something, stmt,
2078 WARN_STRICT_OVERFLOW_CONDITIONAL);
2081 else if (is_gimple_call (stmt))
2083 tree callee = gimple_call_fndecl (stmt);
/* Only normal builtins are candidates for call simplification.  */
2084 if (callee == NULL_TREE
2085 || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
2086 || !simplify_builtin_call (&gsi, callee))
2095 todoflags |= TODO_cleanup_cfg;
/* Pass gate: run forwprop only when -ftree-forwprop is enabled.  */
2101 gate_forwprop (void)
2103 return flag_tree_forwprop;
/* Pass descriptor registering the forward-propagation pass with the
   GIMPLE pass manager.  Requires CFG and SSA form; verifies SSA on
   completion.  */
2106 struct gimple_opt_pass pass_forwprop =
2110 "forwprop", /* name */
2111 gate_forwprop, /* gate */
2112 tree_ssa_forward_propagate_single_use_vars, /* execute */
2115 0, /* static_pass_number */
2116 TV_TREE_FORWPROP, /* tv_id */
2117 PROP_cfg | PROP_ssa, /* properties_required */
2118 0, /* properties_provided */
2119 0, /* properties_destroyed */
2120 0, /* todo_flags_start */
2124 | TODO_verify_ssa /* todo_flags_finish */