1 /* Forward propagation of expressions for single use variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "basic-block.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-flow.h"
31 #include "tree-pass.h"
32 #include "tree-dump.h"
33 #include "langhooks.h"
38 /* This pass propagates the RHS of assignment statements into use
39 sites of the LHS of the assignment. It's basically a specialized
40 form of tree combination. It is hoped all of this can disappear
41 when we have a generalized tree combiner.
43 One class of common cases we handle is forward propagating a single use
44 variable into a COND_EXPR.
48 if (x) goto ... else goto ...
50 Will be transformed into:
53 if (a COND b) goto ... else goto ...
55 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
57 Or (assuming c1 and c2 are constants):
61 if (x EQ/NEQ c2) goto ... else goto ...
63 Will be transformed into:
66 if (a EQ/NEQ (c2 - c1)) goto ... else goto ...
68 Similarly for x = a - c1.
74 if (x) goto ... else goto ...
76 Will be transformed into:
79 if (a == 0) goto ... else goto ...
81 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
82 For these cases, we propagate A into all, possibly more than one,
83 COND_EXPRs that use X.
89 if (x) goto ... else goto ...
91 Will be transformed into:
94 if (a != 0) goto ... else goto ...
96 (Assuming a is an integral type and x is a boolean or x is an
97 integral and a is a boolean.)
99 Similarly for the tests (x == 0), (x != 0), (x == 1) and (x != 1).
100 For these cases, we propagate A into all, possibly more than one,
101 COND_EXPRs that use X.
103 In addition to eliminating the variable and the statement which assigns
104 a value to the variable, we may be able to later thread the jump without
105 adding insane complexity in the dominator optimizer.
107 Also note these transformations can cascade. We handle this by having
108 a worklist of COND_EXPR statements to examine. As we make a change to
109 a statement, we put it back on the worklist to examine on the next
110 iteration of the main loop.
112 A second class of propagation opportunities arises for ADDR_EXPR
123 ptr = (type1*)&type2var;
126 Will get turned into (if type1 and type2 are the same size
127 and neither have volatile on them):
128 res = VIEW_CONVERT_EXPR<type1>(type2var)
133 ptr2 = ptr + <constant>;
137 ptr2 = &x[constant/elementsize];
142 offset = index * element_size;
143 offset_p = (pointer) offset;
144 ptr2 = ptr + offset_p
146 Will get turned into:
154 Provided that decl has known alignment >= 2, will get turned into
158 We also propagate casts into SWITCH_EXPR and COND_EXPR conditions to
159 allow us to remove the cast and {NOT_EXPR,NEG_EXPR} into a subsequent
162 This will (of course) be extended as other needs arise. */
/* Forward declarations.  forward_propagate_addr_expr is declared early
   because forward_propagate_addr_expr_1 (defined later in this file)
   recurses into it; rhs_to_tree is used by the comparison-combining
   helpers before its definition.  */
164 static bool forward_propagate_addr_expr (tree name, tree rhs);
166 /* Set to true if we delete EH edges during the optimization. */
167 static bool cfg_changed;
169 static tree rhs_to_tree (tree type, gimple stmt);
171 /* Get the next statement we can propagate NAME's value into skipping
172 trivial copies. Returns the statement that is suitable as a
173 propagation destination or NULL_TREE if there is no such one.
174 This only returns destinations in a single-use chain. FINAL_NAME_P
175 if non-NULL is written to the ssa name that represents the use. */
/* Walks forward from NAME through a chain of single-use trivial SSA
   copies and stops at the first statement that is not such a copy; the
   ssa name reaching that statement is stored through FINAL_NAME_P.
   NOTE(review): gaps in the embedded line numbers show that the return
   type, the surrounding loop construct and the return statements are
   elided from this dump — presumably a loop returning the found
   use_stmt or NULL; confirm against the full source.  */
178 get_prop_dest_stmt (tree name, tree *final_name_p)
184 /* If name has multiple uses, bail out. */
185 if (!single_imm_use (name, &use, &use_stmt))
188 /* If this is not a trivial copy, we found it. */
189 if (!gimple_assign_ssa_name_copy_p (use_stmt)
190 || gimple_assign_rhs1 (use_stmt) != name)
193 /* Continue searching uses of the copy destination. */
194 name = gimple_assign_lhs (use_stmt);
198 *final_name_p = name;
203 /* Get the statement we can propagate from into NAME skipping
204 trivial copies. Returns the statement which defines the
205 propagation source or NULL_TREE if there is no such one.
206 If SINGLE_USE_ONLY is set considers only sources which have
207 a single use chain up to NAME. If SINGLE_USE_P is non-null,
208 it is set to whether the chain to NAME is a single use chain
209 or not. SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set. */
/* Walks backward from NAME through trivial SSA copies (and pointer
   conversions) to the statement that defines the propagation source.
   SINGLE_USE tracks whether every link in the chain had exactly one
   use; it is reported through SINGLE_USE_P unless SINGLE_USE_ONLY.
   NOTE(review): the loop construct, brace lines and return statements
   are elided from this dump (gaps in the embedded numbering).  */
212 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
214 bool single_use = true;
217 gimple def_stmt = SSA_NAME_DEF_STMT (name);
219 if (!has_single_use (name))
226 /* If name is defined by a PHI node or is the default def, bail out. */
227 if (!is_gimple_assign (def_stmt))
230 /* If def_stmt is not a simple copy, we possibly found it. */
231 if (!gimple_assign_ssa_name_copy_p (def_stmt))
/* Not a copy: report the single-use property (unless the caller asked
   for single-use chains only) before deciding whether def_stmt itself
   is usable.  */
235 if (!single_use_only && single_use_p)
236 *single_use_p = single_use;
238 /* We can look through pointer conversions in the search
239 for a useful stmt for the comparison folding. */
240 rhs = gimple_assign_rhs1 (def_stmt);
241 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
242 && TREE_CODE (rhs) == SSA_NAME
243 && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
244 && POINTER_TYPE_P (TREE_TYPE (rhs)))
251 /* Continue searching the def of the copy source name. */
252 name = gimple_assign_rhs1 (def_stmt);
257 /* Checks if the destination ssa name in DEF_STMT can be used as
258 propagation source. Returns true if so, otherwise false. */
/* Predicate: may the RHS of assignment DEF_STMT be forward-propagated
   into uses of its LHS?  Rejects volatile operations, loads
   (tcc_reference / tcc_declaration RHS), abnormal-PHI names and
   pointer-to-function conversions; constants are always allowed.
   NOTE(review): the return statements for each branch are elided from
   this dump (gaps in the embedded numbering).  */
261 can_propagate_from (gimple def_stmt)
263 gcc_assert (is_gimple_assign (def_stmt));
265 /* If the rhs has side-effects we cannot propagate from it. */
266 if (gimple_has_volatile_ops (def_stmt))
269 /* If the rhs is a load we cannot propagate from it. */
270 if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
271 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
274 /* Constants can be always propagated. */
275 if (gimple_assign_single_p (def_stmt)
276 && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
279 /* We cannot propagate ssa names that occur in abnormal phi nodes. */
280 if (stmt_references_abnormal_ssa_name (def_stmt))
283 /* If the definition is a conversion of a pointer to a function type,
284 then we can not apply optimizations as some targets require
285 function pointers to be canonicalized and in this case this
286 optimization could eliminate a necessary canonicalization. */
287 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
289 tree rhs = gimple_assign_rhs1 (def_stmt);
290 if (POINTER_TYPE_P (TREE_TYPE (rhs))
291 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
298 /* Remove a chain of dead statements starting at the definition of
299 NAME. The chain is linked via the first operand of the defining statements.
300 If NAME was replaced in its only use then this function can be used
301 to clean up dead stmts. The function handles already released SSA
303 Returns true if cleanup-cfg has to run. */
/* Removes the now-dead defining statement of NAME (and, via the
   do/while loop whose head is elided here, the chain of defs reached
   through the removed statement's first operand).  Skips names that
   are freed, default defs, or still used, and stmts that are PHIs or
   have side effects.  Returns whether EH-edge purging made a CFG
   cleanup necessary — note the local cfg_changed deliberately shadows
   the file-scope flag of the same name.  */
306 remove_prop_source_from_use (tree name)
308 gimple_stmt_iterator gsi;
310 bool cfg_changed = false;
315 if (SSA_NAME_IN_FREE_LIST (name)
316 || SSA_NAME_IS_DEFAULT_DEF (name)
317 || !has_zero_uses (name))
320 stmt = SSA_NAME_DEF_STMT (name);
321 if (gimple_code (stmt) == GIMPLE_PHI
322 || gimple_has_side_effects (stmt))
/* Unlink the statement's virtual operand chain before removing it, then
   purge any EH edges that became dead.  */
325 bb = gimple_bb (stmt);
326 gsi = gsi_for_stmt (stmt);
327 unlink_stmt_vdef (stmt);
328 gsi_remove (&gsi, true);
330 cfg_changed |= gimple_purge_dead_eh_edges (bb);
/* Follow the first RHS operand to continue the dead-chain removal.  */
332 name = is_gimple_assign (stmt) ? gimple_assign_rhs1 (stmt) : NULL_TREE;
333 } while (name && TREE_CODE (name) == SSA_NAME);
338 /* Return the rhs of a gimple_assign STMT in a form of a single tree,
339 converted to type TYPE.
341 This should disappear, but is needed so we can combine expressions and use
342 the fold() interfaces. Long term, we need to develop folding and combine
343 routines that deal with gimple exclusively . */
/* Materializes the RHS of gimple_assign STMT as a single GENERIC tree
   of type TYPE, dispatching on the RHS class (ternary / binary / unary
   / single).  The fall-through case after the final else-if is elided
   from this dump — presumably gcc_unreachable; confirm against the
   full source.  */
346 rhs_to_tree (tree type, gimple stmt)
348 location_t loc = gimple_location (stmt);
349 enum tree_code code = gimple_assign_rhs_code (stmt);
350 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
351 return fold_build3_loc (loc, code, type, gimple_assign_rhs1 (stmt),
352 gimple_assign_rhs2 (stmt),
353 gimple_assign_rhs3 (stmt));
354 else if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
355 return fold_build2_loc (loc, code, type, gimple_assign_rhs1 (stmt),
356 gimple_assign_rhs2 (stmt));
357 else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
358 return build1 (code, type, gimple_assign_rhs1 (stmt))
359 else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
360 return gimple_assign_rhs1 (stmt);
365 /* Combine OP0 CODE OP1 in the context of a COND_EXPR. Returns
366 the folded result in a form suitable for COND_EXPR_COND or
367 NULL_TREE, if there is no suitable simplified form. If
368 INVARIANT_ONLY is true only gimple_min_invariant results are
369 considered simplified. */
/* Folds the comparison OP0 CODE OP1 and canonicalizes the result for
   use as a COND_EXPR condition.  Returns NULL_TREE when folding fails,
   canonicalization fails, or INVARIANT_ONLY was requested and the
   result is not gimple_min_invariant.  NOTE(review): the early-return
   on fold failure and the final return of T are elided from this dump
   (gaps in the embedded numbering).  */
372 combine_cond_expr_cond (location_t loc, enum tree_code code, tree type,
373 tree op0, tree op1, bool invariant_only)
377 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
379 t = fold_binary_loc (loc, code, type, op0, op1);
383 /* Require that we got a boolean type out if we put one in. */
384 gcc_assert (TREE_CODE (TREE_TYPE (t)) == TREE_CODE (type));
386 /* Canonicalize the combined condition for use in a COND_EXPR. */
387 t = canonicalize_cond_expr_cond (t);
389 /* Bail out if we required an invariant but didn't get one. */
390 if (!t || (invariant_only && !is_gimple_min_invariant (t)))
396 /* Combine the comparison OP0 CODE OP1 at LOC with the defining statements
397 of its operand. Return a new comparison tree or NULL_TREE if there
398 were no simplifying combines. */
/* Core combiner for comparisons: tries to fold OP0 CODE OP1 after
   substituting the defining statement of OP0, then of OP1, then of
   both.  Non-single-use sources are only accepted when the fold yields
   an invariant (the !single_use*_p arguments to combine_cond_expr_cond
   enforce this, avoiding un-CSE of shared subexpressions).
   NOTE(review): parameter lines for op0/op1, brace lines and the
   success-return paths are elided from this dump.  */
401 forward_propagate_into_comparison_1 (location_t loc,
402 enum tree_code code, tree type,
405 tree tmp = NULL_TREE;
406 tree rhs0 = NULL_TREE, rhs1 = NULL_TREE;
407 bool single_use0_p = false, single_use1_p = false;
409 /* For comparisons use the first operand, that is likely to
410 simplify comparisons against constants. */
411 if (TREE_CODE (op0) == SSA_NAME)
413 gimple def_stmt = get_prop_source_stmt (op0, false, &single_use0_p);
414 if (def_stmt && can_propagate_from (def_stmt))
416 rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
417 tmp = combine_cond_expr_cond (loc, code, type,
418 rhs0, op1, !single_use0_p);
424 /* If that wasn't successful, try the second operand. */
425 if (TREE_CODE (op1) == SSA_NAME)
427 gimple def_stmt = get_prop_source_stmt (op1, false, &single_use1_p);
428 if (def_stmt && can_propagate_from (def_stmt))
430 rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
431 tmp = combine_cond_expr_cond (loc, code, type,
432 op0, rhs1, !single_use1_p);
438 /* If that wasn't successful either, try both operands. */
439 if (rhs0 != NULL_TREE
440 && rhs1 != NULL_TREE)
441 tmp = combine_cond_expr_cond (loc, code, type,
443 !(single_use0_p && single_use1_p));
448 /* Propagate from the ssa name definition statements of the assignment
449 from a comparison at *GSI into the conditional if that simplifies it.
450 Returns 1 if the stmt was modified and 2 if the CFG needs cleanup,
451 otherwise returns 0. */
/* Propagates defining statements into the comparison assignment at
   *GSI.  On success replaces the RHS, removes now-dead source defs,
   and returns 2 if CFG cleanup is needed, 1 if only the stmt changed
   (0-return path for "no change" is elided from this dump).  */
454 forward_propagate_into_comparison (gimple_stmt_iterator *gsi)
456 gimple stmt = gsi_stmt (*gsi);
458 bool cfg_changed = false;
459 tree rhs1 = gimple_assign_rhs1 (stmt);
460 tree rhs2 = gimple_assign_rhs2 (stmt);
462 /* Combine the comparison with defining statements. */
463 tmp = forward_propagate_into_comparison_1 (gimple_location (stmt),
464 gimple_assign_rhs_code (stmt),
466 (gimple_assign_lhs (stmt)),
470 gimple_assign_set_rhs_from_tree (gsi, tmp);
/* The old comparison operands may now be dead — clean up their
   defining chains; EH-edge purging may require cfg cleanup.  */
472 if (TREE_CODE (rhs1) == SSA_NAME)
473 cfg_changed |= remove_prop_source_from_use (rhs1);
474 if (TREE_CODE (rhs2) == SSA_NAME)
475 cfg_changed |= remove_prop_source_from_use (rhs2);
476 return cfg_changed ? 2 : 1;
482 /* Propagate from the ssa name definition statements of COND_EXPR
483 in GIMPLE_COND statement STMT into the conditional if that simplifies it.
484 Returns zero if no statement was changed, one if there were
485 changes and two if cfg_cleanup needs to run.
487 This must be kept in sync with forward_propagate_into_cond. */
/* Same as forward_propagate_into_comparison but for a GIMPLE_COND
   statement STMT: combines the condition's operands with their
   definitions and rewrites the condition in place.  Returns 2 when a
   CFG cleanup is needed (including when the condition folded to an
   invariant, which makes an edge unexecutable), 1 when only the stmt
   changed.  NOTE(review): the "tmp == NULL / no change" return path is
   elided from this dump.  */
490 forward_propagate_into_gimple_cond (gimple stmt)
492 location_t loc = gimple_location (stmt);
494 enum tree_code code = gimple_cond_code (stmt);
495 bool cfg_changed = false;
496 tree rhs1 = gimple_cond_lhs (stmt);
497 tree rhs2 = gimple_cond_rhs (stmt);
499 /* We can do tree combining on SSA_NAME and comparison expressions. */
500 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
503 tmp = forward_propagate_into_comparison_1 (loc, code,
/* Dump the replacement for -fdump-tree-* diagnostics.  */
508 if (dump_file && tmp)
510 fprintf (dump_file, " Replaced '");
511 print_gimple_expr (dump_file, stmt, 0, 0);
512 fprintf (dump_file, "' with '");
513 print_generic_expr (dump_file, tmp, 0);
514 fprintf (dump_file, "'\n");
517 gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
520 if (TREE_CODE (rhs1) == SSA_NAME)
521 cfg_changed |= remove_prop_source_from_use (rhs1);
522 if (TREE_CODE (rhs2) == SSA_NAME)
523 cfg_changed |= remove_prop_source_from_use (rhs2);
524 return (cfg_changed || is_gimple_min_invariant (tmp)) ? 2 : 1;
531 /* Propagate from the ssa name definition statements of COND_EXPR
532 in the rhs of statement STMT into the conditional if that simplifies it.
533 Returns zero if no statement was changed, one if there were
534 changes and two if cfg_cleanup needs to run.
536 This must be kept in sync with forward_propagate_into_gimple_cond. */
/* COND_EXPR-on-RHS variant of forward_propagate_into_gimple_cond: the
   condition lives in rhs1 of an assignment.  A plain SSA_NAME condition
   is handled by rewriting it as NAME != 0 against its (single-use-only)
   source definition.  Returns 2 when the condition became invariant
   (cfg cleanup may fold the COND_EXPR away), 1 on other changes.
   NOTE(review): brace lines, the bail-out returns and part of the
   combine_cond_expr_cond argument list are elided from this dump.  */
539 forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
541 gimple stmt = gsi_stmt (*gsi_p);
542 location_t loc = gimple_location (stmt);
543 tree tmp = NULL_TREE;
544 tree cond = gimple_assign_rhs1 (stmt);
546 /* We can do tree combining on SSA_NAME and comparison expressions. */
547 if (COMPARISON_CLASS_P (cond))
548 tmp = forward_propagate_into_comparison_1 (loc, TREE_CODE (cond),
550 TREE_OPERAND (cond, 0),
551 TREE_OPERAND (cond, 1));
552 else if (TREE_CODE (cond) == SSA_NAME)
554 tree name = cond, rhs0;
555 gimple def_stmt = get_prop_source_stmt (name, true, NULL);
556 if (!def_stmt || !can_propagate_from (def_stmt))
/* Try to fold "source-RHS != 0" as the new condition.  */
559 rhs0 = gimple_assign_rhs1 (def_stmt);
560 tmp = combine_cond_expr_cond (loc, NE_EXPR, boolean_type_node, rhs0,
561 build_int_cst (TREE_TYPE (rhs0), 0),
567 if (dump_file && tmp)
569 fprintf (dump_file, " Replaced '");
570 print_generic_expr (dump_file, cond, 0);
571 fprintf (dump_file, "' with '");
572 print_generic_expr (dump_file, tmp, 0);
573 fprintf (dump_file, "'\n");
576 gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
577 stmt = gsi_stmt (*gsi_p);
580 return is_gimple_min_invariant (tmp) ? 2 : 1;
586 /* We've just substituted an ADDR_EXPR into stmt. Update all the
587 relevant data structures to match. */
/* Post-propagation bookkeeping for STMT after an ADDR_EXPR was
   substituted into it: purge EH edges made dead (setting the
   file-scope cfg_changed — the assignment line is elided from this
   dump) and recompute TREE_INVARIANT/TREE_CONSTANT on a new
   ADDR_EXPR RHS.  */
590 tidy_after_forward_propagate_addr (gimple stmt)
592 /* We may have turned a trapping insn into a non-trapping insn. */
593 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
594 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
597 if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
598 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
601 /* DEF_RHS contains the address of the 0th element in an array.
602 USE_STMT uses type of DEF_RHS to compute the address of an
603 arbitrary element within the array. The (variable) byte offset
604 of the element is contained in OFFSET.
606 We walk back through the use-def chains of OFFSET to verify that
607 it is indeed computing the offset of an element within the array
608 and extract the index corresponding to the given byte offset.
610 We then try to fold the entire address expression into a form
613 If we are successful, we replace the right hand side of USE_STMT
614 with the new address computation. */
/* Turns "ptr = &array; ... offset = index * elt_size; ptr2 = ptr + offset"
   into direct array indexing &array[index] in the POINTER_PLUS use at
   *USE_STMT_GSI.  DEF_RHS is the ADDR_EXPR (parameter line elided from
   this dump).  Walks OFFSET's def chain to recover the element index,
   handles re-association of idx*tunit +/- CST*tunit, and rewrites the
   use statement's RHS.  NOTE(review): brace lines, several failure
   returns and the final return are elided (gaps in the embedded
   numbering).  */
617 forward_propagate_addr_into_variable_array_index (tree offset,
619 gimple_stmt_iterator *use_stmt_gsi)
622 gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
/* TUNIT is the byte size of one array element, taken from the type of
   what DEF_RHS addresses (either an ARRAY_REF or an array-typed
   object); it must be a host-representable unsigned integer.  */
625 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
626 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
627 else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
628 tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
631 if (!host_integerp (tunit, 1))
634 /* Get the offset's defining statement. */
635 offset_def = SSA_NAME_DEF_STMT (offset);
637 /* Try to find an expression for a proper index. This is either a
638 multiplication expression by the element size or just the ssa name we came
639 along in case the element size is one. In that case, however, we do not
640 allow multiplications because they can be computing index to a higher
641 level dimension (PR 37861). */
642 if (integer_onep (tunit))
644 if (is_gimple_assign (offset_def)
645 && gimple_assign_rhs_code (offset_def) == MULT_EXPR)
652 /* The statement which defines OFFSET before type conversion
653 must be a simple GIMPLE_ASSIGN. */
654 if (!is_gimple_assign (offset_def))
657 /* The RHS of the statement which defines OFFSET must be a
658 multiplication of an object by the size of the array elements.
659 This implicitly verifies that the size of the array elements
661 if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
662 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
663 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), tunit))
665 /* The first operand to the MULT_EXPR is the desired index. */
666 index = gimple_assign_rhs1 (offset_def);
668 /* If we have idx * tunit + CST * tunit re-associate that. */
669 else if ((gimple_assign_rhs_code (offset_def) == PLUS_EXPR
670 || gimple_assign_rhs_code (offset_def) == MINUS_EXPR)
671 && TREE_CODE (gimple_assign_rhs1 (offset_def)) == SSA_NAME
672 && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
673 && (tmp = div_if_zero_remainder (EXACT_DIV_EXPR,
674 gimple_assign_rhs2 (offset_def),
675 tunit)) != NULL_TREE)
677 gimple offset_def2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (offset_def));
678 if (is_gimple_assign (offset_def2)
679 && gimple_assign_rhs_code (offset_def2) == MULT_EXPR
680 && TREE_CODE (gimple_assign_rhs2 (offset_def2)) == INTEGER_CST
681 && tree_int_cst_equal (gimple_assign_rhs2 (offset_def2), tunit))
/* index = inner_index PLUS/MINUS (CST / tunit).  */
683 index = fold_build2 (gimple_assign_rhs_code (offset_def),
685 gimple_assign_rhs1 (offset_def2), tmp);
694 /* Replace the pointer addition with array indexing. */
695 index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
696 true, GSI_SAME_STMT);
697 if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
/* Reuse the existing ARRAY_REF shape, replacing only its index.  */
699 new_rhs = unshare_expr (def_rhs);
700 TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
/* Otherwise build &array[index] from scratch.  */
704 new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
705 unshare_expr (TREE_OPERAND (def_rhs, 0)),
706 index, integer_zero_node, NULL_TREE);
707 new_rhs = build_fold_addr_expr (new_rhs);
708 if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
709 TREE_TYPE (new_rhs)))
711 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
712 NULL_TREE, true, GSI_SAME_STMT);
713 new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
717 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
718 use_stmt = gsi_stmt (*use_stmt_gsi);
720 /* That should have created gimple, so there is no need to
721 record information to undo the propagation. */
722 fold_stmt_inplace (use_stmt);
723 tidy_after_forward_propagate_addr (use_stmt);
727 /* NAME is a SSA_NAME representing DEF_RHS which is of the form
728 ADDR_EXPR <whatever>.
730 Try to forward propagate the ADDR_EXPR into the use USE_STMT.
731 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
732 node or for recovery of array indexing from pointer arithmetic.
734 Return true if the propagation was successful (the propagation can
735 be not totally successful, yet things may have been changed). */
/* Attempts to propagate the ADDR_EXPR DEF_RHS (defined for ssa NAME)
   into the single use statement at *USE_STMT_GSI.  Handles, in order:
   trivial copies/conversions (recursing to the copy's uses), constant
   POINTER_PLUS adjustments (folded into a new ADDR_EXPR), MEM_REFs of
   NAME on the LHS and on the RHS, and finally pointer arithmetic that
   can become array indexing.  NOTE(review): this dump elides many
   lines (braces, returns, a SINGLE_USE_P parameter presumably on the
   elided line after 739, and parts of several call argument lists);
   comments below describe only what is visible.  */
738 forward_propagate_addr_expr_1 (tree name, tree def_rhs,
739 gimple_stmt_iterator *use_stmt_gsi,
742 tree lhs, rhs, rhs2, array_ref;
743 gimple use_stmt = gsi_stmt (*use_stmt_gsi);
744 enum tree_code rhs_code;
747 gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
749 lhs = gimple_assign_lhs (use_stmt);
750 rhs_code = gimple_assign_rhs_code (use_stmt);
751 rhs = gimple_assign_rhs1 (use_stmt);
753 /* Trivial cases. The use statement could be a trivial copy or a
754 useless conversion. Recurse to the uses of the lhs as copyprop does
755 not copy through different variant pointers and FRE does not catch
756 all useless conversions. Treat the case of a single-use name and
757 a conversion to def_rhs type separate, though. */
758 if (TREE_CODE (lhs) == SSA_NAME
759 && ((rhs_code == SSA_NAME && rhs == name)
760 || CONVERT_EXPR_CODE_P (rhs_code)))
762 /* Only recurse if we don't deal with a single use or we cannot
763 do the propagation to the current statement. In particular
764 we can end up with a conversion needed for a non-invariant
765 address which we cannot do in a single statement. */
767 || (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs))
768 && (!is_gimple_min_invariant (def_rhs)
769 || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
770 && POINTER_TYPE_P (TREE_TYPE (def_rhs))
771 && (TYPE_PRECISION (TREE_TYPE (lhs))
772 > TYPE_PRECISION (TREE_TYPE (def_rhs)))))))
773 return forward_propagate_addr_expr (lhs, def_rhs);
/* Single-use case: substitute DEF_RHS directly into this statement,
   keeping an explicit NOP when the types are not trivially
   convertible.  */
775 gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
776 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
777 gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
779 gimple_assign_set_rhs_code (use_stmt, NOP_EXPR);
783 /* Propagate through constant pointer adjustments. */
784 if (TREE_CODE (lhs) == SSA_NAME
785 && rhs_code == POINTER_PLUS_EXPR
787 && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
790 /* As we come here with non-invariant addresses in def_rhs we need
791 to make sure we can build a valid constant offsetted address
792 for further propagation. Simply rely on fold building that
793 and check after the fact. */
794 new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
796 fold_convert (ptr_type_node,
797 gimple_assign_rhs2 (use_stmt)));
798 if (TREE_CODE (new_def_rhs) == MEM_REF
799 && !is_gimple_mem_ref_addr (TREE_OPERAND (new_def_rhs, 0)))
801 new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
804 /* Recurse. If we could propagate into all uses of lhs do not
805 bother to replace into the current use but just pretend we did. */
806 if (TREE_CODE (new_def_rhs) == ADDR_EXPR
807 && forward_propagate_addr_expr (lhs, new_def_rhs))
810 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
811 gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
812 new_def_rhs, NULL_TREE);
813 else if (is_gimple_min_invariant (new_def_rhs))
814 gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
815 new_def_rhs, NULL_TREE);
818 gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
819 update_stmt (use_stmt);
823 /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
824 ADDR_EXPR will not appear on the LHS. */
825 lhs = gimple_assign_lhs (use_stmt);
826 while (handled_component_p (lhs))
827 lhs = TREE_OPERAND (lhs, 0);
829 /* Now see if the LHS node is a MEM_REF using NAME. If so,
830 propagate the ADDR_EXPR into the use of NAME and fold the result. */
831 if (TREE_CODE (lhs) == MEM_REF
832 && TREE_OPERAND (lhs, 0) == name)
835 HOST_WIDE_INT def_rhs_offset;
836 /* If the address is invariant we can always fold it. */
837 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
/* Accumulate the MEM_REF offset with DEF_RHS's unit offset (and, for a
   MEM_REF base, that base's own offset) into OFF.  */
840 double_int off = mem_ref_offset (lhs);
842 off = double_int_add (off,
843 shwi_to_double_int (def_rhs_offset));
844 if (TREE_CODE (def_rhs_base) == MEM_REF)
846 off = double_int_add (off, mem_ref_offset (def_rhs_base));
847 new_ptr = TREE_OPERAND (def_rhs_base, 0);
850 new_ptr = build_fold_addr_expr (def_rhs_base);
851 TREE_OPERAND (lhs, 0) = new_ptr;
852 TREE_OPERAND (lhs, 1)
853 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
854 tidy_after_forward_propagate_addr (use_stmt);
855 /* Continue propagating into the RHS if this was not the only use. */
859 /* If the LHS is a plain dereference and the value type is the same as
860 that of the pointed-to type of the address we can put the
861 dereferenced address on the LHS preserving the original alias-type. */
862 else if (gimple_assign_lhs (use_stmt) == lhs
863 && useless_type_conversion_p
864 (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
865 TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
867 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
868 tree new_offset, new_base, saved;
869 while (handled_component_p (*def_rhs_basep))
870 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Temporarily rewrite DEF_RHS's innermost base in place (restored via
   SAVED below) so the unshared copy carries the combined MEM_REF.  */
871 saved = *def_rhs_basep;
872 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
874 new_base = TREE_OPERAND (*def_rhs_basep, 0);
876 = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
877 TREE_OPERAND (*def_rhs_basep, 1));
881 new_base = build_fold_addr_expr (*def_rhs_basep);
882 new_offset = TREE_OPERAND (lhs, 1);
884 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
885 new_base, new_offset);
/* Preserve volatility and the no-trap flag of the original access.  */
886 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (lhs);
887 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (lhs);
888 gimple_assign_set_lhs (use_stmt,
889 unshare_expr (TREE_OPERAND (def_rhs, 0)));
890 *def_rhs_basep = saved;
891 tidy_after_forward_propagate_addr (use_stmt);
892 /* Continue propagating into the RHS if this was not the
898 /* We can have a struct assignment dereferencing our name twice.
899 Note that we didn't propagate into the lhs to not falsely
900 claim we did when propagating into the rhs. */
904 /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
905 nodes from the RHS. */
906 rhs = gimple_assign_rhs1 (use_stmt);
907 if (TREE_CODE (rhs) == ADDR_EXPR)
908 rhs = TREE_OPERAND (rhs, 0);
909 while (handled_component_p (rhs))
910 rhs = TREE_OPERAND (rhs, 0);
912 /* Now see if the RHS node is a MEM_REF using NAME. If so,
913 propagate the ADDR_EXPR into the use of NAME and fold the result. */
914 if (TREE_CODE (rhs) == MEM_REF
915 && TREE_OPERAND (rhs, 0) == name)
918 HOST_WIDE_INT def_rhs_offset;
919 if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
/* Mirror of the LHS MEM_REF case above, applied to the RHS.  */
922 double_int off = mem_ref_offset (rhs);
924 off = double_int_add (off,
925 shwi_to_double_int (def_rhs_offset));
926 if (TREE_CODE (def_rhs_base) == MEM_REF)
928 off = double_int_add (off, mem_ref_offset (def_rhs_base));
929 new_ptr = TREE_OPERAND (def_rhs_base, 0);
932 new_ptr = build_fold_addr_expr (def_rhs_base);
933 TREE_OPERAND (rhs, 0) = new_ptr;
934 TREE_OPERAND (rhs, 1)
935 = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
936 fold_stmt_inplace (use_stmt);
937 tidy_after_forward_propagate_addr (use_stmt);
940 /* If the RHS is a plain dereference and the value type is the same as
941 that of the pointed-to type of the address we can put the
942 dereferenced address on the RHS preserving the original alias-type. */
943 else if (gimple_assign_rhs1 (use_stmt) == rhs
944 && useless_type_conversion_p
945 (TREE_TYPE (gimple_assign_lhs (use_stmt)),
946 TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
948 tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
949 tree new_offset, new_base, saved;
950 while (handled_component_p (*def_rhs_basep))
951 def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
/* Same in-place rewrite/restore dance as the LHS case above.  */
952 saved = *def_rhs_basep;
953 if (TREE_CODE (*def_rhs_basep) == MEM_REF)
955 new_base = TREE_OPERAND (*def_rhs_basep, 0);
957 = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
958 TREE_OPERAND (*def_rhs_basep, 1));
962 new_base = build_fold_addr_expr (*def_rhs_basep);
963 new_offset = TREE_OPERAND (rhs, 1);
965 *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
966 new_base, new_offset);
967 TREE_THIS_VOLATILE (*def_rhs_basep) = TREE_THIS_VOLATILE (rhs);
968 TREE_THIS_NOTRAP (*def_rhs_basep) = TREE_THIS_NOTRAP (rhs);
969 gimple_assign_set_rhs1 (use_stmt,
970 unshare_expr (TREE_OPERAND (def_rhs, 0)));
971 *def_rhs_basep = saved;
972 fold_stmt_inplace (use_stmt);
973 tidy_after_forward_propagate_addr (use_stmt);
978 /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
980 if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
981 || gimple_assign_rhs1 (use_stmt) != name)
984 /* The remaining cases are all for turning pointer arithmetic into
985 array indexing. They only apply when we have the address of
986 element zero in an array. If that is not the case then there
988 array_ref = TREE_OPERAND (def_rhs, 0);
989 if ((TREE_CODE (array_ref) != ARRAY_REF
990 || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
991 || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
992 && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
995 rhs2 = gimple_assign_rhs2 (use_stmt);
996 /* Try to optimize &x[C1] p+ C2 where C2 is a multiple of the size
997 of the elements in X into &x[C1 + C2/element size]. */
998 if (TREE_CODE (rhs2) == INTEGER_CST)
1000 tree new_rhs = maybe_fold_stmt_addition (gimple_location (use_stmt),
1001 TREE_TYPE (def_rhs),
1005 tree type = TREE_TYPE (gimple_assign_lhs (use_stmt));
1006 new_rhs = unshare_expr (new_rhs);
1007 if (!useless_type_conversion_p (type, TREE_TYPE (new_rhs)))
1009 if (!is_gimple_min_invariant (new_rhs))
1010 new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs,
1012 true, GSI_SAME_STMT);
1013 new_rhs = fold_convert (type, new_rhs);
1015 gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
1016 use_stmt = gsi_stmt (*use_stmt_gsi);
1017 update_stmt (use_stmt);
1018 tidy_after_forward_propagate_addr (use_stmt);
1023 /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
1024 converting a multiplication of an index by the size of the
1025 array elements, then the result is converted into the proper
1026 type for the arithmetic. */
1027 if (TREE_CODE (rhs2) == SSA_NAME
1028 && (TREE_CODE (array_ref) != ARRAY_REF
1029 || integer_zerop (TREE_OPERAND (array_ref, 1)))
1030 && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
1031 /* Avoid problems with IVopts creating PLUS_EXPRs with a
1032 different type than their operands. */
1033 && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
1034 return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
1039 /* STMT is a statement of the form SSA_NAME = ADDR_EXPR <whatever>.
1041 Try to forward propagate the ADDR_EXPR into all uses of the SSA_NAME.
1042 Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
1043 node or for recovery of array indexing from pointer arithmetic.
1044 Returns true, if all uses have been propagated into. */
/* Driver: tries to propagate ADDR_EXPR RHS (the value of ssa NAME)
   into every immediate use of NAME via forward_propagate_addr_expr_1,
   then deletes intermediate copy/conversion statements whose LHS has
   become unused.  Returns true only if every use was propagated into
   (NAME ends with zero uses).  NOTE(review): brace lines, the ALL
   accumulator declaration, and FOR_EACH_IMM_USE_ON_STMT framing are
   elided from this dump (gaps in the embedded numbering).  */
1047 forward_propagate_addr_expr (tree name, tree rhs)
1049 int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
1050 imm_use_iterator iter;
1053 bool single_use_p = has_single_use (name);
1055 FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
1060 /* If the use is not in a simple assignment statement, then
1061 there is nothing we can do. */
1062 if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
1064 if (!is_gimple_debug (use_stmt))
1069 /* If the use is in a deeper loop nest, then we do not want
1070 to propagate non-invariant ADDR_EXPRs into the loop as that
1071 is likely adding expression evaluations into the loop. */
1072 if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth
1073 && !is_gimple_min_invariant (rhs))
1080 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1081 result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
1083 /* If the use has moved to a different statement adjust
1084 the update machinery for the old statement too. */
1085 if (use_stmt != gsi_stmt (gsi))
1087 update_stmt (use_stmt);
1088 use_stmt = gsi_stmt (gsi);
1091 update_stmt (use_stmt);
1095 /* Remove intermediate now unused copy and conversion chains. */
1096 use_rhs = gimple_assign_rhs1 (use_stmt);
1098 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
1099 && TREE_CODE (use_rhs) == SSA_NAME
1100 && has_zero_uses (gimple_assign_lhs (use_stmt)))
1102 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
1103 release_defs (use_stmt);
1104 gsi_remove (&gsi, true);
1108 return all && has_zero_uses (name);
1112 /* Forward propagate the comparison defined in STMT like
1113 cond_1 = x CMP y to uses of the form
1117 Returns true if stmt is now unused. */
/* NOTE(review): the extracted chunk is missing some lines of this
   function (e.g. the return type and a few early-return statements);
   comments cover only what is visible.  */
1120 forward_propagate_comparison (gimple stmt)
1122 tree name = gimple_assign_lhs (stmt);
1124 tree tmp = NULL_TREE;
1125 gimple_stmt_iterator gsi;
1126 enum tree_code code;
1129 /* Don't propagate ssa names that occur in abnormal phis. */
1130 if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
1131 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
1132 || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
1133 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
1136 /* Do not un-cse comparisons. But propagate through copies. */
1137 use_stmt = get_prop_dest_stmt (name, &name);
1139 || !is_gimple_assign (use_stmt))
1142 code = gimple_assign_rhs_code (use_stmt);
1143 lhs = gimple_assign_lhs (use_stmt);
/* Only propagate into integral-typed destinations.  */
1146 if (!INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
1147 /* We can propagate the condition into a statement that
1148 computes the logical negation of the comparison result. */
1149 if ((code == BIT_NOT_EXPR
1150 && TYPE_PRECISION (TREE_TYPE (lhs)) == 1)
1151 || (code == BIT_XOR_EXPR
1152 && integer_onep (gimple_assign_rhs2 (use_stmt))))
1154 tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
/* NaN-honoring modes restrict which comparisons are invertible.  */
1155 bool nans = HONOR_NANS (TYPE_MODE (type));
1156 enum tree_code inv_code;
1157 inv_code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
1158 if (inv_code == ERROR_MARK)
/* Build the inverted comparison with STMT's original operands.  */
1161 tmp = build2 (inv_code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
1162 gimple_assign_rhs2 (stmt));
1167 gsi = gsi_for_stmt (use_stmt);
1168 gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
1169 use_stmt = gsi_stmt (gsi);
1170 update_stmt (use_stmt);
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1174 fprintf (dump_file, "  Replaced '");
1175 print_gimple_expr (dump_file, stmt, 0, dump_flags);
1176 fprintf (dump_file, "' with '");
1177 print_gimple_expr (dump_file, use_stmt, 0, dump_flags);
1178 fprintf (dump_file, "'\n");
1181 /* Remove defining statements. */
1182 return remove_prop_source_from_use (name);
1186 /* If we have lhs = ~x (STMT), look and see if earlier we had x = ~y.
1187 If so, we can change STMT into lhs = y which can later be copy
1188 propagated. Similarly for negation.
1190 This could trivially be formulated as a forward propagation
1191 to immediate uses. However, we already had an implementation
1192 from DOM which used backward propagation via the use-def links.
1194 It turns out that backward propagation is actually faster as
1195 there's less work to do for each NOT/NEG expression we find.
1196 Backwards propagation needs to look at the statement in a single
1197 backlink. Forward propagation needs to look at potentially more
1198 than one forward link.
1200 Returns true when the statement was changed. */
/* NOTE(review): some lines (return type, braces, final return) are
   missing from this extracted view.  */
1203 simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
1205 gimple stmt = gsi_stmt (*gsi_p);
1206 tree rhs = gimple_assign_rhs1 (stmt);
/* Follow the use-def link backwards to the statement defining RHS.  */
1207 gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
1209 /* See if the RHS_DEF_STMT has the same form as our statement. */
1210 if (is_gimple_assign (rhs_def_stmt)
1211 && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
1213 tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
1215 /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME. */
1216 if (TREE_CODE (rhs_def_operand) == SSA_NAME
1217 && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
/* ~~y -> y (resp. -(-y) -> y): rewrite STMT to a plain copy of Y.  */
1219 gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
1220 stmt = gsi_stmt (*gsi_p);
1229 /* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
1230 the condition which we may be able to optimize better. */
/* NOTE(review): some lines (return type, local declarations, braces)
   are missing from this extracted view.  */
1233 simplify_gimple_switch (gimple stmt)
1235 tree cond = gimple_switch_index (stmt);
1239 /* The optimization that we really care about is removing unnecessary
1240 casts. That will let us do much better in propagating the inferred
1241 constant at the switch target. */
1242 if (TREE_CODE (cond) == SSA_NAME)
1244 def_stmt = SSA_NAME_DEF_STMT (cond);
1245 if (is_gimple_assign (def_stmt))
/* Only look through NOP (cast) conversions of the switch index.  */
1247 if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
1252 def = gimple_assign_rhs1 (def_stmt);
1254 /* ??? Why was Jeff testing this? We are gimple... */
1255 gcc_checking_assert (is_gimple_val (def));
/* TO = type of the switch index, TI = type of the cast's source.  */
1257 to = TREE_TYPE (cond);
1258 ti = TREE_TYPE (def);
1260 /* If we have an extension that preserves value, then we
1261 can copy the source value into the switch. */
1263 need_precision = TYPE_PRECISION (ti);
1265 if (! INTEGRAL_TYPE_P (ti))
1267 else if (TYPE_UNSIGNED (to) && !TYPE_UNSIGNED (ti))
/* A signed source cast to unsigned needs one extra bit to keep
   the sign information.  */
1269 else if (!TYPE_UNSIGNED (to) && TYPE_UNSIGNED (ti))
1270 need_precision += 1;
1271 if (TYPE_PRECISION (to) < need_precision)
/* The cast is value-preserving: switch directly on the source.  */
1276 gimple_switch_set_index (stmt, def);
1287 /* For pointers p2 and p1 return p2 - p1 if the
1288 difference is known and constant, otherwise return NULL. */
/* NOTE(review): this extracted view is missing lines (return type,
   the loop headers that bound J, the recording of exps[]/offs[] and
   cnt[], braces, and the final return).  Comments describe only the
   visible code.  */
1291 constant_pointer_difference (tree p1, tree p2)
1294 #define CPD_ITERATIONS 5
/* For each pointer we collect up to CPD_ITERATIONS (base, offset)
   candidate pairs; a match between the two lists yields the answer.  */
1295 tree exps[2][CPD_ITERATIONS];
1296 tree offs[2][CPD_ITERATIONS];
1299 for (i = 0; i < 2; i++)
1301 tree p = i ? p1 : p2;
1302 tree off = size_zero_node;
1304 enum tree_code code;
1306 /* For each of p1 and p2 we need to iterate at least
1307 twice, to handle ADDR_EXPR directly in p1/p2,
1308 SSA_NAME with ADDR_EXPR or POINTER_PLUS_EXPR etc.
1309 on definition's stmt RHS. Iterate a few extra times. */
1313 if (!POINTER_TYPE_P (TREE_TYPE (p)))
1315 if (TREE_CODE (p) == ADDR_EXPR)
1317 tree q = TREE_OPERAND (p, 0);
1318 HOST_WIDE_INT offset;
1319 tree base = get_addr_base_and_unit_offset (q, &offset);
1324 off = size_binop (PLUS_EXPR, off, size_int (offset));
/* Look through &MEM_REF[ssa + cst]: continue from the SSA base and
   accumulate the MEM_REF offset.  */
1326 if (TREE_CODE (q) == MEM_REF
1327 && TREE_CODE (TREE_OPERAND (q, 0)) == SSA_NAME)
1329 p = TREE_OPERAND (q, 0);
1330 off = size_binop (PLUS_EXPR, off,
1331 double_int_to_tree (sizetype,
1332 mem_ref_offset (q)));
1341 if (TREE_CODE (p) != SSA_NAME)
1345 if (j == CPD_ITERATIONS)
1347 stmt = SSA_NAME_DEF_STMT (p);
1348 if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != p)
1350 code = gimple_assign_rhs_code (stmt);
/* p = q + CST: fold the constant into OFF and keep chasing Q.  */
1351 if (code == POINTER_PLUS_EXPR)
1353 if (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)
1355 off = size_binop (PLUS_EXPR, off, gimple_assign_rhs2 (stmt));
1356 p = gimple_assign_rhs1 (stmt);
1358 else if (code == ADDR_EXPR || code == NOP_EXPR)
1359 p = gimple_assign_rhs1 (stmt);
/* Any base expression shared by both chains gives the constant
   difference of the accumulated offsets.  */
1367 for (i = 0; i < cnt[0]; i++)
1368 for (j = 0; j < cnt[1]; j++)
1369 if (exps[0][i] == exps[1][j])
1370 return size_binop (MINUS_EXPR, offs[0][i], offs[1][j]);
1375 /* *GSI_P is a GIMPLE_CALL to a builtin function.
1377 memcpy (p, "abcd", 4);
1378 memset (p + 4, ' ', 3);
1380 memcpy (p, "abcd   ", 7);
1381 call if the latter can be stored by pieces during expansion. */
/* NOTE(review): the extracted view is missing lines throughout this
   function (return type, early returns, braces, some declarations).
   The visible logic merges an adjacent memcpy/mempcpy-from-literal (or
   a single-char store) STMT1 with a following memset STMT2 into one
   memcpy from a freshly built string literal.  */
1384 simplify_builtin_call (gimple_stmt_iterator *gsi_p, tree callee2)
1386 gimple stmt1, stmt2 = gsi_stmt (*gsi_p);
/* Walk the virtual use chain: STMT1 is the store dominating STMT2.  */
1387 tree vuse = gimple_vuse (stmt2);
1390 stmt1 = SSA_NAME_DEF_STMT (vuse);
1392 switch (DECL_FUNCTION_CODE (callee2))
1394 case BUILT_IN_MEMSET:
/* Only the plain 3-argument, value-unused memset form on 8-bit-byte
   targets is handled.  */
1395 if (gimple_call_num_args (stmt2) != 3
1396 || gimple_call_lhs (stmt2)
1398 || BITS_PER_UNIT != 8)
1403 tree ptr1, src1, str1, off1, len1, lhs1;
1404 tree ptr2 = gimple_call_arg (stmt2, 0);
1405 tree val2 = gimple_call_arg (stmt2, 1);
1406 tree len2 = gimple_call_arg (stmt2, 2);
1407 tree diff, vdef, new_str_cst;
1409 unsigned int ptr1_align;
1410 unsigned HOST_WIDE_INT src_len;
1412 use_operand_p use_p;
/* Both the fill value and the length must be compile-time constants.  */
1414 if (!host_integerp (val2, 0)
1415 || !host_integerp (len2, 1))
1417 if (is_gimple_call (stmt1))
1419 /* If first stmt is a call, it needs to be memcpy
1420 or mempcpy, with string literal as second argument and
1422 callee1 = gimple_call_fndecl (stmt1);
1423 if (callee1 == NULL_TREE
1424 || DECL_BUILT_IN_CLASS (callee1) != BUILT_IN_NORMAL
1425 || gimple_call_num_args (stmt1) != 3)
1427 if (DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMCPY
1428 && DECL_FUNCTION_CODE (callee1) != BUILT_IN_MEMPCPY)
1430 ptr1 = gimple_call_arg (stmt1, 0);
1431 src1 = gimple_call_arg (stmt1, 1);
1432 len1 = gimple_call_arg (stmt1, 2);
1433 lhs1 = gimple_call_lhs (stmt1);
1434 if (!host_integerp (len1, 1))
1436 str1 = string_constant (src1, &off1);
1437 if (str1 == NULL_TREE)
/* The literal read must stay inside the string constant and its
   element type must be a plain character.  */
1439 if (!host_integerp (off1, 1)
1440 || compare_tree_int (off1, TREE_STRING_LENGTH (str1) - 1) > 0
1441 || compare_tree_int (len1, TREE_STRING_LENGTH (str1)
1442 - tree_low_cst (off1, 1)) > 0
1443 || TREE_CODE (TREE_TYPE (str1)) != ARRAY_TYPE
1444 || TYPE_MODE (TREE_TYPE (TREE_TYPE (str1)))
1445 != TYPE_MODE (char_type_node))
1448 else if (gimple_assign_single_p (stmt1))
1450 /* Otherwise look for length 1 memcpy optimized into
1452 ptr1 = gimple_assign_lhs (stmt1);
1453 src1 = gimple_assign_rhs1 (stmt1);
1454 if (TREE_CODE (ptr1) != MEM_REF
1455 || TYPE_MODE (TREE_TYPE (ptr1)) != TYPE_MODE (char_type_node)
1456 || !host_integerp (src1, 0))
/* Treat the single-char store as a one-byte copy at &MEM_REF.  */
1458 ptr1 = build_fold_addr_expr (ptr1);
1459 callee1 = NULL_TREE;
1460 len1 = size_one_node;
1462 off1 = size_zero_node;
/* DIFF = ptr2 - ptr1 must be a known constant; for mempcpy also try
   its returned end pointer as the base.  */
1468 diff = constant_pointer_difference (ptr1, ptr2);
1469 if (diff == NULL && lhs1 != NULL)
1471 diff = constant_pointer_difference (lhs1, ptr2);
1472 if (DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1474 diff = size_binop (PLUS_EXPR, diff,
1475 fold_convert (sizetype, len1));
1477 /* If the difference between the second and first destination pointer
1478 is not constant, or is bigger than memcpy length, bail out. */
1480 || !host_integerp (diff, 1)
1481 || tree_int_cst_lt (len1, diff))
1484 /* Use maximum of difference plus memset length and memcpy length
1485 as the new memcpy length, if it is too big, bail out. */
1486 src_len = tree_low_cst (diff, 1);
1487 src_len += tree_low_cst (len2, 1);
1488 if (src_len < (unsigned HOST_WIDE_INT) tree_low_cst (len1, 1))
1489 src_len = tree_low_cst (len1, 1);
1493 /* If mempcpy value is used elsewhere, bail out, as mempcpy
1494 with bigger length will return different result. */
1495 if (lhs1 != NULL_TREE
1496 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY
1497 && (TREE_CODE (lhs1) != SSA_NAME
1498 || !single_imm_use (lhs1, &use_p, &use_stmt)
1499 || use_stmt != stmt2))
1502 /* If anything reads memory in between memcpy and memset
1503 call, the modified memcpy call might change it. */
1504 vdef = gimple_vdef (stmt1);
1506 && (!single_imm_use (vdef, &use_p, &use_stmt)
1507 || use_stmt != stmt2))
1510 ptr1_align = get_pointer_alignment (ptr1, BIGGEST_ALIGNMENT);
1511 /* Construct the new source string literal. */
1512 src_buf = XALLOCAVEC (char, src_len + 1);
1515 TREE_STRING_POINTER (str1) + tree_low_cst (off1, 1),
1516 tree_low_cst (len1, 1));
1518 src_buf[0] = tree_low_cst (src1, 0);
/* Overlay the memset fill bytes at their offset within the buffer.  */
1519 memset (src_buf + tree_low_cst (diff, 1),
1520 tree_low_cst (val2, 1), tree_low_cst (len2, 1));
1521 src_buf[src_len] = '\0';
1522 /* Neither builtin_strncpy_read_str nor builtin_memcpy_read_str
1523 handle embedded '\0's. */
1524 if (strlen (src_buf) != src_len)
1526 rtl_profile_for_bb (gimple_bb (stmt2));
1527 /* If the new memcpy wouldn't be emitted by storing the literal
1528 by pieces, this optimization might enlarge .rodata too much,
1529 as commonly used string literals couldn't be shared any
1531 if (!can_store_by_pieces (src_len,
1532 builtin_strncpy_read_str,
1533 src_buf, ptr1_align, false))
1536 new_str_cst = build_string_literal (src_len, src_buf);
1539 /* If STMT1 is a mem{,p}cpy call, adjust it and remove
1541 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1542 gimple_call_set_lhs (stmt1, NULL_TREE);
1543 gimple_call_set_arg (stmt1, 1, new_str_cst);
1544 gimple_call_set_arg (stmt1, 2,
1545 build_int_cst (TREE_TYPE (len1), src_len));
1546 update_stmt (stmt1);
/* STMT1 now covers both stores; the memset is dead.  */
1547 unlink_stmt_vdef (stmt2);
1548 gsi_remove (gsi_p, true);
1549 release_defs (stmt2);
1550 if (lhs1 && DECL_FUNCTION_CODE (callee1) == BUILT_IN_MEMPCPY)
1551 release_ssa_name (lhs1);
1556 /* Otherwise, if STMT1 is length 1 memcpy optimized into
1557 assignment, remove STMT1 and change memset call into
1559 gimple_stmt_iterator gsi = gsi_for_stmt (stmt1);
1561 if (!is_gimple_val (ptr1))
1562 ptr1 = force_gimple_operand_gsi (gsi_p, ptr1, true, NULL_TREE,
1563 true, GSI_SAME_STMT);
1564 gimple_call_set_fndecl (stmt2, built_in_decls [BUILT_IN_MEMCPY]);
1565 gimple_call_set_arg (stmt2, 0, ptr1);
1566 gimple_call_set_arg (stmt2, 1, new_str_cst);
1567 gimple_call_set_arg (stmt2, 2,
1568 build_int_cst (TREE_TYPE (len2), src_len));
1569 unlink_stmt_vdef (stmt1);
1570 gsi_remove (&gsi, true);
1571 release_defs (stmt1);
1572 update_stmt (stmt2);
1583 /* Checks if expression has type of one-bit precision, or is a known
1584 truth-valued expression. */
/* NOTE(review): return type and a few lines (including the final
   return) are missing from this extracted view.  */
1586 truth_valued_ssa_name (tree name)
1589 tree type = TREE_TYPE (name);
/* Only integral types can carry a truth value here.  */
1591 if (!INTEGRAL_TYPE_P (type))
1593 /* Don't check here for BOOLEAN_TYPE as the precision isn't
1594 necessarily one and so ~X is not equal to !X. */
1595 if (TYPE_PRECISION (type) == 1)
/* Otherwise, NAME is truth-valued if its defining statement uses a
   comparison-like code (truth_value_p).  */
1597 def = SSA_NAME_DEF_STMT (name);
1598 if (is_gimple_assign (def))
1599 return truth_value_p (gimple_assign_rhs_code (def));
1603 /* Helper routine for simplify_bitwise_binary_1 function.
1604 Return for the SSA name NAME the expression X if it mets condition
1605 NAME = !X. Otherwise return NULL_TREE.
1606 Detected patterns for NAME = !X are:
1607 !X and X == 0 for X with integral type.
1608 X ^ 1, X != 1,or ~X for X with integral type with precision of one. */
/* NOTE(review): some lines (return type, switch/case structure around
   the pattern tests, returns) are missing from this extracted view.  */
1610 lookup_logical_inverted_value (tree name)
1613 enum tree_code code;
1616 /* If name has none-intergal type, or isn't a SSA_NAME, then
1618 if (TREE_CODE (name) != SSA_NAME
1619 || !INTEGRAL_TYPE_P (TREE_TYPE (name)))
1621 def = SSA_NAME_DEF_STMT (name);
1622 if (!is_gimple_assign (def))
1625 code = gimple_assign_rhs_code (def);
1626 op1 = gimple_assign_rhs1 (def);
1629 /* Get for EQ_EXPR or BIT_XOR_EXPR operation the second operand.
1630 If CODE isn't an EQ_EXPR, BIT_XOR_EXPR, or BIT_NOT_EXPR, then return. */
1631 if (code == EQ_EXPR || code == NE_EXPR
1632 || code == BIT_XOR_EXPR)
1633 op2 = gimple_assign_rhs2 (def);
/* ~X counts as !X only when NAME itself is truth-valued.  */
1638 if (truth_valued_ssa_name (name))
1642 /* Check if we have X == 0 and X has an integral type. */
1643 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1645 if (integer_zerop (op2))
1649 /* Check if we have X != 1 and X is a truth-valued. */
1650 if (!INTEGRAL_TYPE_P (TREE_TYPE (op1)))
1652 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1656 /* Check if we have X ^ 1 and X is truth valued. */
1657 if (integer_onep (op2) && truth_valued_ssa_name (op1))
1667 /* Optimize ARG1 CODE ARG2 to a constant for bitwise binary
1668 operations CODE, if one operand has the logically inverted
1669 value of the other. */
/* NOTE(review): return type and a few lines (the equality early-out
   body, some returns) are missing from this extracted view.  */
1671 simplify_bitwise_binary_1 (enum tree_code code, tree type,
1672 tree arg1, tree arg2)
1676 /* If CODE isn't a bitwise binary operation, return NULL_TREE. */
1677 if (code != BIT_AND_EXPR && code != BIT_IOR_EXPR
1678 && code != BIT_XOR_EXPR)
1681 /* First check if operands ARG1 and ARG2 are equal. If so
1682 return NULL_TREE as this optimization is handled fold_stmt. */
1685 /* See if we have in arguments logical-not patterns. */
/* ANOT ends up as the non-inverted operand when one argument is the
   logical negation of the other.  */
1686 if (((anot = lookup_logical_inverted_value (arg1)) == NULL_TREE
1688 && ((anot = lookup_logical_inverted_value (arg2)) == NULL_TREE
/* X & !X is always zero.  */
1693 if (code == BIT_AND_EXPR)
1694 return fold_convert (type, integer_zero_node);
1695 /* X | !X -> 1 and X ^ !X -> 1, if X is truth-valued. */
1696 if (truth_valued_ssa_name (anot))
1697 return fold_convert (type, integer_one_node);
1699 /* ??? Otherwise result is (X != 0 ? X : 1). not handled. */
1703 /* Simplify bitwise binary operations.
1704 Return true if a transformation applied, otherwise return false. */
/* NOTE(review): the extracted view is missing lines (return type,
   braces, `return true/false` statements after each transform).
   Visible structure: gather the defining statements of both operands,
   then try several pattern rewrites in sequence.  */
1707 simplify_bitwise_binary (gimple_stmt_iterator *gsi)
1709 gimple stmt = gsi_stmt (*gsi);
1710 tree arg1 = gimple_assign_rhs1 (stmt);
1711 tree arg2 = gimple_assign_rhs2 (stmt);
1712 enum tree_code code = gimple_assign_rhs_code (stmt);
1714 gimple def1 = NULL, def2 = NULL;
1715 tree def1_arg1, def2_arg1;
1716 enum tree_code def1_code, def2_code;
/* Default to the operand's own tree code; refine to the defining
   assignment's code when the operand is an SSA name.  */
1718 def1_code = TREE_CODE (arg1);
1720 if (TREE_CODE (arg1) == SSA_NAME)
1722 def1 = SSA_NAME_DEF_STMT (arg1);
1723 if (is_gimple_assign (def1))
1725 def1_code = gimple_assign_rhs_code (def1);
1726 def1_arg1 = gimple_assign_rhs1 (def1);
1730 def2_code = TREE_CODE (arg2);
1732 if (TREE_CODE (arg2) == SSA_NAME)
1734 def2 = SSA_NAME_DEF_STMT (arg2);
1735 if (is_gimple_assign (def2))
1737 def2_code = gimple_assign_rhs_code (def2);
1738 def2_arg1 = gimple_assign_rhs1 (def2);
1742 /* Try to fold (type) X op CST -> (type) (X op ((type-x) CST)). */
1743 if (TREE_CODE (arg2) == INTEGER_CST
1744 && CONVERT_EXPR_CODE_P (def1_code)
1745 && INTEGRAL_TYPE_P (TREE_TYPE (def1_arg1))
1746 && int_fits_type_p (arg2, TREE_TYPE (def1_arg1)))
1749 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1), NULL);
/* Perform the bitwise op in the narrower source type, then convert.  */
1751 gimple_build_assign_with_ops (code, tem, def1_arg1,
1752 fold_convert_loc (gimple_location (stmt),
1753 TREE_TYPE (def1_arg1),
1755 tem = make_ssa_name (tem, newop);
1756 gimple_assign_set_lhs (newop, tem);
1757 gimple_set_location (newop, gimple_location (stmt));
1758 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1759 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1760 tem, NULL_TREE, NULL_TREE);
1761 update_stmt (gsi_stmt (*gsi));
1765 /* For bitwise binary operations apply operand conversions to the
1766 binary operation result instead of to the operands. This allows
1767 to combine successive conversions and bitwise binary operations. */
1768 if (CONVERT_EXPR_CODE_P (def1_code)
1769 && CONVERT_EXPR_CODE_P (def2_code)
1770 && types_compatible_p (TREE_TYPE (def1_arg1), TREE_TYPE (def2_arg1))
1771 /* Make sure that the conversion widens the operands, or has same
1772 precision, or that it changes the operation to a bitfield
1774 && ((TYPE_PRECISION (TREE_TYPE (def1_arg1))
1775 <= TYPE_PRECISION (TREE_TYPE (arg1)))
1776 || (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (arg1)))
1778 || (TYPE_PRECISION (TREE_TYPE (arg1))
1779 != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg1))))))
1782 tree tem = create_tmp_reg (TREE_TYPE (def1_arg1),
1784 newop = gimple_build_assign_with_ops (code, tem, def1_arg1, def2_arg1);
1785 tem = make_ssa_name (tem, newop);
1786 gimple_assign_set_lhs (newop, tem);
1787 gimple_set_location (newop, gimple_location (stmt));
1788 gsi_insert_before (gsi, newop, GSI_SAME_STMT);
1789 gimple_assign_set_rhs_with_ops_1 (gsi, NOP_EXPR,
1790 tem, NULL_TREE, NULL_TREE);
1791 update_stmt (gsi_stmt (*gsi));
1795 /* (a | CST1) & CST2 -> (a & CST2) | (CST1 & CST2). */
1796 if (code == BIT_AND_EXPR
1797 && def1_code == BIT_IOR_EXPR
1798 && TREE_CODE (arg2) == INTEGER_CST
1799 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
1801 tree cst = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg2),
1802 arg2, gimple_assign_rhs2 (def1));
/* If CST1 & CST2 is zero the IOR contributes nothing: keep only
   a & CST2.  */
1805 if (integer_zerop (cst))
1807 gimple_assign_set_rhs1 (stmt, def1_arg1);
1811 tem = create_tmp_reg (TREE_TYPE (arg2), NULL);
1812 newop = gimple_build_assign_with_ops (BIT_AND_EXPR,
1813 tem, def1_arg1, arg2);
1814 tem = make_ssa_name (tem, newop);
1815 gimple_assign_set_lhs (newop, tem);
1816 gimple_set_location (newop, gimple_location (stmt));
1817 /* Make sure to re-process the new stmt as it's walking upwards. */
1818 gsi_insert_before (gsi, newop, GSI_NEW_STMT);
1819 gimple_assign_set_rhs1 (stmt, tem);
1820 gimple_assign_set_rhs2 (stmt, cst);
1821 gimple_assign_set_rhs_code (stmt, BIT_IOR_EXPR);
1826 /* Combine successive equal operations with constants. */
1827 if ((code == BIT_AND_EXPR
1828 || code == BIT_IOR_EXPR
1829 || code == BIT_XOR_EXPR)
1830 && def1_code == code
1831 && TREE_CODE (arg2) == INTEGER_CST
1832 && TREE_CODE (gimple_assign_rhs2 (def1)) == INTEGER_CST)
/* (a OP c1) OP c2 -> a OP (c1 OP c2) for an associative bitwise OP.  */
1834 tree cst = fold_build2 (code, TREE_TYPE (arg2),
1835 arg2, gimple_assign_rhs2 (def1));
1836 gimple_assign_set_rhs1 (stmt, def1_arg1);
1837 gimple_assign_set_rhs2 (stmt, cst);
1842 /* Canonicalize X ^ ~0 to ~X. */
1843 if (code == BIT_XOR_EXPR
1844 && TREE_CODE (arg2) == INTEGER_CST
1845 && integer_all_onesp (arg2))
1847 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, arg1, NULL_TREE);
1848 gcc_assert (gsi_stmt (*gsi) == stmt);
1853 /* Try simple folding for X op !X, and X op X. */
1854 res = simplify_bitwise_binary_1 (code, TREE_TYPE (arg1), arg1, arg2);
1855 if (res != NULL_TREE)
1857 gimple_assign_set_rhs_from_tree (gsi, res);
1858 update_stmt (gsi_stmt (*gsi));
1866 /* Perform re-associations of the plus or minus statement STMT that are
1867 always permitted. Returns true if the CFG was changed. */
/* NOTE(review): the extracted view is missing many lines of this
   function (return type, braces, fold_convert calls on rhs1/rhs2,
   several assignments establishing the rewritten operands).  Comments
   describe only the visible code.  */
1870 associate_plusminus (gimple stmt)
1872 tree rhs1 = gimple_assign_rhs1 (stmt);
1873 tree rhs2 = gimple_assign_rhs2 (stmt);
1874 enum tree_code code = gimple_assign_rhs_code (stmt);
1875 gimple_stmt_iterator gsi;
1878 /* We can't reassociate at all for saturating types. */
1879 if (TYPE_SATURATING (TREE_TYPE (rhs1)))
1882 /* First contract negates. */
1887 /* A +- (-B) -> A -+ B. */
1888 if (TREE_CODE (rhs2) == SSA_NAME)
1890 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
1891 if (is_gimple_assign (def_stmt)
1892 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1893 && can_propagate_from (def_stmt))
/* Flip PLUS<->MINUS and use the negate's operand directly.  */
1895 code = (code == MINUS_EXPR) ? PLUS_EXPR : MINUS_EXPR;
1896 gimple_assign_set_rhs_code (stmt, code);
1897 rhs2 = gimple_assign_rhs1 (def_stmt);
1898 gimple_assign_set_rhs2 (stmt, rhs2);
1899 gimple_set_modified (stmt, true);
1904 /* (-A) + B -> B - A. */
1905 if (TREE_CODE (rhs1) == SSA_NAME
1906 && code == PLUS_EXPR)
1908 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1909 if (is_gimple_assign (def_stmt)
1910 && gimple_assign_rhs_code (def_stmt) == NEGATE_EXPR
1911 && can_propagate_from (def_stmt))
1914 gimple_assign_set_rhs_code (stmt, code);
1916 gimple_assign_set_rhs1 (stmt, rhs1);
1917 rhs2 = gimple_assign_rhs1 (def_stmt);
1918 gimple_assign_set_rhs2 (stmt, rhs2);
1919 gimple_set_modified (stmt, true);
1926 /* We can't reassociate floating-point or fixed-point plus or minus
1927 because of saturation to +-Inf. */
1928 if (FLOAT_TYPE_P (TREE_TYPE (rhs1))
1929 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1)))
1932 /* Second match patterns that allow contracting a plus-minus pair
1933 irrespective of overflow issues.
1935 (A +- B) - A -> +- B
1937 (CST +- A) +- CST -> CST +- A
1938 (A + CST) +- CST -> A + CST
1941 A - (A +- B) -> -+ B
1942 A +- (B +- A) -> +- B
1943 CST +- (CST +- A) -> CST +- A
1944 CST +- (A +- CST) -> CST +- A
1947 via commutating the addition and contracting operations to zero
1948 by reassociation. */
1950 gsi = gsi_for_stmt (stmt);
/* First group: look through the definition of RHS1.  */
1951 if (TREE_CODE (rhs1) == SSA_NAME)
1953 gimple def_stmt = SSA_NAME_DEF_STMT (rhs1);
1954 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
1956 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
1957 if (def_code == PLUS_EXPR
1958 || def_code == MINUS_EXPR)
1960 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
1961 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
1962 if (operand_equal_p (def_rhs1, rhs2, 0)
1963 && code == MINUS_EXPR)
1965 /* (A +- B) - A -> +- B. */
1966 code = ((def_code == PLUS_EXPR)
1967 ? TREE_CODE (def_rhs2) : NEGATE_EXPR);
1970 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1971 gcc_assert (gsi_stmt (gsi) == stmt);
1972 gimple_set_modified (stmt, true);
1974 else if (operand_equal_p (def_rhs2, rhs2, 0)
1975 && code != def_code)
1977 /* (A +- B) -+ B -> A. */
1978 code = TREE_CODE (def_rhs1);
1981 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
1982 gcc_assert (gsi_stmt (gsi) == stmt);
1983 gimple_set_modified (stmt, true);
1985 else if (TREE_CODE (rhs2) == INTEGER_CST
1986 && TREE_CODE (def_rhs1) == INTEGER_CST)
1988 /* (CST +- A) +- CST -> CST +- A. */
1989 tree cst = fold_binary (code, TREE_TYPE (rhs1),
/* Only rewrite when constant folding succeeded without overflow.  */
1991 if (cst && !TREE_OVERFLOW (cst))
1994 gimple_assign_set_rhs_code (stmt, code);
1996 gimple_assign_set_rhs1 (stmt, rhs1);
1998 gimple_assign_set_rhs2 (stmt, rhs2);
1999 gimple_set_modified (stmt, true);
2002 else if (TREE_CODE (rhs2) == INTEGER_CST
2003 && TREE_CODE (def_rhs2) == INTEGER_CST
2004 && def_code == PLUS_EXPR)
2006 /* (A + CST) +- CST -> A + CST. */
2007 tree cst = fold_binary (code, TREE_TYPE (rhs1),
2009 if (cst && !TREE_OVERFLOW (cst))
2012 gimple_assign_set_rhs_code (stmt, code);
2014 gimple_assign_set_rhs1 (stmt, rhs1);
2016 gimple_assign_set_rhs2 (stmt, rhs2);
2017 gimple_set_modified (stmt, true);
2021 else if (def_code == BIT_NOT_EXPR
2022 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
2024 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2025 if (code == PLUS_EXPR
2026 && operand_equal_p (def_rhs1, rhs2, 0))
/* ~A + A -> -1 (all-ones) for integral types.  */
2030 rhs1 = build_int_cst_type (TREE_TYPE (rhs2), -1);
2032 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2033 gcc_assert (gsi_stmt (gsi) == stmt);
2034 gimple_set_modified (stmt, true);
2036 else if (code == PLUS_EXPR
2037 && integer_onep (rhs1))
2043 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2044 gcc_assert (gsi_stmt (gsi) == stmt);
2045 gimple_set_modified (stmt, true);
/* Second group: the mirrored patterns through the definition of RHS2.  */
2051 if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
2053 gimple def_stmt = SSA_NAME_DEF_STMT (rhs2);
2054 if (is_gimple_assign (def_stmt) && can_propagate_from (def_stmt))
2056 enum tree_code def_code = gimple_assign_rhs_code (def_stmt);
2057 if (def_code == PLUS_EXPR
2058 || def_code == MINUS_EXPR)
2060 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2061 tree def_rhs2 = gimple_assign_rhs2 (def_stmt);
2062 if (operand_equal_p (def_rhs1, rhs1, 0)
2063 && code == MINUS_EXPR)
2065 /* A - (A +- B) -> -+ B. */
2066 code = ((def_code == PLUS_EXPR)
2067 ? NEGATE_EXPR : TREE_CODE (def_rhs2));
2070 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2071 gcc_assert (gsi_stmt (gsi) == stmt);
2072 gimple_set_modified (stmt, true);
2074 else if (operand_equal_p (def_rhs2, rhs1, 0)
2075 && code != def_code)
2077 /* A +- (B +- A) -> +- B. */
2078 code = ((code == PLUS_EXPR)
2079 ? TREE_CODE (def_rhs1) : NEGATE_EXPR);
2082 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2083 gcc_assert (gsi_stmt (gsi) == stmt);
2084 gimple_set_modified (stmt, true);
2086 else if (TREE_CODE (rhs1) == INTEGER_CST
2087 && TREE_CODE (def_rhs1) == INTEGER_CST)
2089 /* CST +- (CST +- A) -> CST +- A. */
2090 tree cst = fold_binary (code, TREE_TYPE (rhs2),
2092 if (cst && !TREE_OVERFLOW (cst))
2094 code = (code == def_code ? PLUS_EXPR : MINUS_EXPR);
2095 gimple_assign_set_rhs_code (stmt, code);
2097 gimple_assign_set_rhs1 (stmt, rhs1);
2099 gimple_assign_set_rhs2 (stmt, rhs2);
2100 gimple_set_modified (stmt, true);
2103 else if (TREE_CODE (rhs1) == INTEGER_CST
2104 && TREE_CODE (def_rhs2) == INTEGER_CST)
2106 /* CST +- (A +- CST) -> CST +- A. */
2107 tree cst = fold_binary (def_code == code
2108 ? PLUS_EXPR : MINUS_EXPR,
2111 if (cst && !TREE_OVERFLOW (cst))
2114 gimple_assign_set_rhs1 (stmt, rhs1);
2116 gimple_assign_set_rhs2 (stmt, rhs2);
2117 gimple_set_modified (stmt, true);
2121 else if (def_code == BIT_NOT_EXPR
2122 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
2124 tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
2125 if (code == PLUS_EXPR
2126 && operand_equal_p (def_rhs1, rhs1, 0))
/* A + ~A -> -1 (all-ones) for integral types.  */
2130 rhs1 = build_int_cst_type (TREE_TYPE (rhs1), -1);
2132 gimple_assign_set_rhs_with_ops (&gsi, code, rhs1, NULL_TREE);
2133 gcc_assert (gsi_stmt (gsi) == stmt);
2134 gimple_set_modified (stmt, true);
/* Re-fold the statement if anything above changed it, and clean up
   EH edges that may have become dead.  */
2141 if (gimple_modified_p (stmt))
2143 fold_stmt_inplace (stmt);
2145 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
2146 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2153 /* Combine two conversions in a row for the second conversion at *GSI.
2154 Returns 1 if there were any changes made, 2 if cfg-cleanup needs to
2155 run. Else it returns 0. */
/* NOTE(review): the extracted view is missing some lines (return type,
   local declarations, a few returns and update_stmt calls).  The logic
   mirrors the classic fold-const two-conversions analysis.  */
2158 combine_conversions (gimple_stmt_iterator *gsi)
2160 gimple stmt = gsi_stmt (*gsi);
2163 enum tree_code code = gimple_assign_rhs_code (stmt);
2165 gcc_checking_assert (CONVERT_EXPR_CODE_P (code)
2166 || code == FLOAT_EXPR
2167 || code == FIX_TRUNC_EXPR);
2169 lhs = gimple_assign_lhs (stmt);
2170 op0 = gimple_assign_rhs1 (stmt);
/* A conversion to a compatible type degenerates to a copy.  */
2171 if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (op0)))
2173 gimple_assign_set_rhs_code (stmt, TREE_CODE (op0));
2177 if (TREE_CODE (op0) != SSA_NAME)
2180 def_stmt = SSA_NAME_DEF_STMT (op0);
2181 if (!is_gimple_assign (def_stmt))
2184 if (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
2186 tree defop0 = gimple_assign_rhs1 (def_stmt);
/* The three types of the conversion chain:
   inside_type --(inner cast)--> inter_type --(this cast)--> type.  */
2187 tree type = TREE_TYPE (lhs);
2188 tree inside_type = TREE_TYPE (defop0);
2189 tree inter_type = TREE_TYPE (op0);
2190 int inside_int = INTEGRAL_TYPE_P (inside_type);
2191 int inside_ptr = POINTER_TYPE_P (inside_type);
2192 int inside_float = FLOAT_TYPE_P (inside_type);
2193 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
2194 unsigned int inside_prec = TYPE_PRECISION (inside_type);
2195 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
2196 int inter_int = INTEGRAL_TYPE_P (inter_type);
2197 int inter_ptr = POINTER_TYPE_P (inter_type);
2198 int inter_float = FLOAT_TYPE_P (inter_type);
2199 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
2200 unsigned int inter_prec = TYPE_PRECISION (inter_type);
2201 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
2202 int final_int = INTEGRAL_TYPE_P (type);
2203 int final_ptr = POINTER_TYPE_P (type);
2204 int final_float = FLOAT_TYPE_P (type);
2205 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
2206 unsigned int final_prec = TYPE_PRECISION (type);
2207 int final_unsignedp = TYPE_UNSIGNED (type);
2209 /* In addition to the cases of two conversions in a row
2210 handled below, if we are converting something to its own
2211 type via an object of identical or wider precision, neither
2212 conversion is needed. */
2213 if (useless_type_conversion_p (type, inside_type)
2214 && (((inter_int || inter_ptr) && final_int)
2215 || (inter_float && final_float))
2216 && inter_prec >= final_prec)
2218 gimple_assign_set_rhs1 (stmt, unshare_expr (defop0));
2219 gimple_assign_set_rhs_code (stmt, TREE_CODE (defop0));
2221 return remove_prop_source_from_use (op0) ? 2 : 1;
2224 /* Likewise, if the intermediate and initial types are either both
2225 float or both integer, we don't need the middle conversion if the
2226 former is wider than the latter and doesn't change the signedness
2227 (for integers). Avoid this if the final type is a pointer since
2228 then we sometimes need the middle conversion. Likewise if the
2229 final type has a precision not equal to the size of its mode. */
2230 if (((inter_int && inside_int)
2231 || (inter_float && inside_float)
2232 || (inter_vec && inside_vec))
2233 && inter_prec >= inside_prec
2234 && (inter_float || inter_vec
2235 || inter_unsignedp == inside_unsignedp)
2236 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2237 && TYPE_MODE (type) == TYPE_MODE (inter_type))
2239 && (! final_vec || inter_prec == inside_prec))
2241 gimple_assign_set_rhs1 (stmt, defop0);
2243 return remove_prop_source_from_use (op0) ? 2 : 1;
2246 /* If we have a sign-extension of a zero-extended value, we can
2247 replace that by a single zero-extension. */
2248 if (inside_int && inter_int && final_int
2249 && inside_prec < inter_prec && inter_prec < final_prec
2250 && inside_unsignedp && !inter_unsignedp)
2252 gimple_assign_set_rhs1 (stmt, defop0);
2254 return remove_prop_source_from_use (op0) ? 2 : 1;
2257 /* Two conversions in a row are not needed unless:
2258 - some conversion is floating-point (overstrict for now), or
2259 - some conversion is a vector (overstrict for now), or
2260 - the intermediate type is narrower than both initial and
2262 - the intermediate type and innermost type differ in signedness,
2263 and the outermost type is wider than the intermediate, or
2264 - the initial type is a pointer type and the precisions of the
2265 intermediate and final types differ, or
2266 - the final type is a pointer type and the precisions of the
2267 initial and intermediate types differ. */
2268 if (! inside_float && ! inter_float && ! final_float
2269 && ! inside_vec && ! inter_vec && ! final_vec
2270 && (inter_prec >= inside_prec || inter_prec >= final_prec)
2271 && ! (inside_int && inter_int
2272 && inter_unsignedp != inside_unsignedp
2273 && inter_prec < final_prec)
2274 && ((inter_unsignedp && inter_prec > inside_prec)
2275 == (final_unsignedp && final_prec > inter_prec))
2276 && ! (inside_ptr && inter_prec != final_prec)
2277 && ! (final_ptr && inside_prec != inter_prec)
2278 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
2279 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
2281 gimple_assign_set_rhs1 (stmt, defop0);
2283 return remove_prop_source_from_use (op0) ? 2 : 1;
2286 /* A truncation to an unsigned type should be canonicalized as
2287 bitwise and of a mask. */
2288 if (final_int && inter_int && inside_int
2289 && final_prec == inside_prec
2290 && final_prec > inter_prec
/* Mask the source down to INTER_PREC bits instead of the
   truncate-then-extend pair.  */
2294 tem = fold_build2 (BIT_AND_EXPR, inside_type,
2297 (inside_type, double_int_mask (inter_prec)));
2298 if (!useless_type_conversion_p (type, inside_type))
2300 tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,
2302 gimple_assign_set_rhs1 (stmt, tem);
2305 gimple_assign_set_rhs_from_tree (gsi, tem);
2306 update_stmt (gsi_stmt (*gsi));
2314 /* Main entry point for the forward propagation and statement combine
/* NOTE(review): this listing has lines elided (opening braces, the enclosing
   FOR_EACH_BB loop, declarations of lhs/rhs/did_something, case labels, the
   final return, etc.).  Comments below describe only the visible code.  */
2318 ssa_forward_propagate_and_combine (void)
/* TODO_* flags accumulated here are presumably returned to the pass
   manager (return statement not visible in this listing).  */
2321 unsigned int todoflags = 0;
2323 cfg_changed = false;
2327 gimple_stmt_iterator gsi, prev;
2328 bool prev_initialized;
2330 /* Apply forward propagation to all stmts in the basic-block.
2331 Note we update GSI within the loop as necessary. */
/* Phase 1: walk the block forward, propagating RHS values into uses.
   GSI is advanced manually (no increment in the for-header) because
   several branches below remove the current stmt via gsi_remove.  */
2332 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
2334 gimple stmt = gsi_stmt (gsi);
2336 enum tree_code code;
/* Only single-use-style SSA assignments are interesting here;
   non-assignments are skipped (skip/continue code elided in listing).  */
2338 if (!is_gimple_assign (stmt))
2344 lhs = gimple_assign_lhs (stmt);
2345 rhs = gimple_assign_rhs1 (stmt);
2346 code = gimple_assign_rhs_code (stmt);
/* Nothing to propagate if the LHS is not an SSA name or is dead.  */
2347 if (TREE_CODE (lhs) != SSA_NAME
2348 || has_zero_uses (lhs))
2354 /* If this statement sets an SSA_NAME to an address,
2355 try to propagate the address into the uses of the SSA_NAME. */
2356 if (code == ADDR_EXPR
2357 /* Handle pointer conversions on invariant addresses
2358 as well, as this is valid gimple. */
2359 || (CONVERT_EXPR_CODE_P (code)
2360 && TREE_CODE (rhs) == ADDR_EXPR
2361 && POINTER_TYPE_P (TREE_TYPE (lhs))))
2363 tree base = get_base_address (TREE_OPERAND (rhs, 0));
/* Propagate only invariant addresses not involving abnormal SSA names;
   (part of this condition, e.g. the "!base ||" arm, is elided).  */
2366 || decl_address_invariant_p (base))
2367 && !stmt_references_abnormal_ssa_name (stmt)
2368 && forward_propagate_addr_expr (lhs, rhs))
/* Propagation succeeded into all uses: the definition is now dead,
   so release its SSA defs and delete the stmt.  */
2370 release_defs (stmt);
2371 todoflags |= TODO_remove_unused_locals;
2372 gsi_remove (&gsi, true);
/* Pointer arithmetic with a constant offset: fold it into the uses as
   a MEM_REF-based address expression.  */
2377 else if (code == POINTER_PLUS_EXPR && can_propagate_from (stmt))
2379 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
2380 /* ??? Better adjust the interface to that function
2381 instead of building new trees here. */
2382 && forward_propagate_addr_expr
/* Build a temporary &MEM_REF[rhs, offset] tree to reuse
   forward_propagate_addr_expr (some argument lines elided).  */
2386 fold_build2 (MEM_REF,
2387 TREE_TYPE (TREE_TYPE (rhs)),
2391 gimple_assign_rhs2 (stmt))))))
2393 release_defs (stmt);
2394 todoflags |= TODO_remove_unused_locals;
2395 gsi_remove (&gsi, true);
2397 else if (is_gimple_min_invariant (rhs))
2399 /* Make sure to fold &a[0] + off_1 here. */
2400 fold_stmt_inplace (stmt);
/* If folding did not get rid of the POINTER_PLUS_EXPR, handling
   continues (follow-up code elided in listing).  */
2402 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
/* Comparisons defining an SSA name: propagate the comparison into
   its uses (COND_EXPRs, GIMPLE_CONDs).  */
2408 else if (TREE_CODE_CLASS (code) == tcc_comparison)
2410 forward_propagate_comparison (stmt);
2417 /* Combine stmts with the stmts defining their operands.
2418 Note we update GSI within the loop as necessary. */
/* Phase 2: statement combining.  PREV tracks the iterator position before
   the current stmt so that, after a change, iteration can resume from the
   stmts inserted before it (resume logic partly elided below).  */
2419 prev_initialized = false;
2420 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
2422 gimple stmt = gsi_stmt (gsi);
2423 bool changed = false;
/* Dispatch on statement kind; case labels (GIMPLE_ASSIGN, GIMPLE_SWITCH,
   GIMPLE_COND, GIMPLE_CALL) are elided from this listing.  */
2425 switch (gimple_code (stmt))
2429 tree rhs1 = gimple_assign_rhs1 (stmt);
2430 enum tree_code code = gimple_assign_rhs_code (stmt);
/* ~x / -x: try to fold with the defining stmt of x.  */
2432 if ((code == BIT_NOT_EXPR
2433 || code == NEGATE_EXPR)
2434 && TREE_CODE (rhs1) == SSA_NAME)
2435 changed = simplify_not_neg_expr (&gsi);
2436 else if (code == COND_EXPR)
2438 /* In this case the entire COND_EXPR is in rhs1. */
/* Defer overflow warnings so a strict-overflow warning is only
   emitted if the transformation actually sticks.  */
2440 fold_defer_overflow_warnings ();
/* did_something: 0 = nothing, nonzero = changed; 2 apparently also
   signals the defining stmt was removed (resume handling elided).  */
2441 did_something = forward_propagate_into_cond (&gsi);
2442 stmt = gsi_stmt (gsi);
2443 if (did_something == 2)
2445 fold_undefer_overflow_warnings
2446 (!TREE_NO_WARNING (rhs1) && did_something, stmt,
2447 WARN_STRICT_OVERFLOW_CONDITIONAL);
2448 changed = did_something != 0;
2450 else if (TREE_CODE_CLASS (code) == tcc_comparison)
/* Snapshot the no-warning bit before the stmt may be rewritten.  */
2452 bool no_warning = gimple_no_warning_p (stmt);
2454 fold_defer_overflow_warnings ();
2455 did_something = forward_propagate_into_comparison (&gsi);
2456 if (did_something == 2)
/* NOTE(review): this passes CHANGED (still false here) rather than
   DID_SOMETHING, unlike the COND_EXPR arm above — verify intent.  */
2458 fold_undefer_overflow_warnings
2459 (!no_warning && changed,
2460 stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2461 changed = did_something != 0;
2463 else if (code == BIT_AND_EXPR
2464 || code == BIT_IOR_EXPR
2465 || code == BIT_XOR_EXPR)
2466 changed = simplify_bitwise_binary (&gsi);
2467 else if (code == PLUS_EXPR
2468 || code == MINUS_EXPR)
2469 changed = associate_plusminus (stmt);
/* Conversion chains (casts, int<->float): try merging two
   conversions in a row via combine_conversions.  */
2470 else if (CONVERT_EXPR_CODE_P (code)
2471 || code == FLOAT_EXPR
2472 || code == FIX_TRUNC_EXPR)
2474 int did_something = combine_conversions (&gsi);
2475 if (did_something == 2)
2477 changed = did_something != 0;
/* GIMPLE_SWITCH case (label elided in listing).  */
2483 changed = simplify_gimple_switch (stmt);
/* GIMPLE_COND case (label elided): propagate into the condition,
   with the same deferred overflow-warning protocol as above.  */
2489 fold_defer_overflow_warnings ();
2490 did_something = forward_propagate_into_gimple_cond (stmt);
2491 if (did_something == 2)
2493 fold_undefer_overflow_warnings
2494 (did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
2495 changed = did_something != 0;
/* GIMPLE_CALL case (label elided): only normal built-ins are
   candidates for simplification.  */
2501 tree callee = gimple_call_fndecl (stmt);
2502 if (callee != NULL_TREE
2503 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2504 changed = simplify_builtin_call (&gsi, callee);
2513 /* If the stmt changed then re-visit it and the statements
2514 inserted before it. */
/* Restart from the head of the block when there is no recorded
   predecessor position yet (resume-from-PREV path elided).  */
2515 if (!prev_initialized)
2516 gsi = gsi_start_bb (bb);
2526 prev_initialized = true;
/* cfg_changed is presumably set by one of the propagators above;
   request CFG cleanup if so (surrounding condition elided).  */
2533 todoflags |= TODO_cleanup_cfg;
/* Gate function for the forwprop pass: run it only when tree forward
   propagation is enabled (-ftree-forwprop).  Return type and braces are
   elided in this listing.  */
2540 gate_forwprop (void)
2542 return flag_tree_forwprop;
2545 struct gimple_opt_pass pass_forwprop =
2549 "forwprop", /* name */
2550 gate_forwprop, /* gate */
2551 ssa_forward_propagate_and_combine, /* execute */
2554 0, /* static_pass_number */
2555 TV_TREE_FORWPROP, /* tv_id */
2556 PROP_cfg | PROP_ssa, /* properties_required */
2557 0, /* properties_provided */
2558 0, /* properties_destroyed */
2559 0, /* todo_flags_start */
2562 | TODO_verify_ssa /* todo_flags_finish */