/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
*/
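
/* An illustrative case for the last TODO item above (a sketch, not code
   from this pass): the store writes back the value the load produced,
   so it does not change the contents of the location.

     r1 = MEM[a];    <- load
     ...
     MEM[a] = r1;    <- store of the loaded value back to the same address
     ...
     r2 = MEM[a];    <- currently treated as killed; could reuse r1

   Handling this would let such loads move, particularly out of loops.  */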
/* References searched while implementing this.

   Compilers: Principles, Techniques, and Tools

   Global Optimization by Suppression of Partial Redundancies
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   Rice University Ph.D. thesis, Apr. 1996

   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing

   Advanced Compiler Design and Implementation
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
144 #include "coretypes.h"
152 #include "hard-reg-set.h"
155 #include "insn-config.h"
157 #include "basic-block.h"
159 #include "function.h"
168 #include "tree-pass.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute table of places where registers are set.

   2) Perform copy/constant propagation.

   3) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   4) Perform another pass of copy/constant propagation.  Try to bypass
      conditional jumps if the condition can be computed from a value of
      an earlier comparison.

   5) Perform store motion.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
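
   As an illustration (a hedged sketch, not taken from the sources), after
   GCSE replaces a redundant computation with a copy:

     r7 = r1 + r2;          r7 = r1 + r2;
     ...              =>    ...
     r9 = r1 + r2;          r9 = r7;      <- copy created by GCSE

   the second copy/constant propagation pass can replace later uses of r9
   with r7, often making the created copy dead.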
   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   In addition, expressions in REG_EQUAL notes are candidates for GCSE-ing.
   This allows PRE to hoist expressions that are expressed in multiple insns,
   such as complex address calculations (e.g. for PIC code, or loads with a
   high part and a low part).

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
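
   For example (an illustrative sketch), an expression computed inside a
   loop from operands the loop never modifies:

     loop:
       r5 = r1 + r2;   <- r1 and r2 unchanged within the loop
       ...
       goto loop;

   is partially redundant along the back edge, so PRE inserts the
   computation on the loop entry edge and turns the occurrence in the
   body into a copy, effectively hoisting the invariant.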
   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   This study was done before expressions in REG_EQUAL notes were added as
   candidate expressions for optimization, and before the GIMPLE optimizers
   were added.  Probably, multiple passes are even less efficient now than
   at the time when the study was conducted.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.

   **********************
   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much, as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
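
/* A hedged sketch of PRE steps 3-5 above (register numbers made up for
   illustration):

     before PRE                     after PRE

     B1: if (cond) goto B3;         B1: if (cond) goto B3;
     B2: r8 = r1 + r2;              B2: r10 = r1 + r2;  <- copy to new pseudo
         goto B4;                       r8 = r10;
     B3: ...                        B3: r10 = r1 + r2;  <- inserted (step 4)
     B4: r9 = r1 + r2;              B4: r9 = r10;       <- redundancy deleted

   Deleting the occurrence in B4 first (step 3) is what determines the
   reaching register r10 that the insertions and copies must use.  */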
/* GCSE global vars.  */

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
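
/* For illustration (a hedged sketch, not part of this file): in a block

     r5 = r1 + r2;   <- first occurrence: anticipatable, since r1 and r2
                        are not modified earlier in the block
     r1 = ...;
     r6 = r1 + r2;   <- last occurrence: available, since r1 and r2 are
                        not modified from here to the end of the block

   the expression (plus r1 r2) ends up on both the antic_occr and the
   avail_occr lists, but with different occurrences.  */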
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same bit.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression or the copy propagation one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */
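
/* A hedged example of the kind of MEM that load motion tracks (not from
   the sources): a location that is only ever read and written through
   the same address,

     loop:
       r5 = MEM[sym];    <- load
       ...
       MEM[sym] = r6;    <- store to the same location
       goto loop;

   Because every store also copies its value into the reaching register
   used for the loads, the load can be moved out of the loop and the
   value re-issued from that register.  */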
struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t pre_ldst_table = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
static void compute_can_copy (void);
static void recompute_all_luids (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void hash_scan_insn (rtx, struct hash_table *);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (const_rtx);
static int oprs_unchanged_p (const_rtx, const_rtx, int);
static int oprs_anticipatable_p (const_rtx, const_rtx);
static int oprs_available_p (const_rtx, const_rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (const_rtx, const_rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (const_rtx, const_rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (const_rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
static void canon_list_insert (rtx, const_rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (const_rtx, const_edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_basic_block (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (const_rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, const_rtx, void *);
static void reg_clear_last_set (rtx, const_rtx, void *);
static bool store_ops_ok (const_rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (const_rtx, const_rtx, int);
static bool find_loads (const_rtx, const_rtx, int);
static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
static bool store_killed_after (const_rtx, const_rtx, const_rtx,
                                const_basic_block, int *, rtx *);
static bool store_killed_before (const_rtx, const_rtx, const_rtx,
                                 const_basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
#define GNEW(T)                 ((T *) gmalloc (sizeof (T)))
#define GCNEW(T)                ((T *) gcalloc (1, sizeof (T)))

#define GNEWVEC(T, N)           ((T *) gmalloc (sizeof (T) * (N)))
#define GCNEWVEC(T, N)          ((T *) gcalloc ((N), sizeof (T)))

#define GNEWVAR(T, S)           ((T *) gmalloc ((S)))
#define GCNEWVAR(T, S)          ((T *) gcalloc (1, (S)))

#define GOBNEW(T)               ((T *) gcse_alloc (sizeof (T)))
#define GOBNEWVAR(T, S)         ((T *) gcse_alloc ((S)))
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

static int
gcse_main (rtx f ATTRIBUTE_UNUSED)
{
  int changed;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (cfun->calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;
  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  df_note_add_problem ();
  df_analyze ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  gcse_obstack_bottom = GOBNEWVAR (char, 1);

  if (dump_file)
    fprintf (dump_file, "GCSE pass\n\n");

  max_gcse_regno = max_reg_num ();
  /* Don't allow constant propagation to modify jumps
     during this pass.  */
  if (dbg_cnt (cprop1))
    {
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (1, false, false);
      if (changed)
        recompute_all_luids ();
      timevar_pop (TV_CPROP1);
    }

  if (optimize_function_for_speed_p (cfun))
    {
      timevar_push (TV_PRE);
      changed |= one_pre_gcse_pass (1);
      /* We may have just created new basic blocks.  Release and
         recompute various things which are sized on the number of
         basic blocks.
         ??? There would be no need for this if we used a block
         based Lazy Code Motion variant, with all (or selected)
         edges split before running the pass.  That would also
         help find_implicit_sets for cprop.  FIXME.  */
      if (changed)
        {
          free_modify_mem_tables ();
          modify_mem_list = GCNEWVEC (rtx, last_basic_block);
          canon_modify_mem_list = GCNEWVEC (rtx, last_basic_block);
        }

      run_jump_opt_after_gcse = 1;
      timevar_pop (TV_PRE);
    }
  else
    {
      /* This function is being optimized for code size.
         It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we don't run the partial redundancy algorithms).  */
      timevar_push (TV_HOIST);
      max_gcse_regno = max_reg_num ();
      one_code_hoisting_pass ();
      timevar_pop (TV_HOIST);
    }
  if (dump_file)
    fprintf (dump_file, "\n");

  obstack_free (&gcse_obstack, gcse_obstack_bottom);

  /* Do the second const/copy propagation pass, including cprop into
     conditional jumps.  */
  if (dbg_cnt (cprop2))
    {
      max_gcse_regno = max_reg_num ();

      /* This time, go ahead and allow cprop to alter jumps.  */
      timevar_push (TV_CPROP2);
      changed = one_cprop_pass (2, true, true);
      if (changed)
        recompute_all_luids ();
      timevar_pop (TV_CPROP2);
    }

  if (dump_file)
    {
      fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "pass 1, %d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  /* We are finished with alias.
     ??? Actually we recompute alias in store_motion.  */
  end_alias_analysis ();

  /* Run store motion.  */
  if (optimize_function_for_speed_p (cfun) && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;
  end_sequence ();
}
/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}

/* Recompute the DF LUIDs for all basic blocks.  If a sub-pass in this
   file changes something, we have to recompute them for the next pass.
   FIXME: If we would track which basic blocks we touch, we could
   update LUIDs in only those basic blocks.  */

static void
recompute_all_luids (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    df_recompute_luids (bb);
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the reg/memory set tracking tables.
   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = GCNEWVEC (rtx, last_basic_block);
  canon_modify_mem_list = GCNEWVEC (rtx, last_basic_block);
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}
/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < DF_INSN_LUID (insn);
        else
          return info->first_set >= DF_INSN_LUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, DF_INSN_LUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static const_rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the LUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x,
                        int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a readonly MEM, then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && DF_INSN_LUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && DF_INSN_LUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash = regno;

  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (const_rtx x, const_rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
                      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = GOBNEW (struct expr);
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        antic_occr = NULL;

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = GOBNEW (struct occr);
          bytes_used += sizeof (struct occr);
          antic_occr->insn = insn;
          antic_occr->next = cur_expr->antic_occr;
          antic_occr->deleted_p = 0;
          cur_expr->antic_occr = antic_occr;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
        {
          /* Found another instance of the expression in the same basic block.
             Prefer this occurrence to the currently recorded one.  We want
             the last one in the block and the block is scanned from start
             to end.  */
          avail_occr->insn = insn;
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = GOBNEW (struct occr);
          bytes_used += sizeof (struct occr);
          avail_occr->insn = insn;
          avail_occr->next = cur_expr->avail_occr;
          avail_occr->deleted_p = 0;
          cur_expr->avail_occr = avail_occr;
        }
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = GOBNEW (struct expr);
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = GOBNEW (struct occr);
      bytes_used += sizeof (struct occr);
      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (const_rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers a constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
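
/* For example (an illustrative note, not from the sources): the test above
   treats (compare (reg 65) (reg 65)) in an integer mode as a constant, but
   not the same COMPARE in a floating point mode, where the result is not
   fixed because NaN compares unequal to itself.  */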
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* See if a REG_EQUAL note shows this equivalent to a simpler expression.

         This allows us to do a single GCSE pass and still eliminate
         redundant constants, addresses or other expressions that are
         constructed with multiple instructions.

         However, keep the original SRC if INSN is a simple reg-reg move.
         In this case, there will almost always be a REG_EQUAL note on the
         insn that sets SRC.  By recording the REG_EQUAL value here as SRC
         for INSN, we miss copy propagation opportunities and we perform the
         same PRE GCSE operation repeatedly on the same REG_EQUAL value if we
         do more than one PRE GCSE pass.

         Note that this does not impede profitable constant propagations.  We
         "look through" reg-reg sets in lookup_avail_set.  */
      note = find_reg_equal_equiv_note (insn);
      if (note != 0
          && REG_NOTE_KIND (note) == REG_EQUAL
          && !REG_P (src)
          && (table->set_p
              ? gcse_constant_p (XEXP (note, 0))
              : want_to_gcse_p (XEXP (note, 0))))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  The latter condition does not have to mean that
             SRC itself is not anticipatable, but we just will not be
             able to handle code motion of insns with multiple sets.  */
          int antic_p = oprs_anticipatable_p (src, insn)
                        && !multiple_sets (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p,
                                avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((REG_P (src)
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BB_END (BLOCK_FOR_INSN (insn))
                   || (tmp = next_nonnote_insn (insn)) == NULL_RTX
                   || BLOCK_FOR_INSN (tmp) != BLOCK_FOR_INSN (insn)
                   || oprs_available_p (pat, tmp)))
        insert_set_in_table (pat, insn, table);
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
          /* Only record sets of pseudo-regs in the hash table.  */
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && want_to_gcse_p (dest)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* Stores are never anticipatable.  */
          int antic_p = 0;
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = oprs_available_p (dest, insn)
                        && ! JUMP_P (insn);

          /* Record the memory expression (DEST) in the hash table.  */
          insert_expr_in_table (dest, GET_MODE (dest), insn,
                                antic_p, avail_p, table);
        }
    }
}
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If TABLE->set_p is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table)
{
  rtx pat = PATTERN (insn);
  int i;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }
  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}
static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = XCNEWVEC (struct expr *, table->n_elems);
  hash_val = XNEWVEC (unsigned int, table->n_elems);

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int luid = DF_INSN_LUID (insn);

  info->last_set = luid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = luid;
    }
}
/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx unused1 ATTRIBUTE_UNUSED,
                   void * v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
}
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    {
      /* Note that traversals of this loop (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (blocks_with_calls, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
}
/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (MEM_P (dest)
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */
static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = GNEWVEC (struct reg_avail_info, max_gcse_regno);

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.  */
      FOR_BB_INSNS (current_bb, insn)
        {
          if (! INSN_P (insn))
            continue;

          if (CALL_P (insn))
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  record_last_reg_set_info (insn, regno);

              if (! RTL_CONST_OR_PURE_CALL_P (insn))
                record_last_mem_set_info (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);
        }

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
          && implicit_sets[current_bb->index] != NULL_RTX)
        hash_scan_set (implicit_sets[current_bb->index],
                       BB_HEAD (current_bb), table);

      /* The next pass builds the hash table.  */
      FOR_BB_INSNS (current_bb, insn)
        if (INSN_P (insn))
          hash_scan_insn (insn, table);
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}
/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether the set or expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = GNEWVAR (struct expr *, n);
  table->set_p = set_p;
}

/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}
1924 /* Compute the contents of TABLE, either the set hash table used for
1925 copy/const propagation or the expression hash table. */
1928 compute_hash_table (struct hash_table *table)
1930 /* Initialize count of number of entries in hash table. */
1932 memset (table->table, 0, table->size * sizeof (struct expr *));
1934 compute_hash_table_work (table);
1937 /* Expression tracking support. */
1939 /* Lookup REGNO in the set TABLE. The result is a pointer to the
1940 table entry, or NULL if not found. */
1942 static struct expr *
1943 lookup_set (unsigned int regno, struct hash_table *table)
1944 {
1945 unsigned int hash = hash_set (regno, table->size);
1946 struct expr *expr;
1948 expr = table->table[hash];
1950 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
1951 expr = expr->next_same_hash;
1953 return expr;
1954 }
1956 /* Return the next entry for REGNO in list EXPR. */
1958 static struct expr *
1959 next_set (unsigned int regno, struct expr *expr)
1960 {
1961 do
1962 expr = expr->next_same_hash;
1963 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
1965 return expr;
1966 }
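/* Typical use of the two routines above: visit every hash-table entry
   whose SET_DEST is REGNO, the way find_avail_set and find_bypass_set
   do below. A sketch, not compiled. */
#if 0
{
  struct expr *set;

  for (set = lookup_set (regno, &set_hash_table);
       set != NULL;
       set = next_set (regno, set))
    {
      /* ... e.g. test TEST_BIT (cprop_avin[bb], set->bitmap_index) ... */
    }
}
#endif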
1968 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
1969 types may be mixed. */
1972 free_insn_expr_list_list (rtx *listp)
1976 for (list = *listp; list ; list = next)
1978 next = XEXP (list, 1);
1979 if (GET_CODE (list) == EXPR_LIST)
1980 free_EXPR_LIST_node (list);
1981 else
1982 free_INSN_LIST_node (list);
1988 /* Clear canon_modify_mem_list and modify_mem_list tables. */
1990 clear_modify_mem_tables (void)
1995 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
1997 free_INSN_LIST_list (modify_mem_list + i);
1998 free_insn_expr_list_list (canon_modify_mem_list + i);
2000 bitmap_clear (modify_mem_list_set);
2001 bitmap_clear (blocks_with_calls);
2004 /* Release memory used by modify_mem_list_set. */
2007 free_modify_mem_tables (void)
2009 clear_modify_mem_tables ();
2010 free (modify_mem_list);
2011 free (canon_modify_mem_list);
2012 modify_mem_list = 0;
2013 canon_modify_mem_list = 0;
2016 /* Reset tables used to keep track of what's still available [since the
2017 start of the block]. */
2020 reset_opr_set_tables (void)
2022 /* Maintain a bitmap of which regs have been set since the beginning of the block. */
2024 CLEAR_REG_SET (reg_set_bitmap);
2026 /* Also keep a record of the last instruction to modify memory.
2027 For now this is very trivial; we only record whether any memory
2028 location has been modified. */
2029 clear_modify_mem_tables ();
2032 /* Return nonzero if the operands of X are not set before INSN in
2033 INSN's basic block. */
2036 oprs_not_set_p (const_rtx x, const_rtx insn)
2045 code = GET_CODE (x);
2062 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2063 DF_INSN_LUID (insn), x, 0))
2066 return oprs_not_set_p (XEXP (x, 0), insn);
2069 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2075 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2079 /* If we are about to do the last recursive call
2080 needed at this level, change it into iteration.
2081 This function is called enough to be worth it. */
2083 return oprs_not_set_p (XEXP (x, i), insn);
2085 if (! oprs_not_set_p (XEXP (x, i), insn))
2088 else if (fmt[i] == 'E')
2089 for (j = 0; j < XVECLEN (x, i); j++)
2090 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2097 /* Mark things set by a CALL. */
2100 mark_call (rtx insn)
2102 if (! RTL_CONST_OR_PURE_CALL_P (insn))
2103 record_last_mem_set_info (insn);
2106 /* Mark things set by a SET. */
2109 mark_set (rtx pat, rtx insn)
2111 rtx dest = SET_DEST (pat);
2113 while (GET_CODE (dest) == SUBREG
2114 || GET_CODE (dest) == ZERO_EXTRACT
2115 || GET_CODE (dest) == STRICT_LOW_PART)
2116 dest = XEXP (dest, 0);
2119 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2120 else if (MEM_P (dest))
2121 record_last_mem_set_info (insn);
2123 if (GET_CODE (SET_SRC (pat)) == CALL)
2127 /* Record things set by a CLOBBER. */
2130 mark_clobber (rtx pat, rtx insn)
2132 rtx clob = XEXP (pat, 0);
2134 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2135 clob = XEXP (clob, 0);
2138 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2140 record_last_mem_set_info (insn);
2143 /* Record things set by INSN.
2144 This data is used by oprs_not_set_p. */
2147 mark_oprs_set (rtx insn)
2149 rtx pat = PATTERN (insn);
2152 if (GET_CODE (pat) == SET)
2153 mark_set (pat, insn);
2154 else if (GET_CODE (pat) == PARALLEL)
2155 for (i = 0; i < XVECLEN (pat, 0); i++)
2157 rtx x = XVECEXP (pat, 0, i);
2159 if (GET_CODE (x) == SET)
2161 else if (GET_CODE (x) == CLOBBER)
2162 mark_clobber (x, insn);
2163 else if (GET_CODE (x) == CALL)
2167 else if (GET_CODE (pat) == CLOBBER)
2168 mark_clobber (pat, insn);
2169 else if (GET_CODE (pat) == CALL)
2174 /* Compute copy/constant propagation working variables. */
2176 /* Local properties of assignments. */
2177 static sbitmap *cprop_pavloc;
2178 static sbitmap *cprop_absaltered;
2180 /* Global properties of assignments (computed from the local properties). */
2181 static sbitmap *cprop_avin;
2182 static sbitmap *cprop_avout;
2184 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2185 basic blocks. N_SETS is the number of sets. */
2188 alloc_cprop_mem (int n_blocks, int n_sets)
2190 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2191 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2193 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2194 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2197 /* Free vars used by copy/const propagation. */
2200 free_cprop_mem (void)
2202 sbitmap_vector_free (cprop_pavloc);
2203 sbitmap_vector_free (cprop_absaltered);
2204 sbitmap_vector_free (cprop_avin);
2205 sbitmap_vector_free (cprop_avout);
2208 /* For each block, compute whether X is transparent. X is either an
2209 expression or an assignment [though we don't care which, for this context
2210 an assignment is treated as an expression]. For each block where an
2211 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
2215 compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
2221 /* repeat is used to turn tail-recursion into iteration since GCC
2222 can't do it when there's no return value. */
2228 code = GET_CODE (x);
2235 for (def = DF_REG_DEF_CHAIN (REGNO (x));
2237 def = DF_REF_NEXT_REG (def))
2238 SET_BIT (bmap[DF_REF_BB (def)->index], indx);
2243 for (def = DF_REG_DEF_CHAIN (REGNO (x));
2245 def = DF_REF_NEXT_REG (def))
2246 RESET_BIT (bmap[DF_REF_BB (def)->index], indx);
2252 if (! MEM_READONLY_P (x))
2257 /* First handle all the blocks with calls. We don't need to
2258 do any list walking for them. */
2259 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2262 SET_BIT (bmap[bb_index], indx);
2264 RESET_BIT (bmap[bb_index], indx);
2267 /* Now iterate over the blocks which have memory modifications
2268 but which do not have any calls. */
2269 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
2273 rtx list_entry = canon_modify_mem_list[bb_index];
2277 rtx dest, dest_addr;
2279 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2280 Examine each hunk of memory that is modified. */
2282 dest = XEXP (list_entry, 0);
2283 list_entry = XEXP (list_entry, 1);
2284 dest_addr = XEXP (list_entry, 0);
2286 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2287 x, NULL_RTX, rtx_addr_varies_p))
2290 SET_BIT (bmap[bb_index], indx);
2292 RESET_BIT (bmap[bb_index], indx);
2295 list_entry = XEXP (list_entry, 1);
2320 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2324 /* If we are about to do the last recursive call
2325 needed at this level, change it into iteration.
2326 This function is called enough to be worth it. */
2333 compute_transp (XEXP (x, i), indx, bmap, set_p);
2335 else if (fmt[i] == 'E')
2336 for (j = 0; j < XVECLEN (x, i); j++)
2337 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
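/* The "turn the last recursive call into iteration" idiom used by
   compute_transp above (and by oprs_not_set_p and find_used_regs) in
   isolation: a void tree walker re-enters its own top via the repeat
   label for the final operand instead of recursing, saving stack in
   deep RTL trees. A sketch, not compiled. */
#if 0
static void
walk_rtx (rtx x)
{
  int i;
  const char *fmt;

 repeat:
  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
	if (i == 0)
	  {
	    /* Last expression operand: iterate rather than recurse.  */
	    x = XEXP (x, 0);
	    goto repeat;
	  }
	walk_rtx (XEXP (x, i));
      }
}
#endif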
2341 /* Top level routine to do the dataflow analysis needed by copy/const propagation. */
2345 compute_cprop_data (void)
2347 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2348 compute_available (cprop_pavloc, cprop_absaltered,
2349 cprop_avout, cprop_avin);
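/* The two calls above compute the global availability lattice. As a
   rough sketch of what compute_available iterates to a fixed point
   (ignoring entry-block initialization and its worklist), one
   relaxation step for block BB, word-at-a-time over the packed
   bitmaps, is:

       AVIN[bb]  = intersection of AVOUT[pred] over all predecessors
       AVOUT[bb] = PAVLOC[bb] | (AVIN[bb] & ~ABSALTERED[bb])

   Illustrative only, not compiled. */
#if 0
{
  edge e;
  edge_iterator ei;
  unsigned int w;

  for (w = 0; w < cprop_avin[bb->index]->size; w++)
    {
      SBITMAP_ELT_TYPE in = ~(SBITMAP_ELT_TYPE) 0;

      FOR_EACH_EDGE (e, ei, bb->preds)
	in &= cprop_avout[e->src->index]->elms[w];

      cprop_avin[bb->index]->elms[w] = in;
      cprop_avout[bb->index]->elms[w]
	= cprop_pavloc[bb->index]->elms[w]
	  | (in & ~cprop_absaltered[bb->index]->elms[w]);
    }
}
#endif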
2352 /* Copy/constant propagation. */
2354 /* Maximum number of register uses in an insn that we handle. */
2357 /* Table of uses found in an insn.
2358 Allocated statically to avoid alloc/free complexity and overhead. */
2359 static struct reg_use reg_use_table[MAX_USES];
2361 /* Index into `reg_use_table' while building it. */
2362 static int reg_use_count;
2364 /* Set up a list of register numbers used in INSN. The found uses are stored
2365 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2366 and contains the number of uses in the table upon exit.
2368 ??? If a register appears multiple times we will record it multiple times.
2369 This doesn't hurt anything but it will slow things down. */
2372 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2379 /* repeat is used to turn tail-recursion into iteration since GCC
2380 can't do it when there's no return value. */
2385 code = GET_CODE (x);
2388 if (reg_use_count == MAX_USES)
2391 reg_use_table[reg_use_count].reg_rtx = x;
2395 /* Recursively scan the operands of this expression. */
2397 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2401 /* If we are about to do the last recursive call
2402 needed at this level, change it into iteration.
2403 This function is called enough to be worth it. */
2410 find_used_regs (&XEXP (x, i), data);
2412 else if (fmt[i] == 'E')
2413 for (j = 0; j < XVECLEN (x, i); j++)
2414 find_used_regs (&XVECEXP (x, i, j), data);
2418 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2419 Returns nonzero if successful. */
2422 try_replace_reg (rtx from, rtx to, rtx insn)
2424 rtx note = find_reg_equal_equiv_note (insn);
2427 rtx set = single_set (insn);
2429 /* Usually we substitute easy stuff, so we won't copy everything.
2430 We however need to take care to not duplicate non-trivial CONST expressions. */
2434 validate_replace_src_group (from, to, insn);
2435 if (num_changes_pending () && apply_change_group ())
2438 /* Try to simplify SET_SRC if we have substituted a constant. */
2439 if (success && set && CONSTANT_P (to))
2441 src = simplify_rtx (SET_SRC (set));
2444 validate_change (insn, &SET_SRC (set), src, 0);
2447 /* If there is already a REG_EQUAL note, update the expression in it
2448 with our replacement. */
2449 if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
2450 set_unique_reg_note (insn, REG_EQUAL,
2451 simplify_replace_rtx (XEXP (note, 0), from,
2453 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2455 /* If above failed and this is a single set, try to simplify the source of
2456 the set given our substitution. We could perhaps try this for multiple
2457 SETs, but it probably won't buy us anything. */
2458 src = simplify_replace_rtx (SET_SRC (set), from, to);
2460 if (!rtx_equal_p (src, SET_SRC (set))
2461 && validate_change (insn, &SET_SRC (set), src, 0))
2464 /* If we've failed to do replacement, have a single SET, don't already
2465 have a note, and have no special SET, add a REG_EQUAL note to not
2466 lose information. */
2467 if (!success && note == 0 && set != 0
2468 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
2469 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2470 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2473 /* A REG_EQUAL note may get simplified into a plain register.
2474 We don't allow that, so remove the note. This ought
2475 not to happen, because previous code ought to have synthesized a
2476 reg-reg move, but be on the safe side. */
2477 if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
2478 remove_note (insn, note);
2483 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
2484 NULL if no such set is found. */
2486 static struct expr *
2487 find_avail_set (int regno, rtx insn)
2489 /* SET1 contains the last set found that can be returned to the caller for
2490 use in a substitution. */
2491 struct expr *set1 = 0;
2493 /* Loops are not possible here. To get a loop we would need two sets
2494 available at the start of the block containing INSN. i.e. we would
2495 need two sets like this available at the start of the block:
2497 (set (reg X) (reg Y))
2498 (set (reg Y) (reg X))
2500 This cannot happen since the set of (reg Y) would have killed the
2501 set of (reg X), making it unavailable at the start of this block. */
2505 struct expr *set = lookup_set (regno, &set_hash_table);
2507 /* Find a set that is available at the start of the block
2508 which contains INSN. */
2511 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2513 set = next_set (regno, set);
2516 /* If no available set was found we've reached the end of the
2517 (possibly empty) copy chain. */
2521 gcc_assert (GET_CODE (set->expr) == SET);
2523 src = SET_SRC (set->expr);
2525 /* We know the set is available.
2526 Now check that SRC is ANTLOC (i.e. none of the source operands
2527 have changed since the start of the block).
2529 If the source operand changed, we may still use it for the next
2530 iteration of this loop, but we may not use it for substitutions. */
2532 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2535 /* If the source of the set is anything except a register, then
2536 we have reached the end of the copy chain. */
2540 /* Follow the copy chain, i.e. start another iteration of the loop
2541 and see if we have an available copy into SRC. */
2542 regno = REGNO (src);
2545 /* SET1 holds the last set that was available and anticipatable at INSN. */
2547 return set1;
2548 }
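/* Worked example of the loop above: suppose the sets available on
   entry to INSN's block are
       (set (reg 1) (reg 2))
       (set (reg 2) (const_int 3))
   and we query REGNO == 1.  The first iteration finds (reg 1) <- (reg 2);
   the source is a register, so we follow the chain with REGNO = 2 and
   find (reg 2) <- (const_int 3), a constant, ending the walk.  The
   cycle (reg X) <- (reg Y) together with (reg Y) <- (reg X) can never
   both be available at the block start (see the comment above), so the
   loop always terminates. */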
2550 /* Subroutine of cprop_insn that tries to propagate constants into
2551 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2552 it is the instruction that immediately precedes JUMP, and must be a
2553 single SET of a register. FROM is what we will try to replace,
2554 SRC is the constant we will try to substitute for it. Returns nonzero
2555 if a change was made. */
2558 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2560 rtx new_rtx, set_src, note_src;
2561 rtx set = pc_set (jump);
2562 rtx note = find_reg_equal_equiv_note (jump);
2566 note_src = XEXP (note, 0);
2567 if (GET_CODE (note_src) == EXPR_LIST)
2568 note_src = NULL_RTX;
2570 else note_src = NULL_RTX;
2572 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2573 set_src = note_src ? note_src : SET_SRC (set);
2575 /* First substitute the SETCC condition into the JUMP instruction,
2576 then substitute the given values into this expanded JUMP. */
2577 if (setcc != NULL_RTX
2578 && !modified_between_p (from, setcc, jump)
2579 && !modified_between_p (src, setcc, jump))
2582 rtx setcc_set = single_set (setcc);
2583 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2584 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2585 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2586 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2592 new_rtx = simplify_replace_rtx (set_src, from, src);
2594 /* If no simplification can be made, then try the next register. */
2595 if (rtx_equal_p (new_rtx, SET_SRC (set)))
2598 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
2599 if (new_rtx == pc_rtx)
2603 /* Ensure the value computed inside the jump insn is equivalent
2604 to the one computed by setcc. */
2605 if (setcc && modified_in_p (new_rtx, setcc))
2607 if (! validate_unshare_change (jump, &SET_SRC (set), new_rtx, 0))
2609 /* When (some) constants are not valid in a comparison, and there
2610 are two registers to be replaced by constants before the entire
2611 comparison can be folded into a constant, we need to keep
2612 intermediate information in REG_EQUAL notes. For targets with
2613 separate compare insns, such notes are added by try_replace_reg.
2614 When we have a combined compare-and-branch instruction, however,
2615 we need to attach a note to the branch itself to make this
2616 optimization work. */
2618 if (!rtx_equal_p (new_rtx, note_src))
2619 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new_rtx));
2623 /* Remove REG_EQUAL note after simplification. */
2625 remove_note (jump, note);
2629 /* Delete the cc0 setter. */
2630 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2631 delete_insn (setcc);
2634 run_jump_opt_after_gcse = 1;
2636 global_const_prop_count++;
2637 if (dump_file != NULL)
2640 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2641 REGNO (from), INSN_UID (jump));
2642 print_rtl (dump_file, src);
2643 fprintf (dump_file, "\n");
2645 purge_dead_edges (bb);
2647 /* If a conditional jump has been changed into unconditional jump, remove
2648 the jump and make the edge fallthru - this is always called in cfglayout mode. */
2650 if (new_rtx != pc_rtx && simplejump_p (jump))
2655 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ei_next (&ei))
2656 if (e->dest != EXIT_BLOCK_PTR
2657 && BB_HEAD (e->dest) == JUMP_LABEL (jump))
2659 e->flags |= EDGE_FALLTHRU;
2669 constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
2673 /* Check for reg or cc0 setting instructions followed by
2674 conditional branch instructions first. */
2676 && (sset = single_set (insn)) != NULL
2678 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2680 rtx dest = SET_DEST (sset);
2681 if ((REG_P (dest) || CC0_P (dest))
2682 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2686 /* Handle normal insns next. */
2687 if (NONJUMP_INSN_P (insn)
2688 && try_replace_reg (from, to, insn))
2691 /* Try to propagate a CONST_INT into a conditional jump.
2692 We're pretty specific about what we will handle in this
2693 code; we can extend this as necessary over time.
2695 Right now the insn in question must look like
2696 (set (pc) (if_then_else ...)) */
2697 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2698 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2702 /* Perform constant and copy propagation on INSN.
2703 The result is nonzero if a change was made. */
2706 cprop_insn (rtx insn, int alter_jumps)
2708 struct reg_use *reg_used;
2716 note_uses (&PATTERN (insn), find_used_regs, NULL);
2718 note = find_reg_equal_equiv_note (insn);
2720 /* We may win even when propagating constants into notes. */
2722 find_used_regs (&XEXP (note, 0), NULL);
2724 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2725 reg_used++, reg_use_count--)
2727 unsigned int regno = REGNO (reg_used->reg_rtx);
2731 /* Ignore registers created by GCSE.
2732 We do this because ... */
2733 if (regno >= max_gcse_regno)
2736 /* If the register has already been set in this block, there's
2737 nothing we can do. */
2738 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2741 /* Find an assignment that sets reg_used and is available
2742 at the start of the block. */
2743 set = find_avail_set (regno, insn);
2748 /* ??? We might be able to handle PARALLELs. Later. */
2749 gcc_assert (GET_CODE (pat) == SET);
2751 src = SET_SRC (pat);
2753 /* Constant propagation. */
2754 if (gcse_constant_p (src))
2756 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2759 global_const_prop_count++;
2760 if (dump_file != NULL)
2762 fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
2763 fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
2764 print_rtl (dump_file, src);
2765 fprintf (dump_file, "\n");
2767 if (INSN_DELETED_P (insn))
2771 else if (REG_P (src)
2772 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2773 && REGNO (src) != regno)
2775 if (try_replace_reg (reg_used->reg_rtx, src, insn))
2778 global_copy_prop_count++;
2779 if (dump_file != NULL)
2781 fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
2782 regno, INSN_UID (insn));
2783 fprintf (dump_file, " with reg %d\n", REGNO (src));
2786 /* The original insn setting reg_used may or may not now be
2787 deletable. We leave the deletion to flow. */
2788 /* FIXME: If it turns out that the insn isn't deletable,
2789 then we may have unnecessarily extended register lifetimes
2790 and made things worse. */
2798 /* Like find_used_regs, but avoid recording uses that appear in
2799 input-output contexts such as zero_extract or pre_dec. This
2800 restricts the cases we consider to those for which local cprop
2801 can legitimately make replacements. */
2804 local_cprop_find_used_regs (rtx *xptr, void *data)
2811 switch (GET_CODE (x))
2815 case STRICT_LOW_PART:
2824 /* Can only legitimately appear this early in the context of
2825 stack pushes for function arguments, but handle all of the
2826 codes nonetheless. */
2830 /* Setting a subreg of a register larger than word_mode leaves
2831 the non-written words unchanged. */
2832 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
2840 find_used_regs (xptr, data);
2843 /* Try to perform local const/copy propagation on X in INSN.
2844 If ALTER_JUMPS is false, changing jump insns is not allowed. */
2847 do_local_cprop (rtx x, rtx insn, bool alter_jumps)
2849 rtx newreg = NULL, newcnst = NULL;
2851 /* Rule out USE instructions and ASM statements as we don't want to
2852 change the hard registers mentioned. */
2854 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
2855 || (GET_CODE (PATTERN (insn)) != USE
2856 && asm_noperands (PATTERN (insn)) < 0)))
2858 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
2859 struct elt_loc_list *l;
2863 for (l = val->locs; l; l = l->next)
2865 rtx this_rtx = l->loc;
2868 if (gcse_constant_p (this_rtx))
2870 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
2871 /* Don't copy propagate if the register has an attached REG_EQUIV
2872 note. At this point only function parameters should have
2873 REG_EQUIV notes, and if the argument slot is used somewhere
2874 explicitly, it means the address of the parameter has been taken,
2875 so we should not extend the lifetime of the pseudo. */
2876 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
2877 || ! MEM_P (XEXP (note, 0))))
2880 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
2882 if (dump_file != NULL)
2884 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
2886 fprintf (dump_file, "insn %d with constant ",
2888 print_rtl (dump_file, newcnst);
2889 fprintf (dump_file, "\n");
2891 local_const_prop_count++;
2894 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
2896 if (dump_file != NULL)
2899 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
2900 REGNO (x), INSN_UID (insn));
2901 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
2903 local_copy_prop_count++;
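/* Boiled down, the scan over VAL->locs above prefers a constant
   location for NEWCNST and otherwise remembers an older pseudo for
   NEWREG, roughly as below (the REG_EQUIV guard discussed above is
   omitted from this sketch, which is not compiled). */
#if 0
for (l = val->locs; l; l = l->next)
  {
    if (gcse_constant_p (l->loc))
      newcnst = l->loc;
    if (REG_P (l->loc) && REGNO (l->loc) >= FIRST_PSEUDO_REGISTER)
      newreg = l->loc;
  }
#endif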
2910 /* Do local const/copy propagation (i.e. within each basic block).
2911 If ALTER_JUMPS is true, allow propagating into jump insns, which
2912 could modify the CFG. */
2915 local_cprop_pass (bool alter_jumps)
2919 struct reg_use *reg_used;
2920 bool changed = false;
2922 cselib_init (false);
2925 FOR_BB_INSNS (bb, insn)
2929 rtx note = find_reg_equal_equiv_note (insn);
2933 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
2936 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
2938 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2939 reg_used++, reg_use_count--)
2941 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps))
2947 if (INSN_DELETED_P (insn))
2950 while (reg_use_count);
2952 cselib_process_insn (insn);
2955 /* Forget everything at the end of a basic block. */
2956 cselib_clear_table ();
2961 /* Global analysis may get into infinite loops for unreachable blocks. */
2962 if (changed && alter_jumps)
2963 delete_unreachable_blocks ();
2966 /* Forward propagate copies. This includes copies and constants. Return
2967 nonzero if a change was made. */
2970 cprop (int alter_jumps)
2976 /* Note we start at block 1. */
2977 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
2979 if (dump_file != NULL)
2980 fprintf (dump_file, "\n");
2985 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
2987 /* Reset tables used to keep track of what's still valid [since the
2988 start of the block]. */
2989 reset_opr_set_tables ();
2991 FOR_BB_INSNS (bb, insn)
2994 changed |= cprop_insn (insn, alter_jumps);
2996 /* Keep track of everything modified by this insn. */
2997 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
2998 call mark_oprs_set if we turned the insn into a NOTE. */
2999 if (! NOTE_P (insn))
3000 mark_oprs_set (insn);
3004 if (dump_file != NULL)
3005 fprintf (dump_file, "\n");
3010 /* Similar to get_condition, only the resulting condition must be
3011 valid at JUMP, instead of at EARLIEST.
3013 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3014 settle for the condition variable in the jump instruction being integral.
3015 We prefer to be able to record the value of a user variable, rather than
3016 the value of a temporary used in a condition. This could be solved by
3017 recording the value of *every* register scanned by canonicalize_condition,
3018 but this would require some code reorganization. */
3021 fis_get_condition (rtx jump)
3023 return get_condition (jump, NULL, false, true);
3026 /* Check the comparison COND to see if we can safely form an implicit set from
3027 it. COND is either an EQ or NE comparison. */
3030 implicit_set_cond_p (const_rtx cond)
3032 const enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3033 const_rtx cst = XEXP (cond, 1);
3035 /* We can't perform this optimization if either operand might be or might
3036 contain a signed zero. */
3037 if (HONOR_SIGNED_ZEROS (mode))
3039 /* It is sufficient to check if CST is or contains a zero. We must
3040 handle float, complex, and vector. If any subpart is a zero, then
3041 the optimization can't be performed. */
3042 /* ??? The complex and vector checks are not implemented yet. We just
3043 always return zero for them. */
3044 if (GET_CODE (cst) == CONST_DOUBLE)
3047 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3048 if (REAL_VALUES_EQUAL (d, dconst0))
3055 return gcse_constant_p (cst);
3058 /* Find the implicit sets of a function. An "implicit set" is a constraint
3059 on the value of a variable, implied by a conditional jump. For example,
3060 following "if (x == 2)", the then branch may be optimized as though the
3061 conditional performed an "explicit set", in this example, "x = 2". This
3062 function records the set patterns that are implicit at the start of each basic block. */
3066 find_implicit_sets (void)
3068 basic_block bb, dest;
3074 /* Check for more than one successor. */
3075 if (EDGE_COUNT (bb->succs) > 1)
3077 cond = fis_get_condition (BB_END (bb));
3080 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3081 && REG_P (XEXP (cond, 0))
3082 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3083 && implicit_set_cond_p (cond))
3085 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3086 : FALLTHRU_EDGE (bb)->dest;
3088 if (dest && single_pred_p (dest)
3089 && dest != EXIT_BLOCK_PTR)
3091 new_rtx = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3093 implicit_sets[dest->index] = new_rtx;
3096 fprintf (dump_file, "Implicit set of reg %d in ",
3097 REGNO (XEXP (cond, 0)));
3098 fprintf (dump_file, "basic block %d\n", dest->index);
3106 fprintf (dump_file, "Found %d implicit sets\n", count);
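/* Worked example of the recording above: for
       if (x == 2) goto then_bb;
   where X lives in pseudo REG_X and THEN_BB has no other predecessor,
   the branch target receives
       implicit_sets[then_bb->index]
	 = gen_rtx_SET (VOIDmode, reg_x, GEN_INT (2));
   which compute_hash_table_work later hands to hash_scan_set as if it
   were a real insn at the head of THEN_BB.  (REG_X and THEN_BB are
   illustrative names.) */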
3109 /* Perform one copy/constant propagation pass.
3110 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3111 propagation into conditional jumps. If BYPASS_JUMPS is true,
3112 perform conditional jump bypassing optimizations. */
3115 one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
3119 global_const_prop_count = local_const_prop_count = 0;
3120 global_copy_prop_count = local_copy_prop_count = 0;
3123 local_cprop_pass (cprop_jumps);
3125 /* Determine implicit sets. */
3126 implicit_sets = XCNEWVEC (rtx, last_basic_block);
3127 find_implicit_sets ();
3129 alloc_hash_table (get_max_uid (), &set_hash_table, 1);
3130 compute_hash_table (&set_hash_table);
3132 /* Free implicit_sets before peak usage. */
3133 free (implicit_sets);
3134 implicit_sets = NULL;
3137 dump_hash_table (dump_file, "SET", &set_hash_table);
3138 if (set_hash_table.n_elems > 0)
3140 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3141 compute_cprop_data ();
3142 changed = cprop (cprop_jumps);
3144 changed |= bypass_conditional_jumps ();
3148 free_hash_table (&set_hash_table);
3152 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
3153 current_function_name (), pass, bytes_used);
3154 fprintf (dump_file, "%d local const props, %d local copy props, ",
3155 local_const_prop_count, local_copy_prop_count);
3156 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
3157 global_const_prop_count, global_copy_prop_count);
3159 /* Global analysis may get into infinite loops for unreachable blocks. */
3160 if (changed && cprop_jumps)
3161 delete_unreachable_blocks ();
3166 /* Bypass conditional jumps. */
3168 /* The value of last_basic_block at the beginning of the jump_bypass
3169 pass. The use of redirect_edge_and_branch_force may introduce new
3170 basic blocks, but the data flow analysis is only valid for basic
3171 block indices less than bypass_last_basic_block. */
3173 static int bypass_last_basic_block;
3175 /* Find a set of REGNO to a constant that is available at the end of basic
3176 block BB. Returns NULL if no such set is found. Based heavily upon find_avail_set. */
3179 static struct expr *
3180 find_bypass_set (int regno, int bb)
3182 struct expr *result = 0;
3187 struct expr *set = lookup_set (regno, &set_hash_table);
3191 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3193 set = next_set (regno, set);
3199 gcc_assert (GET_CODE (set->expr) == SET);
3201 src = SET_SRC (set->expr);
3202 if (gcse_constant_p (src))
3208 regno = REGNO (src);
3214 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3215 any of the instructions inserted on an edge. Jump bypassing places
3216 condition code setters on CFG edges using insert_insn_on_edge. This
3217 function is required to check that our data flow analysis is still
3218 valid prior to commit_edge_insertions. */
3221 reg_killed_on_edge (const_rtx reg, const_edge e)
3225 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3226 if (INSN_P (insn) && reg_set_p (reg, insn))
3232 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3233 basic block BB which has more than one predecessor. If not NULL, SETCC
3234 is the first instruction of BB, which is immediately followed by JUMP_INSN
3235 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3236 Returns nonzero if a change was made.
3238 During the jump bypassing pass, we may place copies of SETCC instructions
3239 on CFG edges. The following routine must be careful to pay attention to
3240 these inserted insns when performing its transformations. */
3243 bypass_block (basic_block bb, rtx setcc, rtx jump)
3248 int may_be_loop_header;
3252 insn = (setcc != NULL) ? setcc : jump;
3254 /* Determine set of register uses in INSN. */
3256 note_uses (&PATTERN (insn), find_used_regs, NULL);
3257 note = find_reg_equal_equiv_note (insn);
3259 find_used_regs (&XEXP (note, 0), NULL);
3261 may_be_loop_header = false;
3262 FOR_EACH_EDGE (e, ei, bb->preds)
3263 if (e->flags & EDGE_DFS_BACK)
3265 may_be_loop_header = true;
3270 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3274 if (e->flags & EDGE_COMPLEX)
3280 /* We can't redirect edges from new basic blocks. */
3281 if (e->src->index >= bypass_last_basic_block)
3287 /* The irreducible loops created by redirecting edges entering the
3288 loop from outside would decrease the effectiveness of some of the
3289 following optimizations, so prevent this. */
3290 if (may_be_loop_header
3291 && !(e->flags & EDGE_DFS_BACK))
3297 for (i = 0; i < reg_use_count; i++)
3299 struct reg_use *reg_used = &reg_use_table[i];
3300 unsigned int regno = REGNO (reg_used->reg_rtx);
3301 basic_block dest, old_dest;
3305 if (regno >= max_gcse_regno)
3308 set = find_bypass_set (regno, e->src->index);
3313 /* Check the data flow is valid after edge insertions. */
3314 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3317 src = SET_SRC (pc_set (jump));
3320 src = simplify_replace_rtx (src,
3321 SET_DEST (PATTERN (setcc)),
3322 SET_SRC (PATTERN (setcc)));
3324 new_rtx = simplify_replace_rtx (src, reg_used->reg_rtx,
3325 SET_SRC (set->expr));
3327 /* Jump bypassing may have already placed instructions on
3328 edges of the CFG. We can't bypass an outgoing edge that
3329 has instructions associated with it, as these insns won't
3330 get executed if the incoming edge is redirected. */
3332 if (new_rtx == pc_rtx)
3334 edest = FALLTHRU_EDGE (bb);
3335 dest = edest->insns.r ? NULL : edest->dest;
3337 else if (GET_CODE (new_rtx) == LABEL_REF)
3339 dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));
3340 /* Don't bypass edges containing instructions. */
3341 edest = find_edge (bb, dest);
3342 if (edest && edest->insns.r)
3348 /* Avoid unification of the edge with other edges from original
3349 branch. We would end up emitting the instruction on "both" edges. */
3352 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3353 && find_edge (e->src, dest))
3359 && dest != EXIT_BLOCK_PTR)
3361 redirect_edge_and_branch_force (e, dest);
3363 /* Copy the register setter to the redirected edge.
3364 Don't copy CC0 setters, as CC0 is dead after jump. */
3367 rtx pat = PATTERN (setcc);
3368 if (!CC0_P (SET_DEST (pat)))
3369 insert_insn_on_edge (copy_insn (pat), e);
3372 if (dump_file != NULL)
3374 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
3375 "in jump_insn %d equals constant ",
3376 regno, INSN_UID (jump));
3377 print_rtl (dump_file, SET_SRC (set->expr));
3378 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
3379 e->src->index, old_dest->index, dest->index);
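/* Condensed, the per-edge decision above: fold the jump condition with
   the constant known on edge E; the folded result selects the new
   destination. A sketch, not compiled. */
#if 0
if (new_rtx == pc_rtx)
  dest = FALLTHRU_EDGE (bb)->dest;		/* Branch known not taken.  */
else if (GET_CODE (new_rtx) == LABEL_REF)
  dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));	/* Branch known taken.  */
else
  dest = NULL;					/* Not folded; give up on E.  */
#endif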
3392 /* Find basic blocks with more than one predecessor that only contain a
3393 single conditional jump. If the result of the comparison is known at
3394 compile-time from any incoming edge, redirect that edge to the
3395 appropriate target. Returns nonzero if a change was made.
3397 This function is now misnamed, because we also handle indirect jumps. */
3400 bypass_conditional_jumps (void)
3408 /* Note we start at block 1. */
3409 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3412 bypass_last_basic_block = last_basic_block;
3413 mark_dfs_back_edges ();
3416 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3417 EXIT_BLOCK_PTR, next_bb)
3419 /* Check for more than one predecessor. */
3420 if (!single_pred_p (bb))
3423 FOR_BB_INSNS (bb, insn)
3424 if (NONJUMP_INSN_P (insn))
3428 if (GET_CODE (PATTERN (insn)) != SET)
3431 dest = SET_DEST (PATTERN (insn));
3432 if (REG_P (dest) || CC0_P (dest))
3437 else if (JUMP_P (insn))
3439 if ((any_condjump_p (insn) || computed_jump_p (insn))
3440 && onlyjump_p (insn))
3441 changed |= bypass_block (bb, setcc, insn);
3444 else if (INSN_P (insn))
3449 /* If we bypassed any register setting insns, we inserted a
3450 copy on the redirected edge. These need to be committed. */
3452 commit_edge_insertions ();
3457 /* Compute PRE+LCM working variables. */
3459 /* Local properties of expressions. */
3460 /* Nonzero for expressions that are transparent in the block. */
3461 static sbitmap *transp;
3463 /* Nonzero for expressions that are transparent at the end of the block.
3464 This is only zero for expressions killed by an abnormal critical
3465 edge created by a call. */
3466 static sbitmap *transpout;
3468 /* Nonzero for expressions that are computed (available) in the block. */
3469 static sbitmap *comp;
3471 /* Nonzero for expressions that are locally anticipatable in the block. */
3472 static sbitmap *antloc;
3474 /* Nonzero for expressions where this block is an optimal computation point. */
3476 static sbitmap *pre_optimal;
3478 /* Nonzero for expressions which are redundant in a particular block. */
3479 static sbitmap *pre_redundant;
3481 /* Nonzero for expressions which should be inserted on a specific edge. */
3482 static sbitmap *pre_insert_map;
3484 /* Nonzero for expressions which should be deleted in a specific block. */
3485 static sbitmap *pre_delete_map;
3487 /* Contains the edge_list returned by pre_edge_lcm. */
3488 static struct edge_list *edge_list;
3490 /* Allocate vars used for PRE analysis. */
3493 alloc_pre_mem (int n_blocks, int n_exprs)
3495 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3496 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3497 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3500 pre_redundant = NULL;
3501 pre_insert_map = NULL;
3502 pre_delete_map = NULL;
3503 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3505 /* pre_insert and pre_delete are allocated later. */
3508 /* Free vars used for PRE analysis. */
3513 sbitmap_vector_free (transp);
3514 sbitmap_vector_free (comp);
3516 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3519 sbitmap_vector_free (pre_optimal);
3521 sbitmap_vector_free (pre_redundant);
3523 sbitmap_vector_free (pre_insert_map);
3525 sbitmap_vector_free (pre_delete_map);
3527 transp = comp = NULL;
3528 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3531 /* Top level routine to do the dataflow analysis needed by PRE. */
3534 compute_pre_data (void)
3536 sbitmap trapping_expr;
3540 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3541 sbitmap_vector_zero (ae_kill, last_basic_block);
3543 /* Collect expressions which might trap. */
3544 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3545 sbitmap_zero (trapping_expr);
3546 for (ui = 0; ui < expr_hash_table.size; ui++)
3549 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3550 if (may_trap_p (e->expr))
3551 SET_BIT (trapping_expr, e->bitmap_index);
3554 /* Compute ae_kill for each basic block using:
3555 ~(TRANSP | COMP) */
3564 /* If the current block is the destination of an abnormal edge, we
3565 kill all trapping expressions because we won't be able to properly
3566 place the instruction on the edge. So make them neither
3567 anticipatable nor transparent. This is fairly conservative. */
3568 FOR_EACH_EDGE (e, ei, bb->preds)
3569 if (e->flags & EDGE_ABNORMAL)
3571 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3572 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3576 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3577 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3580 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
3581 ae_kill, &pre_insert_map, &pre_delete_map);
3582 sbitmap_vector_free (antloc);
3584 sbitmap_vector_free (ae_kill);
3586 sbitmap_free (trapping_expr);
3591 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
3594 VISITED is a pointer to a working buffer for tracking which BB's have
3595 been visited. It is NULL for the top-level call.
3597 We treat reaching expressions that go through blocks containing the same
3598 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3599 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3600 2 as not reaching. The intent is to improve the probability of finding
3601 only one reaching expression and to reduce register lifetimes by picking
3602 the closest such expression. */
3605 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3610 FOR_EACH_EDGE (pred, ei, bb->preds)
3612 basic_block pred_bb = pred->src;
3614 if (pred->src == ENTRY_BLOCK_PTR
3615 /* Has this predecessor already been visited? */
3616 || visited[pred_bb->index])
3617 ;/* Nothing to do. */
3619 /* Does this predecessor generate this expression? */
3620 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3622 /* Is this the occurrence we're looking for?
3623 Note that there's only one generating occurrence per block
3624 so we just need to check the block number. */
3625 if (occr_bb == pred_bb)
3628 visited[pred_bb->index] = 1;
3630 /* Ignore this predecessor if it kills the expression. */
3631 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3632 visited[pred_bb->index] = 1;
3634 /* Neither gen nor kill. */
3637 visited[pred_bb->index] = 1;
3638 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3643 /* All paths have been checked. */
3647 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
3648 memory allocated for that function is freed. */
3651 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3654 char *visited = XCNEWVEC (char, last_basic_block);
3656 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3663 /* Given an expr, generate RTL which we can insert at the end of a BB,
3664 or on an edge. Set the block number of any insns generated to the value of BB. */
3668 process_insert_insn (struct expr *expr)
3670 rtx reg = expr->reaching_reg;
3671 rtx exp = copy_rtx (expr->expr);
3676 /* If the expression is something that's an operand, like a constant,
3677 just copy it to a register. */
3678 if (general_operand (exp, GET_MODE (reg)))
3679 emit_move_insn (reg, exp);
3681 /* Otherwise, make a new insn to compute this expression and make sure the
3682 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3683 expression to make sure we don't have any sharing issues. */
3686 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3688 if (insn_invalid_p (insn))
3699 /* Add EXPR to the end of basic block BB.
3701 This is used by both PRE and code hoisting.
3703 For PRE, we want to verify that the expr is either transparent
3704 or locally anticipatable in the target block. This check makes
3705 no sense for code hoisting. */
3708 insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre)
3710 rtx insn = BB_END (bb);
3712 rtx reg = expr->reaching_reg;
3713 int regno = REGNO (reg);
3716 pat = process_insert_insn (expr);
3717 gcc_assert (pat && INSN_P (pat));
3720 while (NEXT_INSN (pat_end) != NULL_RTX)
3721 pat_end = NEXT_INSN (pat_end);
3723 /* If the last insn is a jump, insert EXPR in front [taking care to
3724 handle cc0, etc. properly]. Similarly we need to take care of
3725 trapping instructions in the presence of non-call exceptions. */
3728 || (NONJUMP_INSN_P (insn)
3729 && (!single_succ_p (bb)
3730 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
3735 /* It should always be the case that we can put these instructions
3736 anywhere in the basic block when performing PRE optimizations.
3738 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
3739 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
3740 || TEST_BIT (transp[bb->index], expr->bitmap_index));
3742 /* If this is a jump table, then we can't insert stuff here. Since
3743 we know the previous real insn must be the tablejump, we insert
3744 the new instruction just before the tablejump. */
3745 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3746 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3747 insn = prev_real_insn (insn);
3750 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
3751 if cc0 isn't set. */
3752 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3754 insn = XEXP (note, 0);
3757 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
3758 if (maybe_cc0_setter
3759 && INSN_P (maybe_cc0_setter)
3760 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
3761 insn = maybe_cc0_setter;
3764 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3765 new_insn = emit_insn_before_noloc (pat, insn, bb);
3768 /* Likewise if the last insn is a call, as will happen in the presence
3769 of exception handling. */
3770 else if (CALL_P (insn)
3771 && (!single_succ_p (bb)
3772 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3774 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
3775 we search backward and place the instructions before the first
3776 parameter is loaded. Do this for everyone for consistency and a
3777 presumption that we'll get better code elsewhere as well.
3779 It should always be the case that we can put these instructions
3780 anywhere in the basic block when performing PRE optimizations.
3784 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
3785 || TEST_BIT (transp[bb->index], expr->bitmap_index));
3787 /* Since different machines initialize their parameter registers
3788 in different orders, assume nothing. Collect the set of all
3789 parameter registers. */
3790 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3792 /* If we found all the parameter loads, then we want to insert
3793 before the first parameter load.
3795 If we did not find all the parameter loads, then we might have
3796 stopped on the head of the block, which could be a CODE_LABEL.
3797 If we inserted before the CODE_LABEL, then we would be putting
3798 the insn in the wrong basic block. In that case, put the insn
3799 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3800 while (LABEL_P (insn)
3801 || NOTE_INSN_BASIC_BLOCK_P (insn))
3802 insn = NEXT_INSN (insn);
3804 new_insn = emit_insn_before_noloc (pat, insn, bb);
3807 new_insn = emit_insn_after_noloc (pat, insn, bb);
3812 add_label_notes (PATTERN (pat), new_insn);
3815 pat = NEXT_INSN (pat);
3818 gcse_create_count++;
3822 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
3823 bb->index, INSN_UID (new_insn));
3824 fprintf (dump_file, "copying expression %d to reg %d\n",
3825 expr->bitmap_index, regno);
3829 /* Insert partially redundant expressions on edges in the CFG to make
3830 the expressions fully redundant. */
3833 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
3835 int e, i, j, num_edges, set_size, did_insert = 0;
3838 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
3839 if it reaches any of the deleted expressions. */
3841 set_size = pre_insert_map[0]->size;
3842 num_edges = NUM_EDGES (edge_list);
3843 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
3844 sbitmap_vector_zero (inserted, num_edges);
3846 for (e = 0; e < num_edges; e++)
3849 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
3851 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
3853 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
3855 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
3856 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
3858 struct expr *expr = index_map[j];
3861 /* Now look at each deleted occurrence of this expression. */
3862 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
3864 if (! occr->deleted_p)
3867 /* Insert this expression on this edge if it would
3868 reach the deleted occurrence in BB. */
3869 if (!TEST_BIT (inserted[e], j))
3872 edge eg = INDEX_EDGE (edge_list, e);
3874 /* We can't insert anything on an abnormal and
3875 critical edge, so we insert the insn at the end of
3876 the previous block. There are several alternatives
3877 detailed in Morgan's book, p. 277 (sec. 10.5), for
3878 handling this situation. This one is easiest for now. */
3881 if (eg->flags & EDGE_ABNORMAL)
3882 insert_insn_end_basic_block (index_map[j], bb, 0);
3885 insn = process_insert_insn (index_map[j]);
3886 insert_insn_on_edge (insn, eg);
3891 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
3893 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
3894 fprintf (dump_file, "copy expression %d\n",
3895 expr->bitmap_index);
3898 update_ld_motion_stores (expr);
3899 SET_BIT (inserted[e], j);
3901 gcse_create_count++;
3908 sbitmap_vector_free (inserted);
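/* Standalone sketch of the bit scan used above: visit the index of
   every set bit in one sbitmap row MAP, one word at a time, exactly as
   the loop over pre_insert_map[e] does.  MAP and VISIT are hypothetical
   names; not compiled. */
#if 0
{
  unsigned int i;
  int j, indx;

  for (i = indx = 0; i < map->size; i++, indx += SBITMAP_ELT_BITS)
    {
      SBITMAP_ELT_TYPE word = map->elms[i];

      for (j = indx; word != 0; j++, word >>= 1)
	if ((word & 1) != 0)
	  visit (j);
    }
}
#endif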
3912 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
3913 Given "old_reg <- expr" (INSN), instead of adding after it
3914 reaching_reg <- old_reg
3915 it's better to do the following:
3916 reaching_reg <- expr
3917 old_reg <- reaching_reg
3918 because this way copy propagation can discover additional PRE
3919 opportunities. But if this fails, we try the old way.
3920 When "expr" is a store, i.e.
3921 given "MEM <- old_reg", instead of adding after it
3922 reaching_reg <- old_reg
3923 it's better to add it before as follows:
3924 reaching_reg <- old_reg
3925 MEM <- reaching_reg. */
3928 pre_insert_copy_insn (struct expr *expr, rtx insn)
3930 rtx reg = expr->reaching_reg;
3931 int regno = REGNO (reg);
3932 int indx = expr->bitmap_index;
3933 rtx pat = PATTERN (insn);
3934 rtx set, first_set, new_insn;
3938 /* This block matches the logic in hash_scan_insn. */
3939 switch (GET_CODE (pat))
3946 /* Search through the parallel looking for the set whose
3947 source was the expression that we're interested in. */
3948 first_set = NULL_RTX;
3950 for (i = 0; i < XVECLEN (pat, 0); i++)
3952 rtx x = XVECEXP (pat, 0, i);
3953 if (GET_CODE (x) == SET)
3955 /* If the source was a REG_EQUAL or REG_EQUIV note, we
3956 may not find an equivalent expression, but in this
3957 case the PARALLEL will have a single set. */
3958 if (first_set == NULL_RTX)
3960 if (expr_equiv_p (SET_SRC (x), expr->expr))
3968 gcc_assert (first_set);
3969 if (set == NULL_RTX)
3977 if (REG_P (SET_DEST (set)))
3979 old_reg = SET_DEST (set);
3980 /* Check if we can modify the set destination in the original insn. */
3981 if (validate_change (insn, &SET_DEST (set), reg, 0))
3983 new_insn = gen_move_insn (old_reg, reg);
3984 new_insn = emit_insn_after (new_insn, insn);
3988 new_insn = gen_move_insn (reg, old_reg);
3989 new_insn = emit_insn_after (new_insn, insn);
3992 else /* This is possible only in case of a store to memory. */
3994 old_reg = SET_SRC (set);
3995 new_insn = gen_move_insn (reg, old_reg);
3997 /* Check if we can modify the set source in the original insn. */
3998 if (validate_change (insn, &SET_SRC (set), reg, 0))
3999 new_insn = emit_insn_before (new_insn, insn);
4001 new_insn = emit_insn_after (new_insn, insn);
4004 gcse_create_count++;
4008 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4009 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4010 INSN_UID (insn), regno);
4013 /* Copy available expressions that reach the redundant expression
4014 to `reaching_reg'. */
4017 pre_insert_copies (void)
4019 unsigned int i, added_copy;
4024 /* For each available expression in the table, copy the result to
4025 `reaching_reg' if the expression reaches a deleted one.
4027 ??? The current algorithm is rather brute force.
4028 Need to do some profiling. */
4030 for (i = 0; i < expr_hash_table.size; i++)
4031 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4033 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4034 we don't want to insert a copy here because the expression may not
4035 really be redundant. So only insert an insn if the expression was
4036 deleted. This test also avoids further processing if the
4037 expression wasn't deleted anywhere. */
4038 if (expr->reaching_reg == NULL)
4041 /* Set when we add a copy for that expression. */
4044 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4046 if (! occr->deleted_p)
4049 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4051 rtx insn = avail->insn;
4053 /* No need to handle this one if handled already. */
4054 if (avail->copied_p)
4057 /* Don't handle this one if it's a redundant one. */
4058 if (INSN_DELETED_P (insn))
4061 /* Or if the expression doesn't reach the deleted one. */
4062 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4064 BLOCK_FOR_INSN (occr->insn)))
4069 /* Copy the result of avail to reaching_reg. */
4070 pre_insert_copy_insn (expr, insn);
4071 avail->copied_p = 1;
4076 update_ld_motion_stores (expr);
4080 /* Emit move from SRC to DEST noting the equivalence with the expression computed in INSN. */
4083 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4086 rtx set = single_set (insn), set2;
4090 /* This should never fail since we're creating a reg->reg copy
4091 we've verified to be valid. */
4093 new_rtx = emit_insn_after (gen_move_insn (dest, src), insn);
4095 /* Note the equivalence for local CSE pass. */
4096 set2 = single_set (new_rtx);
4097 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4099 if ((note = find_reg_equal_equiv_note (insn)))
4100 eqv = XEXP (note, 0);
4102 eqv = SET_SRC (set);
4104 set_unique_reg_note (new_rtx, REG_EQUAL, copy_insn_1 (eqv));
4109 /* Delete redundant computations.
4110 Deletion is done by changing the insn to copy the `reaching_reg' of
4111 the expression into the result of the SET. It is left to later passes
4112 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4114 Returns nonzero if a change is made. */
4125 for (i = 0; i < expr_hash_table.size; i++)
4126 for (expr = expr_hash_table.table[i];
4128 expr = expr->next_same_hash)
4130 int indx = expr->bitmap_index;
4132 /* We only need to search antic_occr since we require ANTLOC != 0. */
4135 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4137 rtx insn = occr->insn;
4139 basic_block bb = BLOCK_FOR_INSN (insn);
4141 /* We only delete insns that have a single_set. */
4142 if (TEST_BIT (pre_delete_map[bb->index], indx)
4143 && (set = single_set (insn)) != 0
4144 && dbg_cnt (pre_insn))
4146 /* Create a pseudo-reg to store the result of reaching
4147 expressions into. Get the mode for the new pseudo from
4148 the mode of the original destination pseudo. */
4149 if (expr->reaching_reg == NULL)
4150 expr->reaching_reg = gen_reg_rtx_and_attrs (SET_DEST (set));
4152 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4154 occr->deleted_p = 1;
4161 "PRE: redundant insn %d (expression %d) in ",
4162 INSN_UID (insn), indx);
4163 fprintf (dump_file, "bb %d, reaching reg is %d\n",
4164 bb->index, REGNO (expr->reaching_reg));
4173 /* Perform GCSE optimizations using PRE.
4174 This is called by one_pre_gcse_pass after all the dataflow analysis
4177 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4178 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4179 Compiler Design and Implementation.
4181 ??? A new pseudo reg is created to hold the reaching expression. The nice
4182 thing about the classical approach is that it would try to use an existing
4183 reg. If the register can't be adequately optimized [i.e. we introduce
4184 reload problems], one could add a pass here to propagate the new register through the block.
4187 ??? We don't handle single sets in PARALLELs because we're [currently] not
4188 able to copy the rest of the parallel when we insert copies to create full
4189 redundancies from partial redundancies. However, there's no reason why we
4190 can't handle PARALLELs in the cases where there are no partial redundancies. */
4197 int did_insert, changed;
4198 struct expr **index_map;
4201 /* Compute a mapping from expression number (`bitmap_index') to
4202 hash table entry. */
4204 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4205 for (i = 0; i < expr_hash_table.size; i++)
4206 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4207 index_map[expr->bitmap_index] = expr;
4209 /* Delete the redundant insns first so that
4210 - we know what register to use for the new insns and for the other
4211 ones with reaching expressions
4212 - we know which insns are redundant when we go to create copies */
4214 changed = pre_delete ();
4215 did_insert = pre_edge_insert (edge_list, index_map);
4217 /* In other places with reaching expressions, copy the expression to the
4218 specially allocated pseudo-reg that reaches the redundant expr. */
4219 pre_insert_copies ();
4222 commit_edge_insertions ();
4230 /* Top level routine to perform one PRE GCSE pass.
4232 Return nonzero if a change was made. */
4235 one_pre_gcse_pass (int pass)
4239 gcse_subst_count = 0;
4240 gcse_create_count = 0;
4242 alloc_hash_table (get_max_uid (), &expr_hash_table, 0);
4243 add_noreturn_fake_exit_edges ();
4245 compute_ld_motion_mems ();
4247 compute_hash_table (&expr_hash_table);
4248 trim_ld_motion_mems ();
4250 dump_hash_table (dump_file, "Expression", &expr_hash_table);
4252 if (expr_hash_table.n_elems > 0)
4254 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4255 compute_pre_data ();
4256 changed |= pre_gcse ();
4257 free_edge_list (edge_list);
4262 remove_fake_exit_edges ();
4263 free_hash_table (&expr_hash_table);
4267 fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4268 current_function_name (), pass, bytes_used);
4269 fprintf (dump_file, "%d substs, %d insns created\n",
4270 gcse_subst_count, gcse_create_count);
4276 /* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them
4277 to INSN. If such notes are added to an insn which references a
4278 CODE_LABEL, the LABEL_NUSES count is incremented. We have to add
4279 that note, because the following loop optimization pass requires it. */
4282 /* ??? If there was a jump optimization pass after gcse and before loop,
4283 then we would not need to do this here, because jump would add the
4284 necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes. */
4287 add_label_notes (rtx x, rtx insn)
4289 enum rtx_code code = GET_CODE (x);
4293 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4295 /* This code used to ignore labels that referred to dispatch tables to
4296 avoid flow generating (slightly) worse code.
4298 We no longer ignore such label references (see LABEL_REF handling in
4299 mark_jump_label for additional information). */
/* There's no reason for current users to emit jump-insns with
   such a LABEL_REF, so we don't have to handle REG_LABEL_TARGET
   notes.  */
4304 gcc_assert (!JUMP_P (insn));
4305 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (x, 0));
4307 if (LABEL_P (XEXP (x, 0)))
4308 LABEL_NUSES (XEXP (x, 0))++;
4313 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4316 add_label_notes (XEXP (x, i), insn);
4317 else if (fmt[i] == 'E')
4318 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4319 add_label_notes (XVECEXP (x, i, j), insn);
4323 /* Compute transparent outgoing information for each block.
4325 An expression is transparent to an edge unless it is killed by
4326 the edge itself. This can only happen with abnormal control flow,
4327 when the edge is traversed through a call. This happens with
4328 non-local labels and exceptions.
4330 This would not be necessary if we split the edge. While this is
4331 normally impossible for abnormal critical edges, with some effort
4332 it should be possible with exception handling, since we still have
4333 control over which handler should be invoked. But due to increased
4334 EH table sizes, this may not be worthwhile. */
4337 compute_transpout (void)
4343 sbitmap_vector_ones (transpout, last_basic_block);
/* Note that flow inserted a nop at the end of basic blocks that
   end in call instructions for reasons other than abnormal
   control flow.  */
4350 if (! CALL_P (BB_END (bb)))
4353 for (i = 0; i < expr_hash_table.size; i++)
4354 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4355 if (MEM_P (expr->expr))
4357 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4358 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
/* ??? Optimally, we would use interprocedural alias
   analysis to determine if this mem is actually killed
   by this call.  */
4364 RESET_BIT (transpout[bb->index], expr->bitmap_index);
4369 /* Code Hoisting variables and subroutines. */
4371 /* Very busy expressions. */
4372 static sbitmap *hoist_vbein;
4373 static sbitmap *hoist_vbeout;
4375 /* Hoistable expressions. */
4376 static sbitmap *hoist_exprs;
/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.
4382 It's unclear if tail merging could be run in parallel with
4383 code hoisting. It would be nice. */
4385 /* Allocate vars used for code hoisting analysis. */
4388 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4390 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4391 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4392 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4394 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4395 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4396 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4397 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4400 /* Free vars used for code hoisting analysis. */
4403 free_code_hoist_mem (void)
4405 sbitmap_vector_free (antloc);
4406 sbitmap_vector_free (transp);
4407 sbitmap_vector_free (comp);
4409 sbitmap_vector_free (hoist_vbein);
4410 sbitmap_vector_free (hoist_vbeout);
4411 sbitmap_vector_free (hoist_exprs);
4412 sbitmap_vector_free (transpout);
4414 free_dominance_info (CDI_DOMINATORS);
4417 /* Compute the very busy expressions at entry/exit from each block.
4419 An expression is very busy if all paths from a given point
4420 compute the expression. */
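/* A sketch of the equations this routine solves (added for clarity;
   the bit-vector code below is the authoritative form):

	VBEOUT(bb) = intersection of VBEIN(succ) over all successors
	VBEIN(bb)  = ANTLOC(bb) | (TRANSP(bb) & VBEOUT(bb))

   That is, an expression is very busy on exit from BB if it is very
   busy on entry to every successor, and very busy on entry if it is
   computed in BB, or is transparent through BB and very busy on
   exit.  */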
4423 compute_code_hoist_vbeinout (void)
4425 int changed, passes;
4428 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4429 sbitmap_vector_zero (hoist_vbein, last_basic_block);
/* We scan the blocks in the reverse order to speed up
   the convergence.  */
4440 FOR_EACH_BB_REVERSE (bb)
4442 if (bb->next_bb != EXIT_BLOCK_PTR)
4443 sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
4444 hoist_vbein, bb->index);
4446 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index],
4448 hoist_vbeout[bb->index],
4456 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
4459 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4462 compute_code_hoist_data (void)
4464 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4465 compute_transpout ();
4466 compute_code_hoist_vbeinout ();
4467 calculate_dominance_info (CDI_DOMINATORS);
4469 fprintf (dump_file, "\n");
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */
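/* For instance (an illustrative example, not from the original
   sources): in a loop of the form

	loop:
	  if (f ())
	    b = 0;		<- "b + c" is killed here
	  x = b + c;
	  goto loop;

   "b + c" is computed on every iteration, but it must not be hoisted
   above the loop, because a path through the block that sets B reaches
   the computation; the *all*-paths test below correctly rejects it.  */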
4486 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4490 int visited_allocated_locally = 0;
4493 if (visited == NULL)
4495 visited_allocated_locally = 1;
4496 visited = XCNEWVEC (char, last_basic_block);
4499 FOR_EACH_EDGE (pred, ei, bb->preds)
4501 basic_block pred_bb = pred->src;
4503 if (pred->src == ENTRY_BLOCK_PTR)
4505 else if (pred_bb == expr_bb)
4507 else if (visited[pred_bb->index])
4510 /* Does this predecessor generate this expression? */
4511 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4513 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4519 visited[pred_bb->index] = 1;
4520 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4525 if (visited_allocated_locally)
4528 return (pred == NULL);
4531 /* Actually perform code hoisting. */
4536 basic_block bb, dominated;
4537 VEC (basic_block, heap) *domby;
4539 struct expr **index_map;
4542 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4544 /* Compute a mapping from expression number (`bitmap_index') to
4545 hash table entry. */
4547 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4548 for (i = 0; i < expr_hash_table.size; i++)
4549 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4550 index_map[expr->bitmap_index] = expr;
/* Walk over each basic block looking for potentially hoistable
   expressions; nothing gets hoisted out of the entry block.  */
4557 int insn_inserted_p;
4559 domby = get_dominated_by (CDI_DOMINATORS, bb);
4560 /* Examine each expression that is very busy at the exit of this
4561 block. These are the potentially hoistable expressions. */
4562 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4566 if (TEST_BIT (hoist_vbeout[bb->index], i)
4567 && TEST_BIT (transpout[bb->index], i))
/* We've found a potentially hoistable expression; now
   we look at every block BB dominates to see if it
   computes the expression.  */
4572 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
4574 /* Ignore self dominance. */
4575 if (bb == dominated)
/* We've found a dominated block; now see if it computes
   the busy expression and whether or not moving that
   expression to the "beginning" of that block is safe.  */
4580 if (!TEST_BIT (antloc[dominated->index], i))
/* Note if the expression would reach the dominated block
   unimpaired if it was placed at the end of BB.
4586 Keep track of how many times this expression is hoistable
4587 from a dominated block into BB. */
4588 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4592 /* If we found more than one hoistable occurrence of this
4593 expression, then note it in the bitmap of expressions to
4594 hoist. It makes no sense to hoist things which are computed
4595 in only one BB, and doing so tends to pessimize register
4596 allocation. One could increase this value to try harder
4597 to avoid any possible code expansion due to register
allocation issues; however, experiments have shown that
4599 the vast majority of hoistable expressions are only movable
4600 from two successors, so raising this threshold is likely
4601 to nullify any benefit we get from code hoisting. */
4604 SET_BIT (hoist_exprs[bb->index], i);
4609 /* If we found nothing to hoist, then quit now. */
4612 VEC_free (basic_block, heap, domby);
4616 /* Loop over all the hoistable expressions. */
4617 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4619 /* We want to insert the expression into BB only once, so
4620 note when we've inserted it. */
4621 insn_inserted_p = 0;
4623 /* These tests should be the same as the tests above. */
4624 if (TEST_BIT (hoist_exprs[bb->index], i))
/* We've found a potentially hoistable expression; now
   we look at every block BB dominates to see if it
   computes the expression.  */
4629 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
4631 /* Ignore self dominance. */
4632 if (bb == dominated)
/* We've found a dominated block; now see if it computes
   the busy expression and whether or not moving that
   expression to the "beginning" of that block is safe.  */
4638 if (!TEST_BIT (antloc[dominated->index], i))
4641 /* The expression is computed in the dominated block and
4642 it would be safe to compute it at the start of the
4643 dominated block. Now we have to determine if the
4644 expression would reach the dominated block if it was
4645 placed at the end of BB. */
4646 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4648 struct expr *expr = index_map[i];
4649 struct occr *occr = expr->antic_occr;
/* Find the right occurrence of this expression.  Test OCCR for
   NULL before dereferencing it.  */
while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
4659 set = single_set (insn);
4662 /* Create a pseudo-reg to store the result of reaching
4663 expressions into. Get the mode for the new pseudo
4664 from the mode of the original destination pseudo. */
4665 if (expr->reaching_reg == NULL)
4667 = gen_reg_rtx_and_attrs (SET_DEST (set));
4669 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4671 occr->deleted_p = 1;
4672 if (!insn_inserted_p)
4674 insert_insn_end_basic_block (index_map[i], bb, 0);
4675 insn_inserted_p = 1;
4681 VEC_free (basic_block, heap, domby);
4687 /* Top level routine to perform one code hoisting (aka unification) pass
4689 Return nonzero if a change was made. */
4692 one_code_hoisting_pass (void)
4696 alloc_hash_table (get_max_uid (), &expr_hash_table, 0);
4697 compute_hash_table (&expr_hash_table);
dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);
4701 if (expr_hash_table.n_elems > 0)
4703 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
4704 compute_code_hoist_data ();
4706 free_code_hoist_mem ();
4709 free_hash_table (&expr_hash_table);
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

     The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge-based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

     Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
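/* Conceptually (a sketch of the combined effect, not the exact RTL the
   passes emit), the loop above ends up as if it had been written:

	    void foo(float scale)
	    {
	      int t;			<- the new "reaching register"
	      for (t=0; t<10; t++)
		a[t] *= scale;
	      i = t;			<- single store, sunk to the exit
	    }
 */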
4740 pre_ldst_expr_hash (const void *p)
4742 int do_not_record_p = 0;
4743 const struct ls_expr *const x = (const struct ls_expr *) p;
4744 return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
4748 pre_ldst_expr_eq (const void *p1, const void *p2)
4750 const struct ls_expr *const ptr1 = (const struct ls_expr *) p1,
4751 *const ptr2 = (const struct ls_expr *) p2;
4752 return expr_equiv_p (ptr1->pattern, ptr2->pattern);
4755 /* This will search the ldst list for a matching expression. If it
4756 doesn't find one, we create one and initialize it. */
4758 static struct ls_expr *
4761 int do_not_record_p = 0;
4762 struct ls_expr * ptr;
4767 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
4768 NULL, /*have_reg_qty=*/false);
4771 slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
4773 return (struct ls_expr *)*slot;
4775 ptr = XNEW (struct ls_expr);
4777 ptr->next = pre_ldst_mems;
4780 ptr->pattern_regs = NULL_RTX;
4781 ptr->loads = NULL_RTX;
4782 ptr->stores = NULL_RTX;
4783 ptr->reaching_reg = NULL_RTX;
4786 ptr->hash_index = hash;
4787 pre_ldst_mems = ptr;
4793 /* Free up an individual ldst entry. */
4796 free_ldst_entry (struct ls_expr * ptr)
4798 free_INSN_LIST_list (& ptr->loads);
4799 free_INSN_LIST_list (& ptr->stores);
4804 /* Free up all memory associated with the ldst list. */
4807 free_ldst_mems (void)
4810 htab_delete (pre_ldst_table);
4811 pre_ldst_table = NULL;
4813 while (pre_ldst_mems)
4815 struct ls_expr * tmp = pre_ldst_mems;
4817 pre_ldst_mems = pre_ldst_mems->next;
4819 free_ldst_entry (tmp);
4822 pre_ldst_mems = NULL;
4825 /* Dump debugging info about the ldst list. */
4828 print_ldst_list (FILE * file)
4830 struct ls_expr * ptr;
4832 fprintf (file, "LDST list: \n");
4834 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
4836 fprintf (file, " Pattern (%3d): ", ptr->index);
4838 print_rtl (file, ptr->pattern);
4840 fprintf (file, "\n Loads : ");
4843 print_rtl (file, ptr->loads);
4845 fprintf (file, "(nil)");
4847 fprintf (file, "\n Stores : ");
4850 print_rtl (file, ptr->stores);
4852 fprintf (file, "(nil)");
4854 fprintf (file, "\n\n");
4857 fprintf (file, "\n");
/* Return X's entry in the list of ldst only expressions, or NULL if
   there is none (or the entry has been invalidated).  */
4862 static struct ls_expr *
4863 find_rtx_in_ldst (rtx x)
4867 if (!pre_ldst_table)
4870 slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
4871 if (!slot || ((struct ls_expr *)*slot)->invalid)
4873 return (struct ls_expr *) *slot;
4876 /* Assign each element of the list of mems a monotonically increasing value. */
4879 enumerate_ldsts (void)
4881 struct ls_expr * ptr;
4884 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
4890 /* Return first item in the list. */
4892 static inline struct ls_expr *
4893 first_ls_expr (void)
4895 return pre_ldst_mems;
4898 /* Return the next item in the list after the specified one. */
4900 static inline struct ls_expr *
4901 next_ls_expr (struct ls_expr * ptr)
4906 /* Load Motion for loads which only kill themselves. */
/* Return true if X is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list; otherwise we let the usual aliasing take care of it.  */
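/* For instance (illustrative, not from the original sources): a load
   such as (mem:SI (symbol_ref ("x"))) qualifies, whereas a volatile MEM,
   a BLKmode MEM, a MEM that may trap under -fnon-call-exceptions, or a
   MEM mentioning the stack pointer (a stack argument slot) is rejected
   by the checks below.  */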
4913 simple_mem (const_rtx x)
4918 if (MEM_VOLATILE_P (x))
4921 if (GET_MODE (x) == BLKmode)
/* If we are handling exceptions, we must be careful with memory references
   that may trap.  If we are not, the behavior is undefined, so we may just
   continue.  */
4927 if (flag_non_call_exceptions && may_trap_p (x))
4930 if (side_effects_p (x))
/* Do not consider function arguments passed on the stack.  */
4934 if (reg_mentioned_p (stack_pointer_rtx, x))
4937 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias itself.  If we miss a load, we will get
   the wrong value since gcse might common it and we won't know to
   fix it up.  */
4952 invalidate_any_buried_refs (rtx x)
4956 struct ls_expr * ptr;
4958 /* Invalidate it in the list. */
4959 if (MEM_P (x) && simple_mem (x))
4961 ptr = ldst_entry (x);
4965 /* Recursively process the insn. */
4966 fmt = GET_RTX_FORMAT (GET_CODE (x));
4968 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
4971 invalidate_any_buried_refs (XEXP (x, i));
4972 else if (fmt[i] == 'E')
4973 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4974 invalidate_any_buried_refs (XVECEXP (x, i, j));
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  Any uses/defs that don't
   match these criteria are invalidated and trimmed out later.  */
4987 compute_ld_motion_mems (void)
4989 struct ls_expr * ptr;
4993 pre_ldst_mems = NULL;
4994 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
4995 pre_ldst_expr_eq, NULL);
4999 FOR_BB_INSNS (bb, insn)
5003 if (GET_CODE (PATTERN (insn)) == SET)
5005 rtx src = SET_SRC (PATTERN (insn));
5006 rtx dest = SET_DEST (PATTERN (insn));
5008 /* Check for a simple LOAD... */
5009 if (MEM_P (src) && simple_mem (src))
5011 ptr = ldst_entry (src);
5013 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5019 /* Make sure there isn't a buried load somewhere. */
5020 invalidate_any_buried_refs (src);
/* Check for stores.  Don't worry about aliased ones; they
   will block any movement we might do later.  We only care
   about this exact pattern since those are the only
   circumstances in which we will ignore the aliasing info.  */
5027 if (MEM_P (dest) && simple_mem (dest))
5029 ptr = ldst_entry (dest);
5032 && GET_CODE (src) != ASM_OPERANDS
5033 /* Check for REG manually since want_to_gcse_p
5034 returns 0 for all REGs. */
5035 && can_assign_to_reg_p (src))
5036 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5042 invalidate_any_buried_refs (PATTERN (insn));
/* Remove any references that either have been invalidated or are not in
   the expression list for pre gcse.  */
5052 trim_ld_motion_mems (void)
5054 struct ls_expr * * last = & pre_ldst_mems;
5055 struct ls_expr * ptr = pre_ldst_mems;
5061 /* Delete if entry has been made invalid. */
5064 /* Delete if we cannot find this mem in the expression list. */
5065 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5067 for (expr = expr_hash_table.table[hash];
5069 expr = expr->next_same_hash)
5070 if (expr_equiv_p (expr->expr, ptr->pattern))
5074 expr = (struct expr *) 0;
5078 /* Set the expression field if we are keeping it. */
5086 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5087 free_ldst_entry (ptr);
5092 /* Show the world what we've found. */
5093 if (dump_file && pre_ldst_mems != NULL)
5094 print_ldst_list (dump_file);
/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */
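/* An illustrative example (mine, not from the original sources): a store

	(set (mem:SI (symbol_ref ("i"))) (plus:SI (reg:SI 60) (reg:SI 61)))

   becomes

	(set (reg:SI 100) (plus:SI (reg:SI 60) (reg:SI 61)))
	(set (mem:SI (symbol_ref ("i"))) (reg:SI 100))

   assuming (reg:SI 100) is the reaching register for the expression.  */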
5105 update_ld_motion_stores (struct expr * expr)
5107 struct ls_expr * mem_ptr;
5109 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
/* We can try to find just the REACHED stores, but it shouldn't
   matter to set the reaching reg everywhere...  some might be
   dead and should be eliminated later.  */
5115 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5116 where reg is the reaching reg used in the load. We checked in
5117 compute_ld_motion_mems that we can replace (set mem expr) with
5118 (set reg expr) in that insn. */
5119 rtx list = mem_ptr->stores;
5121 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5123 rtx insn = XEXP (list, 0);
5124 rtx pat = PATTERN (insn);
5125 rtx src = SET_SRC (pat);
5126 rtx reg = expr->reaching_reg;
5129 /* If we've already copied it, continue. */
5130 if (expr->reaching_reg == src)
5135 fprintf (dump_file, "PRE: store updated with reaching reg ");
5136 print_rtl (dump_file, expr->reaching_reg);
5137 fprintf (dump_file, ":\n ");
5138 print_inline_rtx (dump_file, insn, 8);
5139 fprintf (dump_file, "\n");
5142 copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
5143 new_rtx = emit_insn_before (copy, insn);
5144 SET_SRC (pat) = reg;
5145 df_insn_rescan (insn);
/* Un-recognize this pattern since it's probably different now.  */
5148 INSN_CODE (insn) = -1;
5149 gcse_create_count++;
5154 /* Store motion code. */
5156 #define ANTIC_STORE_LIST(x) ((x)->loads)
5157 #define AVAIL_STORE_LIST(x) ((x)->stores)
5158 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
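/* Note that the three macros above deliberately reuse fields of struct
   ls_expr whose load-motion meaning does not apply during store motion:
   the anticipatable and available store lists live in the `loads' and
   `stores' fields, and `reaching_reg' temporarily records the insn that
   made an availability check fail (see find_moveable_store below).  */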
5160 /* This is used to communicate the target bitvector we want to use in the
5161 reg_set_info routine when called via the note_stores mechanism. */
5162 static int * regvec;
5164 /* And current insn, for the same routine. */
5165 static rtx compute_store_table_current_insn;
5167 /* Used in computing the reverse edge graph bit vectors. */
5168 static sbitmap * st_antloc;
5170 /* Global holding the number of store expressions we are dealing with. */
5171 static int num_stores;
/* Checks to set if we need to mark a register set.  Called from
   note_stores.  */
5177 reg_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
5178 void *data ATTRIBUTE_UNUSED)
5180 if (GET_CODE (dest) == SUBREG)
5181 dest = SUBREG_REG (dest);
5184 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
/* Clear any mark that says that this insn sets DEST.  Called from
   note_stores.  */
5191 reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
5194 int *dead_vec = (int *) data;
5196 if (GET_CODE (dest) == SUBREG)
5197 dest = SUBREG_REG (dest);
5200 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5201 dead_vec[REGNO (dest)] = 0;
/* Return zero if any of the registers in list X are killed
   according to the set of registers recorded in REGS_SET;
   nonzero otherwise.  */
5208 store_ops_ok (const_rtx x, int *regs_set)
5212 for (; x; x = XEXP (x, 1))
5215 if (regs_set[REGNO(reg)])
5222 /* Returns a list of registers mentioned in X. */
5224 extract_mentioned_regs (rtx x)
5226 return extract_mentioned_regs_helper (x, NULL_RTX);
/* Helper for extract_mentioned_regs; ACCUM is used to accumulate the
   registers mentioned so far.  */
5232 extract_mentioned_regs_helper (rtx x, rtx accum)
5238 /* Repeat is used to turn tail-recursion into iteration. */
5244 code = GET_CODE (x);
5248 return alloc_EXPR_LIST (0, x, accum);
5260 /* We do not run this function with arguments having side effects. */
5280 i = GET_RTX_LENGTH (code) - 1;
5281 fmt = GET_RTX_FORMAT (code);
5287 rtx tem = XEXP (x, i);
5289 /* If we are about to do the last recursive call
5290 needed at this level, change it into iteration. */
5297 accum = extract_mentioned_regs_helper (tem, accum);
5299 else if (fmt[i] == 'E')
5303 for (j = 0; j < XVECLEN (x, i); j++)
5304 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
/* Determine whether INSN is a MEM store pattern that we will consider
   moving.  REGS_SET_BEFORE is bitmap of registers set before (and
   including) the current insn, REGS_SET_AFTER is bitmap of registers set
   after (and including) the insn in this basic block.  We must be passing
   through BB from head to end, as we are using this fact to speed things
   up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn that causes it not to be
      available is stored in reaching_reg.

   Things are complicated a bit by the fact that there already may be stores
   to the same MEM from other blocks; also the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
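/* For instance (an illustrative example, not from the original
   sources), within a single basic block

	a.x = 1;	<- anticipatable: the first store to a.x, and
			   nothing above it in the block kills it
	b = a.x;
	a.x = 2;	<- available: the last store to a.x, and
			   nothing below it in the block kills it

   the intervening load makes the first store non-available and the
   second store non-anticipatable.  */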
5334 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5336 struct ls_expr * ptr;
5338 int check_anticipatable, check_available;
5339 basic_block bb = BLOCK_FOR_INSN (insn);
5341 set = single_set (insn);
5345 dest = SET_DEST (set);
5347 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5348 || GET_MODE (dest) == BLKmode)
5351 if (side_effects_p (dest))
/* If we are handling exceptions, we must be careful with memory references
   that may trap.  If we are not, the behavior is undefined, so we may just
   continue.  */
5357 if (flag_non_call_exceptions && may_trap_p (dest))
5360 /* Even if the destination cannot trap, the source may. In this case we'd
5361 need to handle updating the REG_EH_REGION note. */
5362 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
/* Make sure that the SET_SRC of this store insn can be assigned to
   a register, or we will fail later on in replace_store_insn, which
   assumes that we can do this.  But sometimes the target machine has
   oddities like MEM read-modify-write instruction.  See for example
5370 if (!can_assign_to_reg_p (SET_SRC (set)))
5373 ptr = ldst_entry (dest);
5374 if (!ptr->pattern_regs)
5375 ptr->pattern_regs = extract_mentioned_regs (dest);
5377 /* Do not check for anticipatability if we either found one anticipatable
5378 store already, or tested for one and found out that it was killed. */
5379 check_anticipatable = 0;
5380 if (!ANTIC_STORE_LIST (ptr))
5381 check_anticipatable = 1;
5384 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5386 && BLOCK_FOR_INSN (tmp) != bb)
5387 check_anticipatable = 1;
5389 if (check_anticipatable)
5391 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5395 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5396 ANTIC_STORE_LIST (ptr));
/* It is not necessary to check whether the store is available if we did
   it successfully before; if we failed before, do not bother to check
   until we reach the insn that caused us to fail.  */
5402 check_available = 0;
5403 if (!AVAIL_STORE_LIST (ptr))
5404 check_available = 1;
5407 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5408 if (BLOCK_FOR_INSN (tmp) != bb)
5409 check_available = 1;
5411 if (check_available)
/* Check that we have already reached the insn at which the check
   failed last time.  */
5415 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5417 for (tmp = BB_END (bb);
5418 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5419 tmp = PREV_INSN (tmp))
5422 check_available = 0;
5425 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5427 &LAST_AVAIL_CHECK_FAILURE (ptr));
5429 if (!check_available)
5430 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5433 /* Find available and anticipatable stores. */
5436 compute_store_table (void)
5442 int *last_set_in, *already_set;
5443 struct ls_expr * ptr, **prev_next_ptr_ptr;
5445 max_gcse_regno = max_reg_num ();
5448 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5449 pre_ldst_expr_eq, NULL);
5450 last_set_in = XCNEWVEC (int, max_gcse_regno);
5451 already_set = XNEWVEC (int, max_gcse_regno);
5453 /* Find all the stores we care about. */
5456 /* First compute the registers set in this block. */
5457 regvec = last_set_in;
5459 FOR_BB_INSNS (bb, insn)
5461 if (! INSN_P (insn))
5466 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5467 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5468 last_set_in[regno] = INSN_UID (insn);
5471 pat = PATTERN (insn);
5472 compute_store_table_current_insn = insn;
5473 note_stores (pat, reg_set_info, NULL);
5476 /* Now find the stores. */
5477 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5478 regvec = already_set;
5479 FOR_BB_INSNS (bb, insn)
5481 if (! INSN_P (insn))
5486 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5487 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5488 already_set[regno] = 1;
5491 pat = PATTERN (insn);
5492 note_stores (pat, reg_set_info, NULL);
5494 /* Now that we've marked regs, look for stores. */
5495 find_moveable_store (insn, already_set, last_set_in);
5497 /* Unmark regs that are no longer set. */
5498 compute_store_table_current_insn = insn;
5499 note_stores (pat, reg_clear_last_set, last_set_in);
5502 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5503 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5504 && last_set_in[regno] == INSN_UID (insn))
5505 last_set_in[regno] = 0;
5509 #ifdef ENABLE_CHECKING
5510 /* last_set_in should now be all-zero. */
5511 for (regno = 0; regno < max_gcse_regno; regno++)
5512 gcc_assert (!last_set_in[regno]);
5515 /* Clear temporary marks. */
5516 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5518 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5519 if (ANTIC_STORE_LIST (ptr)
5520 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5521 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5525 /* Remove the stores that are not available anywhere, as there will
5526 be no opportunity to optimize them. */
5527 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5529 ptr = *prev_next_ptr_ptr)
5531 if (!AVAIL_STORE_LIST (ptr))
5533 *prev_next_ptr_ptr = ptr->next;
5534 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5535 free_ldst_entry (ptr);
5538 prev_next_ptr_ptr = &ptr->next;
5541 ret = enumerate_ldsts ();
5545 fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
5546 print_ldst_list (dump_file);
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the load X.  */
5559 load_kills_store (const_rtx x, const_rtx store_pattern, int after)
5562 return anti_dependence (x, store_pattern);
5564 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5568 /* Go through the entire insn X, looking for any loads which might alias
5569 STORE_PATTERN. Return true if found.
5570 AFTER is true if we are checking the case when STORE_PATTERN occurs
5571 after the insn X. */
5574 find_loads (const_rtx x, const_rtx store_pattern, int after)
5583 if (GET_CODE (x) == SET)
5588 if (load_kills_store (x, store_pattern, after))
5592 /* Recursively process the insn. */
5593 fmt = GET_RTX_FORMAT (GET_CODE (x));
5595 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5598 ret |= find_loads (XEXP (x, i), store_pattern, after);
5599 else if (fmt[i] == 'E')
5600 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5601 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5607 store_killed_in_pat (const_rtx x, const_rtx pat, int after)
5609 if (GET_CODE (pat) == SET)
5611 rtx dest = SET_DEST (pat);
5613 if (GET_CODE (dest) == ZERO_EXTRACT)
5614 dest = XEXP (dest, 0);
5616 /* Check for memory stores to aliased objects. */
5618 && !expr_equiv_p (dest, x))
5622 if (output_dependence (dest, x))
5627 if (output_dependence (x, dest))
5633 if (find_loads (pat, x, after))
5639 /* Check if INSN kills the store pattern X (is aliased with it).
5640 AFTER is true if we are checking the case when store X occurs
5641 after the insn. Return true if it does. */
5644 store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
5646 const_rtx reg, base, note, pat;
5653 /* A normal or pure call might read from pattern,
5654 but a const call will not. */
5655 if (!RTL_CONST_CALL_P (insn))
/* But even a const call reads its parameters.  Check whether the
   base of some of the registers used in the mem is the stack pointer.  */
5660 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5662 base = find_base_term (XEXP (reg, 0));
5664 || (GET_CODE (base) == ADDRESS
5665 && GET_MODE (base) == Pmode
5666 && XEXP (base, 0) == stack_pointer_rtx))
5673 pat = PATTERN (insn);
5674 if (GET_CODE (pat) == SET)
5676 if (store_killed_in_pat (x, pat, after))
5679 else if (GET_CODE (pat) == PARALLEL)
5683 for (i = 0; i < XVECLEN (pat, 0); i++)
5684 if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
5687 else if (find_loads (PATTERN (insn), x, after))
5690 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5691 location aliased with X, then this insn kills X. */
5692 note = find_reg_equal_equiv_note (insn);
5695 note = XEXP (note, 0);
5697 /* However, if the note represents a must alias rather than a may
5698 alias relationship, then it does not kill X. */
5699 if (expr_equiv_p (note, x))
5702 /* See if there are any aliased loads in the note. */
5703 return find_loads (note, x, after);
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, the last insn that kills it is returned in FAIL_INSN.  */
5712 store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
5713 int *regs_set_after, rtx *fail_insn)
5715 rtx last = BB_END (bb), act;
5717 if (!store_ops_ok (x_regs, regs_set_after))
5719 /* We do not know where it will happen. */
5721 *fail_insn = NULL_RTX;
5725 /* Scan from the end, so that fail_insn is determined correctly. */
5726 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
5727 if (store_killed_in_insn (x, x_regs, act, false))
5737 /* Returns true if the expression X is loaded or clobbered on or before INSN
5738 within basic block BB. X_REGS is list of registers mentioned in X.
5739 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
5741 store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
5742 int *regs_set_before)
5744 rtx first = BB_HEAD (bb);
5746 if (!store_ops_ok (x_regs, regs_set_before))
5749 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
5750 if (store_killed_in_insn (x, x_regs, insn, true))
/* Fill in the available, anticipatable, transparent and kill vectors,
   based on the lists of available and anticipatable stores.  */
5759 build_store_vectors (void)
5762 int *regs_set_in_block;
5764 struct ls_expr * ptr;
5766 /* Build the gen_vector. This is any store in the table which is not killed
5767 by aliasing later in its block. */
5768 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
5769 sbitmap_vector_zero (ae_gen, last_basic_block);
5771 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
5772 sbitmap_vector_zero (st_antloc, last_basic_block);
5774 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5776 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
5778 insn = XEXP (st, 0);
5779 bb = BLOCK_FOR_INSN (insn);
/* If we've already seen an available expression in this block,
   we can delete this one (it occurs earlier in the block).  We'll
   copy the SRC expression to an unused register in case there
   are any side effects.  */
5785 if (TEST_BIT (ae_gen[bb->index], ptr->index))
5787 rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
5789 fprintf (dump_file, "Removing redundant store:\n");
5790 replace_store_insn (r, XEXP (st, 0), bb, ptr);
5793 SET_BIT (ae_gen[bb->index], ptr->index);
5796 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
5798 insn = XEXP (st, 0);
5799 bb = BLOCK_FOR_INSN (insn);
5800 SET_BIT (st_antloc[bb->index], ptr->index);
5804 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
5805 sbitmap_vector_zero (ae_kill, last_basic_block);
5807 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
5808 sbitmap_vector_zero (transp, last_basic_block);
5809 regs_set_in_block = XNEWVEC (int, max_gcse_regno);
5813 FOR_BB_INSNS (bb, insn)
5817 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
unsigned int ref_regno = DF_REF_REGNO (*def_rec);
if (ref_regno < max_gcse_regno)
  regs_set_in_block[ref_regno] = 1;
5825 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5827 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
5828 bb, regs_set_in_block, NULL))
5830 /* It should not be necessary to consider the expression
5831 killed if it is both anticipatable and available. */
5832 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
5833 || !TEST_BIT (ae_gen[bb->index], ptr->index))
5834 SET_BIT (ae_kill[bb->index], ptr->index);
5837 SET_BIT (transp[bb->index], ptr->index);
5841 free (regs_set_in_block);
5845 dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
5846 dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
5847 dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
5848 dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
5852 /* Insert an instruction at the beginning of a basic block, and update
5853 the BB_HEAD if needed. */
5856 insert_insn_start_basic_block (rtx insn, basic_block bb)
5858 /* Insert at start of successor block. */
5859 rtx prev = PREV_INSN (BB_HEAD (bb));
5860 rtx before = BB_HEAD (bb);
5863 if (! LABEL_P (before)
5864 && !NOTE_INSN_BASIC_BLOCK_P (before))
5867 if (prev == BB_END (bb))
5869 before = NEXT_INSN (before);
5872 insn = emit_insn_after_noloc (insn, prev, bb);
5876 fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
5878 print_inline_rtx (dump_file, insn, 6);
5879 fprintf (dump_file, "\n");
5883 /* This routine will insert a store on an edge. EXPR is the ldst entry for
5884 the memory reference, and E is the edge to insert it on. Returns nonzero
5885 if an edge insertion was performed. */
5888 insert_store (struct ls_expr * expr, edge e)
/* We did all the deletes before this insert, so if we didn't delete a
   store, then we haven't set the reaching reg yet either.  */
5897 if (expr->reaching_reg == NULL_RTX)
5900 if (e->flags & EDGE_FAKE)
5903 reg = expr->reaching_reg;
5904 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
5906 /* If we are inserting this expression on ALL predecessor edges of a BB,
5907 insert it at the start of the BB, and reset the insert bits on the other
5908 edges so we don't try to insert it on the other edges. */
5910 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
5911 if (!(tmp->flags & EDGE_FAKE))
5913 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
5915 gcc_assert (index != EDGE_INDEX_NO_EDGE);
5916 if (! TEST_BIT (pre_insert_map[index], expr->index))
5920 /* If tmp is NULL, we found an insertion on every edge, blank the
5921 insertion vector for these edges, and insert at the start of the BB. */
5922 if (!tmp && bb != EXIT_BLOCK_PTR)
5924 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
5926 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
5927 RESET_BIT (pre_insert_map[index], expr->index);
5929 insert_insn_start_basic_block (insn, bb);
/* We can't put stores in the front of blocks pointed to by abnormal
   edges since that may put a store where one didn't use to be.  */
5935 gcc_assert (!(e->flags & EDGE_ABNORMAL));
5937 insert_insn_on_edge (insn, e);
5941 fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
5942 e->src->index, e->dest->index);
5943 print_inline_rtx (dump_file, insn, 6);
5944 fprintf (dump_file, "\n");
5950 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
5951 memory location in SMEXPR set in basic block BB.
5953 This could be rather expensive. */
5956 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
5958 edge_iterator *stack, ei;
5961 sbitmap visited = sbitmap_alloc (last_basic_block);
5962 rtx last, insn, note;
5963 rtx mem = smexpr->pattern;
5965 stack = XNEWVEC (edge_iterator, n_basic_blocks);
5967 ei = ei_start (bb->succs);
5969 sbitmap_zero (visited);
5971 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
5979 sbitmap_free (visited);
5982 act = ei_edge (stack[--sp]);
5986 if (bb == EXIT_BLOCK_PTR
5987 || TEST_BIT (visited, bb->index))
5991 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
5994 SET_BIT (visited, bb->index);
5996 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
5998 for (last = ANTIC_STORE_LIST (smexpr);
5999 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6000 last = XEXP (last, 1))
6002 last = XEXP (last, 0);
6005 last = NEXT_INSN (BB_END (bb));
6007 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6010 note = find_reg_equal_equiv_note (insn);
6011 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6015 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6017 remove_note (insn, note);
6022 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6024 if (EDGE_COUNT (bb->succs) > 0)
6028 ei = ei_start (bb->succs);
6029 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6034 /* This routine will replace a store with a SET to a specified register. */
6037 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6039 rtx insn, mem, note, set, ptr;
6041 mem = smexpr->pattern;
6042 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6044 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6045 if (XEXP (ptr, 0) == del)
6047 XEXP (ptr, 0) = insn;
6051 /* Move the notes from the deleted insn to its replacement. */
6052 REG_NOTES (insn) = REG_NOTES (del);
6054 /* Emit the insn AFTER all the notes are transferred.
6055 This is cheaper since we avoid df rescanning for the note change. */
6056 insn = emit_insn_after (insn, del);
6061 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6062 print_inline_rtx (dump_file, del, 6);
fprintf (dump_file, "\nSTORE_MOTION replaced with insn:\n ");
6064 print_inline_rtx (dump_file, insn, 6);
6065 fprintf (dump_file, "\n");
/* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
   they are no longer accurate if they are reached by this definition,
   so drop them.  */
6073 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6076 set = single_set (insn);
6079 if (expr_equiv_p (SET_DEST (set), mem))
6081 note = find_reg_equal_equiv_note (insn);
6082 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6086 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6088 remove_note (insn, note);
6090 remove_reachable_equiv_notes (bb, smexpr);
6094 /* Delete a store, but copy the value that would have been stored into
6095 the reaching_reg for later storing. */
6098 delete_store (struct ls_expr * expr, basic_block bb)
6102 if (expr->reaching_reg == NULL_RTX)
6103 expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);
6105 reg = expr->reaching_reg;
6107 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6110 if (BLOCK_FOR_INSN (del) == bb)
6112 /* We know there is only one since we deleted redundant
6113 ones during the available computation. */
6114 replace_store_insn (reg, del, bb, expr);
6120 /* Free memory used by store motion. */
6123 free_store_memory (void)
6128 sbitmap_vector_free (ae_gen);
6130 sbitmap_vector_free (ae_kill);
6132 sbitmap_vector_free (transp);
6134 sbitmap_vector_free (st_antloc);
6136 sbitmap_vector_free (pre_insert_map);
6138 sbitmap_vector_free (pre_delete_map);
6140 ae_gen = ae_kill = transp = st_antloc = NULL;
6141 pre_insert_map = pre_delete_map = NULL;
6144 /* Perform store motion. Much like gcse, except we move expressions the
6145 other way by looking at the flowgraph in reverse. */
6152 struct ls_expr * ptr;
6153 int update_flow = 0;
6157 fprintf (dump_file, "before store motion\n");
6158 print_rtl (dump_file, get_insns ());
6161 init_alias_analysis ();
6163 /* Find all the available and anticipatable stores. */
6164 num_stores = compute_store_table ();
6165 if (num_stores == 0)
6167 htab_delete (pre_ldst_table);
6168 pre_ldst_table = NULL;
6169 end_alias_analysis ();
6173 /* Now compute kill & transp vectors. */
6174 build_store_vectors ();
6175 add_noreturn_fake_exit_edges ();
6176 connect_infinite_loops_to_exit ();
6178 edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
6179 st_antloc, ae_kill, &pre_insert_map,
6182 /* Now we want to insert the new stores which are going to be needed. */
6183 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
/* If any of the edges we have above are abnormal, we can't move this
   store.  */
6187 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6188 if (TEST_BIT (pre_insert_map[x], ptr->index)
6189 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6194 if (dump_file != NULL)
6196 "Can't replace store %d: abnormal edge from %d to %d\n",
6197 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6198 INDEX_EDGE (edge_list, x)->dest->index);
/* Delete the stores that have become redundant, and insert the new
   ones where they are needed.  */
6205 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6206 delete_store (ptr, bb);
6208 for (x = 0; x < NUM_EDGES (edge_list); x++)
6209 if (TEST_BIT (pre_insert_map[x], ptr->index))
6210 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6214 commit_edge_insertions ();
6216 free_store_memory ();
6217 free_edge_list (edge_list);
6218 remove_fake_exit_edges ();
6219 end_alias_analysis ();
6223 /* Entry point for jump bypassing optimization pass. */
6230 /* We do not construct an accurate cfg in functions which call
6231 setjmp, so just punt to be safe. */
6232 if (cfun->calls_setjmp)
6235 /* Identify the basic block information for this function, including
6236 successors and predecessors. */
6237 max_gcse_regno = max_reg_num ();
6240 dump_flow_info (dump_file, dump_flags);
6242 /* Return if there's nothing to do, or it is too expensive. */
6243 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
6244 || is_too_expensive (_ ("jump bypassing disabled")))
6247 gcc_obstack_init (&gcse_obstack);
/* We need alias analysis.  */
6251 init_alias_analysis ();
6253 max_gcse_regno = max_reg_num ();
6255 changed = one_cprop_pass (3, true, true);
6260 fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
6261 current_function_name (), n_basic_blocks);
6262 fprintf (dump_file, "%d bytes\n\n", bytes_used);
6265 obstack_free (&gcse_obstack, NULL);
/* We are finished with alias analysis.  */
6268 end_alias_analysis ();
6273 /* Return true if the graph is too expensive to optimize. PASS is the
6274 optimization about to be performed. */
6277 is_too_expensive (const char *pass)
/* Trying to perform global optimizations on flow graphs which have
   a high connectivity will take a long time and is unlikely to be
   particularly useful.

   In normal circumstances a cfg should have about twice as many
   edges as blocks.  But we do not want to punish small functions
   which have a couple of switch statements.  Rather than simply
   threshold the number of blocks, use something with a more
   graceful degradation.  (For example, the test below allows a
   function with 1000 basic blocks up to 24000 edges.)  */
6288 if (n_edges > 20000 + n_basic_blocks * 4)
6290 warning (OPT_Wdisabled_optimization,
6291 "%s: %d basic blocks and %d edges/basic block",
6292 pass, n_basic_blocks, n_edges / n_basic_blocks);
6297 /* If allocating memory for the cprop bitmap would take up too much
6298 storage it's better just to disable the optimization. */
6300 * SBITMAP_SET_SIZE (max_reg_num ())
6301 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6303 warning (OPT_Wdisabled_optimization,
6304 "%s: %d basic blocks and %d registers",
6305 pass, n_basic_blocks, max_reg_num ());
6314 gate_handle_jump_bypass (void)
6316 return optimize > 0 && flag_gcse
6317 && dbg_cnt (jump_bypass);
6320 /* Perform jump bypassing and control flow optimizations. */
6322 rest_of_handle_jump_bypass (void)
6324 delete_unreachable_blocks ();
6325 if (bypass_jumps ())
6327 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6328 rebuild_jump_labels (get_insns ());
6334 struct rtl_opt_pass pass_jump_bypass =
6338 "bypass", /* name */
6339 gate_handle_jump_bypass, /* gate */
6340 rest_of_handle_jump_bypass, /* execute */
6343 0, /* static_pass_number */
6344 TV_BYPASS, /* tv_id */
6345 PROP_cfglayout, /* properties_required */
6346 0, /* properties_provided */
6347 0, /* properties_destroyed */
6348 0, /* todo_flags_start */
6350 TODO_ggc_collect | TODO_verify_flow /* todo_flags_finish */
6356 gate_handle_gcse (void)
6358 return optimize > 0 && flag_gcse
6364 rest_of_handle_gcse (void)
6366 int save_csb, save_cfj;
6368 tem = gcse_main (get_insns ());
6369 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6370 rebuild_jump_labels (get_insns ());
6371 save_csb = flag_cse_skip_blocks;
6372 save_cfj = flag_cse_follow_jumps;
6373 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
/* If -fexpensive-optimizations, re-run CSE to clean up things done
   by gcse.  */
6377 if (flag_expensive_optimizations)
6379 timevar_push (TV_CSE);
6380 tem2 = cse_main (get_insns (), max_reg_num ());
6381 df_finish_pass (false);
6382 purge_all_dead_edges ();
6383 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6384 timevar_pop (TV_CSE);
6385 cse_not_expected = !flag_rerun_cse_after_loop;
/* If gcse or cse altered any jumps, rerun jump optimizations to clean
   things up.  */
6390 if (tem || tem2 == 2)
6392 timevar_push (TV_JUMP);
6393 rebuild_jump_labels (get_insns ());
6395 timevar_pop (TV_JUMP);
6400 flag_cse_skip_blocks = save_csb;
6401 flag_cse_follow_jumps = save_cfj;
6405 struct rtl_opt_pass pass_gcse =
6410 gate_handle_gcse, /* gate */
6411 rest_of_handle_gcse, /* execute */
6414 0, /* static_pass_number */
6415 TV_GCSE, /* tv_id */
6416 PROP_cfglayout, /* properties_required */
6417 0, /* properties_provided */
6418 0, /* properties_destroyed */
6419 0, /* todo_flags_start */
6420 TODO_df_finish | TODO_verify_rtl_sharing |
6422 TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
6427 #include "gt-gcse.h"