1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
4 2006, 2007, 2008 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* TODO
23 - reordering of memory allocation and freeing to be more space efficient
24 - do rough calc of how many regs are needed in each block, and a rough
25 calc of how many regs are available in each class and use that to
26 throttle back the code in cases where RTX_COST is minimal.
27 - a store to the same address as a load does not kill the load if the
28 source of the store is also the destination of the load. Handling this
29 allows more load motion, particularly out of loops.
30 - ability to realloc sbitmap vectors would allow one initial computation
31 of reg_set_in_block with only subsequent additions, rather than
32 recomputing it for each pass. */
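/* As an illustration of the third item above (an RTL sketch, not code from
   this file): in a sequence such as

       (set (reg 100) (mem (reg 101)))   ;; load from *r101
       (set (mem (reg 101)) (reg 100))   ;; store the loaded value back

   the store writes back exactly the value the load produced, so the load's
   result remains valid afterwards; treating the store as a kill is overly
   conservative and blocks hoisting the load out of a loop.  */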
36 /* References searched while implementing this.
38 Compilers Principles, Techniques and Tools
39 Aho, Sethi, Ullman
40 Addison-Wesley, 1988
42 Global Optimization by Suppression of Partial Redundancies
43 E. Morel, C. Renvoise
44 Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
46 A Portable Machine-Independent Global Optimizer - Design and Measurements
47 Frederick Chow
48 Stanford Ph.D. thesis, Dec. 1983
50 A Fast Algorithm for Code Movement Optimization
51 D.M. Dhamdhere
52 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
54 A Solution to a Problem with Morel and Renvoise's
55 Global Optimization by Suppression of Partial Redundancies
56 K-H Drechsler, M.P. Stadel
57 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
59 Practical Adaptation of the Global Optimization
60 Algorithm of Morel and Renvoise
61 D.M. Dhamdhere
62 ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991
64 Efficiently Computing Static Single Assignment Form and the Control
65 Dependence Graph
66 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
67 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
69 Lazy Code Motion
70 J. Knoop, O. Ruthing, B. Steffen
71 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
73 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
74 Time for Reducible Flow Control
75 Thomas Ball
76 ACM Letters on Programming Languages and Systems,
77 Vol. 2, Num. 1-4, Mar-Dec 1993
79 An Efficient Representation for Sparse Sets
80 Preston Briggs, Linda Torczon
81 ACM Letters on Programming Languages and Systems,
82 Vol. 2, Num. 1-4, Mar-Dec 1993
84 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
85 K-H Drechsler, M.P. Stadel
86 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
88 Partial Dead Code Elimination
89 J. Knoop, O. Ruthing, B. Steffen
90 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
92 Effective Partial Redundancy Elimination
93 P. Briggs, K.D. Cooper
94 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
96 The Program Structure Tree: Computing Control Regions in Linear Time
97 R. Johnson, D. Pearson, K. Pingali
98 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
100 Optimal Code Motion: Theory and Practice
101 J. Knoop, O. Ruthing, B. Steffen
102 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
104 The power of assignment motion
105 J. Knoop, O. Ruthing, B. Steffen
106 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
108 Global code motion / global value numbering
109 C. Click
110 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
112 Value Driven Redundancy Elimination
113 L.T. Simpson
114 Rice University Ph.D. thesis, Apr. 1996
116 Value Numbering
117 L.T. Simpson
118 Massively Scalar Compiler Project, Rice University, Sep. 1996
120 High Performance Compilers for Parallel Computing
121 Michael Wolfe
122 Addison-Wesley, 1996
124 Advanced Compiler Design and Implementation
125 Steven Muchnick
126 Morgan Kaufmann, 1997
128 Building an Optimizing Compiler
129 Robert Morgan
130 Digital Press, 1998
132 People wishing to speed up the code here should read:
133 Elimination Algorithms for Data Flow Analysis
134 B.G. Ryder, M.C. Paull
135 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
137 How to Analyze Large Programs Efficiently and Informatively
138 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
139 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
141 People wishing to do something different can find various possibilities
142 in the above papers and elsewhere. */
147 #include "coretypes.h"
155 #include "hard-reg-set.h"
158 #include "insn-config.h"
160 #include "basic-block.h"
162 #include "function.h"
171 #include "tree-pass.h"
176 /* Propagate flow information through back edges and thus enable PRE to
177    move loop-invariant calculations out of loops.
179 Originally this tended to create worse overall code, but several
180 improvements during the development of PRE seem to have made following
181 back edges generally a win.
183 Note much of the loop invariant code motion done here would normally
184 be done by loop.c, which has more heuristics for when to move invariants
185 out of loops. At some point we might need to move some of those
186 heuristics into gcse.c. */
188 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
189 are a superset of those done by GCSE.
191 We perform the following steps:
193 1) Compute basic block information.
195 2) Compute table of places where registers are set.
197 3) Perform copy/constant propagation.
199 4) Perform global cse using lazy code motion if not optimizing
200 for size, or code hoisting if we are.
202 5) Perform another pass of copy/constant propagation.
204 Two passes of copy/constant propagation are done because the first one
205 enables more GCSE and the second one helps to clean up the copies that
206 GCSE creates. This is needed more for PRE than for Classic because Classic
207 GCSE will try to use an existing register containing the common
208 subexpression rather than create a new one. This is harder to do for PRE
209 because of the code motion (which Classic GCSE doesn't do).
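A small illustration of why the first pass matters (hypothetical source,
not taken from this pass):

    x = c;         // copy
    a = x + y;     // after cprop: a = c + y
    b = c + y;     // now identical to the insn above

Propagating the copy rewrites `x + y' into `c + y', letting GCSE see the
two computations as the same expression; the second pass then cleans up
the register copies that GCSE itself introduces.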
211 Expressions we are interested in GCSE-ing are of the form
212 (set (pseudo-reg) (expression)).
213 Function want_to_gcse_p says what these are.
215 PRE handles moving invariant expressions out of loops (by treating them as
216 partially redundant).
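For example (illustrative only): a computation of `a + b' inside a loop
whose operands the loop never changes is partially redundant along the
back edge.  PRE inserts the computation on the loop preheader edge, after
which the occurrence inside the loop is fully redundant and is deleted:

    while (p)               t = a + b;
      x = a + b;    ==>     while (p)
                              x = t;

i.e. loop-invariant code motion falls out as a special case of PRE.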
218 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
219 assignment) based GVN (global value numbering). L. T. Simpson's paper
220 (Rice University) on value numbering is a useful reference for this.
222 **********************
224 We used to support multiple passes but there are diminishing returns in
225 doing so. The first pass usually makes 90% of the changes that are doable.
226 A second pass can make a few more changes made possible by the first pass.
227 Experiments show any further passes don't make enough changes to justify
228 the expense.
230 A study of spec92 using an unlimited number of passes:
231 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
232 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
233 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
235 It was found doing copy propagation between each pass enables further
236 substitutions.
238 PRE is quite expensive in complicated functions because the DFA can take
239 a while to converge. Hence we only perform one pass. The parameter
240 max-gcse-passes can be modified if one wants to experiment.
242 **********************
244 The steps for PRE are:
246 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
248 2) Perform the data flow analysis for PRE.
250 3) Delete the redundant instructions
252 4) Insert the required copies [if any] that make the partially
253 redundant instructions fully redundant.
255 5) For other reaching expressions, insert an instruction to copy the value
256 to a newly created pseudo that will reach the redundant instruction.
258 The deletion is done first so that when we do insertions we
259 know which pseudo reg to use.
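A sketch of steps 3-5 on a diamond CFG (hypothetical blocks B1, B2, B3):

      B1: x = a + b      B2: ...
             \            /
          B3: y = a + b        <- partially redundant (only B1 computes it)

Roughly: the computation in B3 is deleted in favor of a new pseudo `t',
`t = a + b' is inserted on the edge from B2, and B1's result is copied
into `t' (B1 becomes `t = a + b; x = t'), so B3 is simply `y = t'.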
261 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
262 argue it is not. The number of iterations for the algorithm to converge
263 is typically 2-4 so I don't view it as that expensive (relatively speaking).
265 PRE GCSE depends heavily on the second CSE pass to clean up the copies
266 we create. To make an expression reach the place where it's redundant,
267 the result of the expression is copied to a new register, and the redundant
268 expression is deleted by replacing it with this new register. Classic GCSE
269 doesn't have this problem as much, as it computes the reaching defs of
270 each register in each block and thus can try to use an existing
271 register. */
273 /* GCSE global vars. */
275 /* Note whether or not we should run jump optimization after gcse. We
276 want to do this for two cases.
278 * If we changed any jumps via cprop.
280 * If we added any labels via edge splitting. */
281 static int run_jump_opt_after_gcse;
283 /* An obstack for our working variables. */
284 static struct obstack gcse_obstack;
286 struct reg_use {rtx reg_rtx; };
288 /* Hash table of expressions. */
290 struct expr
291 {
292   /* The expression (SET_SRC for expressions, PATTERN for assignments). */
293   rtx expr;
294   /* Index in the available expression bitmaps. */
295   int bitmap_index;
296 /* Next entry with the same hash. */
297 struct expr *next_same_hash;
298 /* List of anticipatable occurrences in basic blocks in the function.
299 An "anticipatable occurrence" is one that is the first occurrence in the
300 basic block, the operands are not modified in the basic block prior
301 to the occurrence and the output is not used between the start of
302 the block and the occurrence. */
303 struct occr *antic_occr;
304 /* List of available occurrence in basic blocks in the function.
305 An "available occurrence" is one that is the last occurrence in the
306 basic block and the operands are not modified by following statements in
307 the basic block [including this insn]. */
308 struct occr *avail_occr;
309 /* Non-null if the computation is PRE redundant.
310 The value is the newly created pseudo-reg to record a copy of the
311      expression in all the places that reach the redundant copy. */
312   rtx reaching_reg;
313 };
315 /* Occurrence of an expression.
316 There is one per basic block. If a pattern appears more than once the
317 last appearance is used [or first for anticipatable expressions]. */
319 struct occr
320 {
321   /* Next occurrence of this expression. */
322   struct occr *next;
323   /* The insn that computes the expression. */
324   rtx insn;
325   /* Nonzero if this [anticipatable] occurrence has been deleted. */
326   char deleted_p;
327   /* Nonzero if this [available] occurrence has been copied to
328      reaching_reg. */
329   /* ??? This is mutually exclusive with deleted_p, so they could share
330      the same byte. */
331   char copied_p;
332 };
334 /* Expression and copy propagation hash tables.
335 Each hash table is an array of buckets.
336 ??? It is known that if it were an array of entries, structure elements
337 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
338 not clear whether in the final analysis a sufficient amount of memory would
339 be saved as the size of the available expression bitmaps would be larger
340 [one could build a mapping table without holes afterwards though].
341 Someday I'll perform the computation and figure it out. */
343 struct hash_table
344 {
345   /* The table itself.
346      This is an array of `expr_hash_table_size' elements. */
347   struct expr **table;
349   /* Size of the hash table, in elements. */
350   unsigned int size;
352 /* Number of hash table elements. */
353 unsigned int n_elems;
355   /* Whether the table is expression or copy propagation one. */
356   int set_p;
357 };
359 /* Expression hash table. */
360 static struct hash_table expr_hash_table;
362 /* Copy propagation hash table. */
363 static struct hash_table set_hash_table;
365 /* Mapping of uids to cuids.
366 Only real insns get cuids. */
367 static int *uid_cuid;
369 /* Highest UID in UID_CUID. */
370 static int max_uid;
372 /* Get the cuid of an insn. */
373 #ifdef ENABLE_CHECKING
374 #define INSN_CUID(INSN) \
375 (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
376 #else
377 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
378 #endif
380 /* Number of cuids. */
383 /* Maximum register number in function prior to doing gcse + 1.
384 Registers created during this pass have regno >= max_gcse_regno.
385 This is named with "gcse" to not collide with global of same name. */
386 static unsigned int max_gcse_regno;
388 /* Table of registers that are modified.
390 For each register, each element is a list of places where the pseudo-reg
391 is set.
393 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
394 requires knowledge of which blocks kill which regs [and thus could use
395 a bitmap instead of the lists `reg_set_table' uses].
397 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
398 num-regs) [however perhaps it may be useful to keep the data as is]. One
399 advantage of recording things this way is that `reg_set_table' is fairly
400 sparse with respect to pseudo regs but for hard regs could be fairly dense
401 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
402 up functions like compute_transp since in the case of pseudo-regs we only
403 need to iterate over the number of times a pseudo-reg is set, not over the
404 number of basic blocks [clearly there is a bit of a slow down in the cases
405 where a pseudo is set more than once in a block, however it is believed
406 that the net effect is to speed things up]. This isn't done for hard-regs
407 because recording call-clobbered hard-regs in `reg_set_table' at each
408 function call can consume a fair bit of memory, and iterating over
409 hard-regs stored this way in compute_transp will be more expensive. */
411 typedef struct reg_set
412 {
413 /* The next setting of this register. */
414 struct reg_set *next;
415   /* The index of the block where it was set. */
416   int bb_index;
417 } reg_set;
419 static reg_set **reg_set_table;
421 /* Size of `reg_set_table'.
422 The table starts out at max_gcse_regno + slop, and is enlarged as
423 necessary. */
424 static int reg_set_table_size;
426 /* Amount to grow `reg_set_table' by when it's full. */
427 #define REG_SET_TABLE_SLOP 100
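/* Illustrative use of the table (a sketch, not code from this file;
   process_block is hypothetical):

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       process_block (r->bb_index);

   The cost is proportional to the number of sets of REGNO, not to the
   number of basic blocks, which is the speed-up compute_transp gets for
   pseudo-regs.  */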
429 /* This is a list of expressions which are MEMs and will be used by load
430    or store motion.
431 Load motion tracks MEMs which aren't killed by anything except itself,
432 i.e., loads and stores to a single location. We can then allow movement
433 of these MEM refs with a little special allowance: all stores copy the
434 same value to the reaching reg used for the loads. This means all values
435 used to store into memory must have no side effects, so we can re-issue
436 the setter value.
437 Store Motion uses this structure as an expression table to track stores
438 which look interesting, and might be moveable towards the exit block. */
440 struct ls_expr
441 {
442   struct expr * expr;           /* Gcse expression reference for LM. */
443 rtx pattern; /* Pattern of this mem. */
444 rtx pattern_regs; /* List of registers mentioned by the mem. */
445 rtx loads; /* INSN list of loads seen. */
446 rtx stores; /* INSN list of stores seen. */
447 struct ls_expr * next; /* Next in the list. */
448 int invalid; /* Invalid for some reason. */
449 int index; /* If it maps to a bitmap index. */
450 unsigned int hash_index; /* Index when in a hash table. */
451   rtx reaching_reg;             /* Register to use when re-writing. */
452 };
454 /* Array of implicit set patterns indexed by basic block index. */
455 static rtx *implicit_sets;
457 /* Head of the list of load/store memory refs. */
458 static struct ls_expr * pre_ldst_mems = NULL;
460 /* Hashtable for the load/store memory refs. */
461 static htab_t pre_ldst_table = NULL;
463 /* Bitmap containing one bit for each register in the program.
464 Used when performing GCSE to track which registers have been set since
465 the start of the basic block. */
466 static regset reg_set_bitmap;
468 /* For each block, a bitmap of registers set in the block.
469 This is used by compute_transp.
470 It is computed during hash table computation and not by compute_sets
471 as it includes registers added since the last pass (or between cprop and
472 gcse) and it's currently not easy to realloc sbitmap vectors. */
473 static sbitmap *reg_set_in_block;
475 /* Array, indexed by basic block number, of lists of insns which modify
476    memory within the block. */
477 static rtx * modify_mem_list;
478 static bitmap modify_mem_list_set;
480 /* This array parallels modify_mem_list, but is kept canonicalized. */
481 static rtx * canon_modify_mem_list;
483 /* Bitmap indexed by block numbers to record which blocks contain
484    function calls. */
485 static bitmap blocks_with_calls;
487 /* Various variables for statistics gathering. */
489 /* Memory used in a pass.
490 This isn't intended to be absolutely precise. Its intent is only
491 to keep an eye on memory usage. */
492 static int bytes_used;
494 /* GCSE substitutions made. */
495 static int gcse_subst_count;
496 /* Number of copy instructions created. */
497 static int gcse_create_count;
498 /* Number of local constants propagated. */
499 static int local_const_prop_count;
500 /* Number of local copies propagated. */
501 static int local_copy_prop_count;
502 /* Number of global constants propagated. */
503 static int global_const_prop_count;
504 /* Number of global copies propagated. */
505 static int global_copy_prop_count;
507 /* For available exprs */
508 static sbitmap *ae_kill, *ae_gen;
510 static void compute_can_copy (void);
511 static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
512 static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
513 static void *grealloc (void *, size_t);
514 static void *gcse_alloc (unsigned long);
515 static void alloc_gcse_mem (void);
516 static void free_gcse_mem (void);
517 static void alloc_reg_set_mem (int);
518 static void free_reg_set_mem (void);
519 static void record_one_set (int, rtx);
520 static void record_set_info (rtx, const_rtx, void *);
521 static void compute_sets (void);
522 static void hash_scan_insn (rtx, struct hash_table *);
523 static void hash_scan_set (rtx, rtx, struct hash_table *);
524 static void hash_scan_clobber (rtx, rtx, struct hash_table *);
525 static void hash_scan_call (rtx, rtx, struct hash_table *);
526 static int want_to_gcse_p (rtx);
527 static bool can_assign_to_reg_p (rtx);
528 static bool gcse_constant_p (const_rtx);
529 static int oprs_unchanged_p (const_rtx, const_rtx, int);
530 static int oprs_anticipatable_p (const_rtx, const_rtx);
531 static int oprs_available_p (const_rtx, const_rtx);
532 static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
533 struct hash_table *);
534 static void insert_set_in_table (rtx, rtx, struct hash_table *);
535 static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
536 static unsigned int hash_set (int, int);
537 static int expr_equiv_p (const_rtx, const_rtx);
538 static void record_last_reg_set_info (rtx, int);
539 static void record_last_mem_set_info (rtx);
540 static void record_last_set_info (rtx, const_rtx, void *);
541 static void compute_hash_table (struct hash_table *);
542 static void alloc_hash_table (int, struct hash_table *, int);
543 static void free_hash_table (struct hash_table *);
544 static void compute_hash_table_work (struct hash_table *);
545 static void dump_hash_table (FILE *, const char *, struct hash_table *);
546 static struct expr *lookup_set (unsigned int, struct hash_table *);
547 static struct expr *next_set (unsigned int, struct expr *);
548 static void reset_opr_set_tables (void);
549 static int oprs_not_set_p (const_rtx, const_rtx);
550 static void mark_call (rtx);
551 static void mark_set (rtx, rtx);
552 static void mark_clobber (rtx, rtx);
553 static void mark_oprs_set (rtx);
554 static void alloc_cprop_mem (int, int);
555 static void free_cprop_mem (void);
556 static void compute_transp (const_rtx, int, sbitmap *, int);
557 static void compute_transpout (void);
558 static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
559 struct hash_table *);
560 static void compute_cprop_data (void);
561 static void find_used_regs (rtx *, void *);
562 static int try_replace_reg (rtx, rtx, rtx);
563 static struct expr *find_avail_set (int, rtx);
564 static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
565 static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
566 static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
567 static void canon_list_insert (rtx, const_rtx, void *);
568 static int cprop_insn (rtx, int);
569 static int cprop (int);
570 static void find_implicit_sets (void);
571 static int one_cprop_pass (int, bool, bool);
572 static bool constprop_register (rtx, rtx, rtx, bool);
573 static struct expr *find_bypass_set (int, int);
574 static bool reg_killed_on_edge (const_rtx, const_edge);
575 static int bypass_block (basic_block, rtx, rtx);
576 static int bypass_conditional_jumps (void);
577 static void alloc_pre_mem (int, int);
578 static void free_pre_mem (void);
579 static void compute_pre_data (void);
580 static int pre_expr_reaches_here_p (basic_block, struct expr *,
581                                     basic_block);
582 static void insert_insn_end_basic_block (struct expr *, basic_block, int);
583 static void pre_insert_copy_insn (struct expr *, rtx);
584 static void pre_insert_copies (void);
585 static int pre_delete (void);
586 static int pre_gcse (void);
587 static int one_pre_gcse_pass (int);
588 static void add_label_notes (rtx, rtx);
589 static void alloc_code_hoist_mem (int, int);
590 static void free_code_hoist_mem (void);
591 static void compute_code_hoist_vbeinout (void);
592 static void compute_code_hoist_data (void);
593 static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
594 static void hoist_code (void);
595 static int one_code_hoisting_pass (void);
596 static rtx process_insert_insn (struct expr *);
597 static int pre_edge_insert (struct edge_list *, struct expr **);
598 static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
599 basic_block, char *);
600 static struct ls_expr * ldst_entry (rtx);
601 static void free_ldst_entry (struct ls_expr *);
602 static void free_ldst_mems (void);
603 static void print_ldst_list (FILE *);
604 static struct ls_expr * find_rtx_in_ldst (rtx);
605 static int enumerate_ldsts (void);
606 static inline struct ls_expr * first_ls_expr (void);
607 static inline struct ls_expr * next_ls_expr (struct ls_expr *);
608 static int simple_mem (const_rtx);
609 static void invalidate_any_buried_refs (rtx);
610 static void compute_ld_motion_mems (void);
611 static void trim_ld_motion_mems (void);
612 static void update_ld_motion_stores (struct expr *);
613 static void reg_set_info (rtx, const_rtx, void *);
614 static void reg_clear_last_set (rtx, const_rtx, void *);
615 static bool store_ops_ok (const_rtx, int *);
616 static rtx extract_mentioned_regs (rtx);
617 static rtx extract_mentioned_regs_helper (rtx, rtx);
618 static void find_moveable_store (rtx, int *, int *);
619 static int compute_store_table (void);
620 static bool load_kills_store (const_rtx, const_rtx, int);
621 static bool find_loads (const_rtx, const_rtx, int);
622 static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
623 static bool store_killed_after (const_rtx, const_rtx, const_rtx, const_basic_block, int *, rtx *);
624 static bool store_killed_before (const_rtx, const_rtx, const_rtx, const_basic_block, int *);
625 static void build_store_vectors (void);
626 static void insert_insn_start_basic_block (rtx, basic_block);
627 static int insert_store (struct ls_expr *, edge);
628 static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
629 static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
630 static void delete_store (struct ls_expr *, basic_block);
631 static void free_store_memory (void);
632 static void store_motion (void);
633 static void free_insn_expr_list_list (rtx *);
634 static void clear_modify_mem_tables (void);
635 static void free_modify_mem_tables (void);
636 static rtx gcse_emit_move_after (rtx, rtx, rtx);
637 static void local_cprop_find_used_regs (rtx *, void *);
638 static bool do_local_cprop (rtx, rtx, bool);
639 static void local_cprop_pass (bool);
640 static bool is_too_expensive (const char *);
643 /* Entry point for global common subexpression elimination.
644 F is the first instruction in the function. Return nonzero if a
645 change is made. */
647 static int
648 gcse_main (rtx f ATTRIBUTE_UNUSED)
649 {
650   int changed, pass;
651 /* Bytes used at start of pass. */
652 int initial_bytes_used;
653   /* Maximum number of bytes used by a pass. */
654   int max_pass_bytes;
655 /* Point to release obstack data from for each pass. */
656 char *gcse_obstack_bottom;
658 /* We do not construct an accurate cfg in functions which call
659 setjmp, so just punt to be safe. */
660 if (cfun->calls_setjmp)
661   return 0;
663 /* Assume that we do not need to run jump optimizations after gcse. */
664 run_jump_opt_after_gcse = 0;
666 /* Identify the basic block information for this function, including
667 successors and predecessors. */
668 max_gcse_regno = max_reg_num ();
670 df_note_add_problem ();
674 dump_flow_info (dump_file, dump_flags);
676 /* Return if there's nothing to do, or it is too expensive. */
677 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
678     || is_too_expensive (_("GCSE disabled")))
679   return 0;
681 gcc_obstack_init (&gcse_obstack);
685 init_alias_analysis ();
686 /* Record where pseudo-registers are set. This data is kept accurate
687 during each pass. ??? We could also record hard-reg information here
688 [since it's unchanging], however it is currently done during hash table
689 computation.
691 It may be tempting to compute MEM set information here too, but MEM sets
692 will be subject to code motion one day and thus we need to compute
693 information about memory sets when we build the hash tables. */
695 alloc_reg_set_mem (max_gcse_regno);
699 initial_bytes_used = bytes_used;
701 gcse_obstack_bottom = gcse_alloc (1);
703 while (changed && pass < MAX_GCSE_PASSES)
707 fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);
709 /* Initialize bytes_used to the space for the pred/succ lists,
710 and the reg_set_table data. */
711 bytes_used = initial_bytes_used;
713 /* Each pass may create new registers, so recalculate each time. */
714 max_gcse_regno = max_reg_num ();
718 /* Don't allow constant propagation to modify jumps
719    for the first pass. */
720 if (dbg_cnt (cprop1))
722 timevar_push (TV_CPROP1);
723 changed = one_cprop_pass (pass + 1, false, false);
724 timevar_pop (TV_CPROP1);
731 timevar_push (TV_PRE);
732 changed |= one_pre_gcse_pass (pass + 1);
733 /* We may have just created new basic blocks. Release and
734 recompute various things which are sized on the number of
735 basic blocks. */
738 free_modify_mem_tables ();
739 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
740 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
743 alloc_reg_set_mem (max_reg_num ());
745 run_jump_opt_after_gcse = 1;
746 timevar_pop (TV_PRE);
749 if (max_pass_bytes < bytes_used)
750 max_pass_bytes = bytes_used;
752 /* Free up memory, then reallocate for code hoisting. We cannot
753    re-use the existing allocated memory because the tables
754 will not have info for the insns or registers created by
755 partial redundancy elimination. */
758 /* It does not make sense to run code hoisting unless we are optimizing
759 for code size -- it rarely makes programs faster, and can make
760 them bigger if we did partial redundancy elimination (when optimizing
761 for space, we don't run the partial redundancy algorithms). */
762 if (optimize_size)
763   {
764     timevar_push (TV_HOIST);
765 max_gcse_regno = max_reg_num ();
767 changed |= one_code_hoisting_pass ();
770 if (max_pass_bytes < bytes_used)
771 max_pass_bytes = bytes_used;
772 timevar_pop (TV_HOIST);
777 fprintf (dump_file, "\n");
781 obstack_free (&gcse_obstack, gcse_obstack_bottom);
785 /* Do one last pass of copy propagation, including cprop into
786 conditional jumps. */
788 if (dbg_cnt (cprop2))
790 max_gcse_regno = max_reg_num ();
793 /* This time, go ahead and allow cprop to alter jumps. */
794 timevar_push (TV_CPROP2);
795 one_cprop_pass (pass + 1, true, true);
796 timevar_pop (TV_CPROP2);
802 fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
803 current_function_name (), n_basic_blocks);
804 fprintf (dump_file, "%d pass%s, %d bytes\n\n",
805 pass, pass > 1 ? "es" : "", max_pass_bytes);
808 obstack_free (&gcse_obstack, NULL);
811 /* We are finished with alias. */
812 end_alias_analysis ();
814 if (!optimize_size && flag_gcse_sm)
816 timevar_push (TV_LSM);
817 store_motion ();
818 timevar_pop (TV_LSM);
821 /* Record where pseudo-registers are set. */
822 return run_jump_opt_after_gcse;
825 /* Misc. utilities. */
827 /* Nonzero for each mode that supports (set (reg) (reg)).
828 This is trivially true for integer and floating point values.
829 It may or may not be true for condition codes. */
830 static char can_copy[(int) NUM_MACHINE_MODES];
832 /* Compute which modes support reg/reg copy operations. */
835 compute_can_copy (void)
838 #ifndef AVOID_CCMODE_COPIES
841 memset (can_copy, 0, NUM_MACHINE_MODES);
844 for (i = 0; i < NUM_MACHINE_MODES; i++)
845 if (GET_MODE_CLASS (i) == MODE_CC)
847 #ifdef AVOID_CCMODE_COPIES
848 can_copy[i] = 0;
849 #else
850 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
851 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
852 if (recog (PATTERN (insn), insn, NULL) >= 0)
853   can_copy[i] = 1;
862 /* Returns whether the mode supports reg/reg copy operations. */
865 can_copy_p (enum machine_mode mode)
867 static bool can_copy_init_p = false;
869 if (! can_copy_init_p)
872 can_copy_init_p = true;
875 return can_copy[mode] != 0;
878 /* Cover function to xmalloc to record bytes allocated. */
881 gmalloc (size_t size)
883 bytes_used += size;
884 return xmalloc (size);
887 /* Cover function to xcalloc to record bytes allocated. */
890 gcalloc (size_t nelem, size_t elsize)
892 bytes_used += nelem * elsize;
893 return xcalloc (nelem, elsize);
896 /* Cover function to xrealloc.
897 We don't record the additional size since we don't know it.
898 It won't affect memory usage stats much anyway. */
901 grealloc (void *ptr, size_t size)
903 return xrealloc (ptr, size);
906 /* Cover function to obstack_alloc. */
909 gcse_alloc (unsigned long size)
911 bytes_used += size;
912 return obstack_alloc (&gcse_obstack, size);
915 /* Allocate memory for the cuid mapping array,
916 and reg/memory set tracking tables.
918 This is called at the start of each pass. */
921 alloc_gcse_mem (void)
927 /* Find the largest UID and create a mapping from UIDs to CUIDs.
928 CUIDs are like UIDs except they increase monotonically, have no gaps,
929 and only apply to real insns.
930 (Actually, there are gaps, for insns that are not inside a basic block,
931 but we should never see those anyway, so this is OK.) */
933 max_uid = get_max_uid ();
934 uid_cuid = gcalloc (max_uid + 1, sizeof (int));
935 i = 0;
936 FOR_EACH_BB (bb)
937   FOR_BB_INSNS (bb, insn)
938     {
939       if (INSN_P (insn))
940         uid_cuid[INSN_UID (insn)] = i++;
941       else
942         uid_cuid[INSN_UID (insn)] = i;
943     }
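/* For instance (hypothetical UIDs): if a block holds real insns with UIDs
   4, 9 and 17 interleaved with notes, the insns receive CUIDs 0, 1 and 2,
   and each note inherits the CUID of the preceding real insn, so CUID
   comparisons give a dense program order for the first_set/last_set tests
   used later.  */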
947 /* Allocate vars to track sets of regs. */
948 reg_set_bitmap = BITMAP_ALLOC (NULL);
950 /* Allocate vars to track sets of regs, memory per block. */
951 reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
952 /* Allocate array to keep a list of insns which modify memory in each
954 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
955 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
956 modify_mem_list_set = BITMAP_ALLOC (NULL);
957 blocks_with_calls = BITMAP_ALLOC (NULL);
960 /* Free memory allocated by alloc_gcse_mem. */
967 BITMAP_FREE (reg_set_bitmap);
969 sbitmap_vector_free (reg_set_in_block);
970 free_modify_mem_tables ();
971 BITMAP_FREE (modify_mem_list_set);
972 BITMAP_FREE (blocks_with_calls);
975 /* Compute the local properties of each recorded expression.
977 Local properties are those that are defined by the block, irrespective of
978 other blocks.
980 An expression is transparent in a block if its operands are not modified
981 in the block.
983 An expression is computed (locally available) in a block if it is computed
984 at least once and the expression would contain the same value if the
985 computation was moved to the end of the block.
987 An expression is locally anticipatable in a block if it is computed at
988 least once and the expression would contain the same value if the
989 computation was moved to the beginning of the block.
991 We call this routine for cprop, pre and code hoisting. They all compute
992 basically the same information and thus can easily share this code.
994 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
995 properties. If NULL, then it is not necessary to compute or record that
996 particular property.
998 TABLE controls which hash table to look at. If it is the set hash table,
999 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1000 ABSALTERED. */
1003 compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
1004 struct hash_table *table)
1008 /* Initialize any bitmaps that were passed in. */
1012 sbitmap_vector_zero (transp, last_basic_block);
1014 sbitmap_vector_ones (transp, last_basic_block);
1018 sbitmap_vector_zero (comp, last_basic_block);
1020 sbitmap_vector_zero (antloc, last_basic_block);
1022 for (i = 0; i < table->size; i++)
1026 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1028 int indx = expr->bitmap_index;
1031 /* The expression is transparent in this block if it is not killed.
1032 We start by assuming all are transparent [none are killed], and
1033 then reset the bits for those that are. */
1035 compute_transp (expr->expr, indx, transp, table->set_p);
1037 /* The occurrences recorded in antic_occr are exactly those that
1038 we want to set to nonzero in ANTLOC. */
1040 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1042 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1044 /* While we're scanning the table, this is a good place to
1045    initialize this. */
1046 occr->deleted_p = 0;
1049 /* The occurrences recorded in avail_occr are exactly those that
1050 we want to set to nonzero in COMP. */
1052 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1054 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1056 /* While we're scanning the table, this is a good place to
1057    initialize this. */
1058 occr->copied_p = 0;
1061 /* While we're scanning the table, this is a good place to
1062    initialize this. */
1063 expr->reaching_reg = 0;
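/* A worked example (hypothetical block): given

     insn 1:  x = a + b
     insn 2:  a = 5
     insn 3:  y = a + b

   `a + b' is locally anticipatable (insn 1 computes it before any operand
   changes), locally available (insn 3 recomputes it after the last change
   to `a'), but not transparent, since the block modifies an operand.  So
   this block's bit for the expression is set in ANTLOC and COMP and
   cleared in TRANSP.  */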
1068 /* Register set information.
1070 `reg_set_table' records where each register is set or otherwise
1071 modified. */
1073 static struct obstack reg_set_obstack;
1076 alloc_reg_set_mem (int n_regs)
1078 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1079 reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));
1081 gcc_obstack_init (&reg_set_obstack);
1085 free_reg_set_mem (void)
1087 free (reg_set_table);
1088 obstack_free (&reg_set_obstack, NULL);
1091 /* Record REGNO in the reg_set table. */
1094 record_one_set (int regno, rtx insn)
1096 /* Allocate a new reg_set element and link it onto the list. */
1097 struct reg_set *new_reg_info;
1099 /* If the table isn't big enough, enlarge it. */
1100 if (regno >= reg_set_table_size)
1102 int new_size = regno + REG_SET_TABLE_SLOP;
1104 reg_set_table = grealloc (reg_set_table,
1105 new_size * sizeof (struct reg_set *));
1106 memset (reg_set_table + reg_set_table_size, 0,
1107 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1108 reg_set_table_size = new_size;
1111 new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
1112 bytes_used += sizeof (struct reg_set);
1113 new_reg_info->bb_index = BLOCK_NUM (insn);
1114 new_reg_info->next = reg_set_table[regno];
1115 reg_set_table[regno] = new_reg_info;
1118 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1119 an insn. The DATA is really the instruction in which the SET is
1120 occurring. */
1123 record_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
1125 rtx record_set_insn = (rtx) data;
1127 if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1128 record_one_set (REGNO (dest), record_set_insn);
1131 /* Scan the function and record each set of each pseudo-register.
1133 This is called once, at the start of the gcse pass. See the comments for
1134 `reg_set_table' for further documentation. */
1143 FOR_BB_INSNS (bb, insn)
1145 note_stores (PATTERN (insn), record_set_info, insn);
1148 /* Hash table support. */
1150 struct reg_avail_info
1152 basic_block last_bb;
1157 static struct reg_avail_info *reg_avail_info;
1158 static basic_block current_bb;
1161 /* See whether X, the source of a set, is something we want to consider for
1162    GCSE. */
1165 want_to_gcse_p (rtx x)
1168 /* On register stack architectures, don't GCSE constants from the
1169 constant pool, as the benefits are often swamped by the overhead
1170 of shuffling the register stack between basic blocks. */
1171 if (IS_STACK_MODE (GET_MODE (x)))
1172 x = avoid_constant_pool_reference (x);
1175 switch (GET_CODE (x))
1176   {
1177   case REG:
1178   case SUBREG:
1179   case CONST_INT:
1180   case CONST_DOUBLE:
1181   case CONST_VECTOR:
1182   case CALL:
1183     return 0;
1185   default:
1186     return can_assign_to_reg_p (x);
1187   }
1191 /* Used internally by can_assign_to_reg_p. */
1193 static GTY(()) rtx test_insn;
1195 /* Return true if we can assign X to a pseudo register. */
1198 can_assign_to_reg_p (rtx x)
1200 int num_clobbers = 0;
1203 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1204 if (general_operand (x, GET_MODE (x)))
1205   return 1;
1206 else if (GET_MODE (x) == VOIDmode)
1207   return 0;
1209 /* Otherwise, check if we can make a valid insn from it. First initialize
1210 our test insn if we haven't already. */
1211 if (test_insn == 0)
1212   {
1213     test_insn
1214       = make_insn_raw (gen_rtx_SET (VOIDmode,
1215                                     gen_rtx_REG (word_mode,
1216                                                  FIRST_PSEUDO_REGISTER * 2),
1217                                     const0_rtx));
1218     NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1219   }
1221 /* Now make an insn like the one we would make when GCSE'ing and see if
1222    valid. */
1223 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1224 SET_SRC (PATTERN (test_insn)) = x;
1225 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1226 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
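/* The scratch-insn trick above, restated (a sketch): a single insn of the
   shape

     (set (reg:word_mode FIRST_PSEUDO_REGISTER*2) <src>)

   is built once and cached in test_insn; each query overwrites the
   destination's mode and the SET_SRC, then asks recog whether some insn
   pattern matches, rejecting matches that would add hard-reg clobbers.
   This avoids allocating a fresh insn per candidate expression.  */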
1229 /* Return nonzero if the operands of expression X are unchanged from the
1230 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1231 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1234 oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
1243 code = GET_CODE (x);
1248 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1250 if (info->last_bb != current_bb)
1251   return 1;
1252 if (avail_p)
1253   return info->last_set < INSN_CUID (insn);
1254 else
1255   return info->first_set >= INSN_CUID (insn);
1259 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1260                             x, avail_p))
1261   return 0;
1262 else
1263   return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1290 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1291   {
1292     if (fmt[i] == 'e')
1293       {
1294         /* If we are about to do the last recursive call needed at this
1295            level, change it into iteration. This function is called enough
1296            to be worth it. */
1297         if (i == 0)
1298           return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1300 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1303 else if (fmt[i] == 'E')
1304 for (j = 0; j < XVECLEN (x, i); j++)
1305 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1312 /* Used for communication between mems_conflict_for_gcse_p and
1313 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1314 conflict between two memory references. */
1315 static int gcse_mems_conflict_p;
1317 /* Used for communication between mems_conflict_for_gcse_p and
1318 load_killed_in_block_p. A memory reference for a load instruction,
1319 mems_conflict_for_gcse_p will see if a memory store conflicts with
1320 this memory load. */
1321 static const_rtx gcse_mem_operand;
1323 /* DEST is the output of an instruction. If it is a memory reference, and
1324 possibly conflicts with the load found in gcse_mem_operand, then set
1325 gcse_mems_conflict_p to a nonzero value. */
1328 mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
1329 void *data ATTRIBUTE_UNUSED)
1331 while (GET_CODE (dest) == SUBREG
1332 || GET_CODE (dest) == ZERO_EXTRACT
1333 || GET_CODE (dest) == STRICT_LOW_PART)
1334 dest = XEXP (dest, 0);
1336 /* If DEST is not a MEM, then it will not conflict with the load. Note
1337    that function calls are assumed to clobber memory, but are handled
1338    elsewhere. */
1339 if (! MEM_P (dest))
1340   return;
1342 /* If we are setting a MEM in our list of specially recognized MEMs,
1343 don't mark as killed this time. */
1345 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1346   {
1347     if (!find_rtx_in_ldst (dest))
1348       gcse_mems_conflict_p = 1;
1349     return;
1350   }
1352 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1353                      rtx_addr_varies_p))
1354   gcse_mems_conflict_p = 1;
1357 /* Return nonzero if the expression in X (a memory reference) is killed
1358 in block BB before or after the insn with the CUID in UID_LIMIT.
1359 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1360 before UID_LIMIT.
1362 To check the entire block, set UID_LIMIT to max_uid + 1 and
1363 AVAIL_P to 0. */
1366 load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x, int avail_p)
1368 rtx list_entry = modify_mem_list[bb->index];
1370 /* If this is a readonly MEM, then we aren't going to be changing it. */
1371 if (MEM_READONLY_P (x))
1372   return 0;
1374 while (list_entry)
1375   {
1376     rtx setter;
1377     /* Ignore entries in the list that do not apply. */
1378     if ((avail_p
1379          && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1380         || (! avail_p
1381             && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1382       {
1383         list_entry = XEXP (list_entry, 1);
1384         continue;
1385       }
1387     setter = XEXP (list_entry, 0);
1389 /* If SETTER is a call everything is clobbered. Note that calls
1390 to pure functions are never put on the list, so we need not
1391 worry about them. */
1392 if (CALL_P (setter))
1393   return 1;
1395 /* SETTER must be an INSN of some kind that sets memory. Call
1396 note_stores to examine each hunk of memory that is modified.
1398 The note_stores interface is pretty limited, so we have to
1399 communicate via global variables. Yuk. */
1400 gcse_mem_operand = x;
1401 gcse_mems_conflict_p = 0;
1402 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1403     if (gcse_mems_conflict_p)
1404       return 1;
1405     list_entry = XEXP (list_entry, 1);
1406   }
1408 return 0;
1409 }
1410 /* Return nonzero if the operands of expression X are unchanged from
1411 the start of INSN's basic block up to but not including INSN. */
1414 oprs_anticipatable_p (const_rtx x, const_rtx insn)
1416 return oprs_unchanged_p (x, insn, 0);
1419 /* Return nonzero if the operands of expression X are unchanged from
1420 INSN to the end of INSN's basic block. */
1423 oprs_available_p (const_rtx x, const_rtx insn)
1425 return oprs_unchanged_p (x, insn, 1);
1428 /* Hash expression X.
1430 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1431 indicating if a volatile operand is found or if the expression contains
1432 something we don't want to insert in the table. HASH_TABLE_SIZE is
1433 the current size of the hash table to be probed. */
1436 hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
1437 int hash_table_size)
1441 *do_not_record_p = 0;
1443 hash = hash_rtx (x, mode, do_not_record_p,
1444 NULL, /*have_reg_qty=*/false);
1445 return hash % hash_table_size;
1448 /* Hash a set of register REGNO.
1450 Sets are hashed on the register that is set. This simplifies the PRE copy
1453 ??? May need to make things more elaborate. Later, as necessary. */
1455 static unsigned int
1456 hash_set (int regno, int hash_table_size)
1457 {
1458   unsigned int hash;
1460   hash = regno;
1461   return hash % hash_table_size;
1462 }
1464 /* Return nonzero if exp1 is equivalent to exp2. */
1467 expr_equiv_p (const_rtx x, const_rtx y)
1469 return exp_equiv_p (x, y, 0, true);
1472 /* Insert expression X in INSN in the hash TABLE.
1473 If it is already present, record it as the last occurrence in INSN's
1474 basic block.
1476 MODE is the mode of the value X is being stored into.
1477 It is only used if X is a CONST_INT.
1479 ANTIC_P is nonzero if X is an anticipatable expression.
1480 AVAIL_P is nonzero if X is an available expression. */
1483 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1484 int avail_p, struct hash_table *table)
1486 int found, do_not_record_p;
1488 struct expr *cur_expr, *last_expr = NULL;
1489 struct occr *antic_occr, *avail_occr;
1491 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1493 /* Do not insert expression in table if it contains volatile operands,
1494 or if hash_expr determines the expression is something we don't want
1495 to or can't handle. */
1496 if (do_not_record_p)
1499 cur_expr = table->table[hash];
1502 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1504 /* If the expression isn't found, save a pointer to the end of
1505    the list. */
1506 last_expr = cur_expr;
1507 cur_expr = cur_expr->next_same_hash;
1512 cur_expr = gcse_alloc (sizeof (struct expr));
1513 bytes_used += sizeof (struct expr);
1514 if (table->table[hash] == NULL)
1515 /* This is the first pattern that hashed to this index. */
1516 table->table[hash] = cur_expr;
1518 /* Add EXPR to end of this hash chain. */
1519 last_expr->next_same_hash = cur_expr;
1521 /* Set the fields of the expr element. */
1522 cur_expr->expr = x;
1523 cur_expr->bitmap_index = table->n_elems++;
1524 cur_expr->next_same_hash = NULL;
1525 cur_expr->antic_occr = NULL;
1526 cur_expr->avail_occr = NULL;
1529 /* Now record the occurrence(s). */
1530 if (antic_p)
1531   {
1532     antic_occr = cur_expr->antic_occr;
1534 if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1538 /* Found another instance of the expression in the same basic block.
1539 Prefer the currently recorded one. We want the first one in the
1540 block and the block is scanned from start to end. */
1541 ; /* nothing to do */
1544 /* First occurrence of this expression in this basic block. */
1545 antic_occr = gcse_alloc (sizeof (struct occr));
1546 bytes_used += sizeof (struct occr);
1547 antic_occr->insn = insn;
1548 antic_occr->next = cur_expr->antic_occr;
1549 antic_occr->deleted_p = 0;
1550 cur_expr->antic_occr = antic_occr;
1554 if (avail_p)
1555   {
1556     avail_occr = cur_expr->avail_occr;
1558 if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
1560 /* Found another instance of the expression in the same basic block.
1561 Prefer this occurrence to the currently recorded one. We want
1562 the last one in the block and the block is scanned from start
1564 avail_occr->insn = insn;
1568 /* First occurrence of this expression in this basic block. */
1569 avail_occr = gcse_alloc (sizeof (struct occr));
1570 bytes_used += sizeof (struct occr);
1571 avail_occr->insn = insn;
1572 avail_occr->next = cur_expr->avail_occr;
1573 avail_occr->deleted_p = 0;
1574 cur_expr->avail_occr = avail_occr;
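/* To illustrate the two lists (hypothetical cuids): if a block computes the
   same expression at cuids 3 and 8, the occurrence at cuid 3 stays as the
   antic_occr entry (first in the block, for anticipatability) while the one
   at cuid 8 ends up as the avail_occr entry (last in the block, for
   availability), matching the preference rules above.  */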
1579 /* Insert pattern X in INSN in the hash table.
1580 X is a SET of a reg to either another reg or a constant.
1581 If it is already present, record it as the last occurrence in INSN's
1582 basic block. */
1585 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
1589 struct expr *cur_expr, *last_expr = NULL;
1590 struct occr *cur_occr;
1592 gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));
1594 hash = hash_set (REGNO (SET_DEST (x)), table->size);
1596 cur_expr = table->table[hash];
1599 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1601 /* If the expression isn't found, save a pointer to the end of
1602    the list. */
1603 last_expr = cur_expr;
1604 cur_expr = cur_expr->next_same_hash;
1609 cur_expr = gcse_alloc (sizeof (struct expr));
1610 bytes_used += sizeof (struct expr);
1611 if (table->table[hash] == NULL)
1612 /* This is the first pattern that hashed to this index. */
1613 table->table[hash] = cur_expr;
1615 /* Add EXPR to end of this hash chain. */
1616 last_expr->next_same_hash = cur_expr;
1618 /* Set the fields of the expr element.
1619 We must copy X because it can be modified when copy propagation is
1620 performed on its operands. */
1621 cur_expr->expr = copy_rtx (x);
1622 cur_expr->bitmap_index = table->n_elems++;
1623 cur_expr->next_same_hash = NULL;
1624 cur_expr->antic_occr = NULL;
1625 cur_expr->avail_occr = NULL;
1628 /* Now record the occurrence. */
1629 cur_occr = cur_expr->avail_occr;
1631 if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
1633 /* Found another instance of the expression in the same basic block.
1634 Prefer this occurrence to the currently recorded one. We want
1635 the last one in the block and the block is scanned from start
1637 cur_occr->insn = insn;
1641 /* First occurrence of this expression in this basic block. */
1642 cur_occr = gcse_alloc (sizeof (struct occr));
1643 bytes_used += sizeof (struct occr);
1645 cur_occr->insn = insn;
1646 cur_occr->next = cur_expr->avail_occr;
1647 cur_occr->deleted_p = 0;
1648 cur_expr->avail_occr = cur_occr;
1652 /* Determine whether the rtx X should be treated as a constant for
1653 the purposes of GCSE's constant propagation. */
1656 gcse_constant_p (const_rtx x)
1658 /* Consider a COMPARE of two integers constant. */
1659 if (GET_CODE (x) == COMPARE
1660 && GET_CODE (XEXP (x, 0)) == CONST_INT
1661     && GET_CODE (XEXP (x, 1)) == CONST_INT)
1662   return true;
1664 /* Consider a COMPARE of the same registers constant
1665    if they are not floating point registers. */
1666 if (GET_CODE (x) == COMPARE
1667     && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
1668     && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
1669     && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
1670     && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
1671   return true;
1673 return CONSTANT_P (x);
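/* Examples (RTL sketches): (compare (const_int 4) (const_int 7)) satisfies
   the first test, and (compare (reg:SI 65) (reg:SI 65)) the second, since
   comparing a non-float register with itself has a fixed result.  The same
   COMPARE in SFmode would not qualify, because of NaNs.  */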
1676 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
1677    expression entry). */
1680 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
1682 rtx src = SET_SRC (pat);
1683 rtx dest = SET_DEST (pat);
1684 rtx note;
1686 if (GET_CODE (src) == CALL)
1687 hash_scan_call (src, insn, table);
1689 else if (REG_P (dest))
1691 unsigned int regno = REGNO (dest);
1692 rtx tmp;
1694 /* See if a REG_EQUAL note shows this equivalent to a simpler expression.
1696 This allows us to do a single GCSE pass and still eliminate
1697 redundant constants, addresses or other expressions that are
1698 constructed with multiple instructions.
1700 However, keep the original SRC if INSN is a simple reg-reg move.
1701 In this case, there will almost always be a REG_EQUAL note on the
1702 insn that sets SRC. By recording the REG_EQUAL value here as SRC
1703 for INSN, we miss copy propagation opportunities and we perform the
1704 same PRE GCSE operation repeatedly on the same REG_EQUAL value if we
1705 do more than one PRE GCSE pass.
1707 Note that this does not impede profitable constant propagations. We
1708 "look through" reg-reg sets in lookup_avail_set. */
1709 note = find_reg_equal_equiv_note (insn);
1710 if (note != 0
1711     && REG_NOTE_KIND (note) == REG_EQUAL
1712     && !REG_P (src)
1713     && (table->set_p
1714         ? gcse_constant_p (XEXP (note, 0))
1715 : want_to_gcse_p (XEXP (note, 0))))
1716 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
1718 /* Only record sets of pseudo-regs in the hash table. */
1719 if (! table->set_p
1720     && regno >= FIRST_PSEUDO_REGISTER
1721 /* Don't GCSE something if we can't do a reg/reg copy. */
1722 && can_copy_p (GET_MODE (dest))
1723 /* GCSE commonly inserts instructions after the insn. We can't
1724    do that easily for EH_REGION notes so disable GCSE on these
1725    for now. */
1726 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1727 /* Is SET_SRC something we want to gcse? */
1728 && want_to_gcse_p (src)
1729 /* Don't CSE a nop. */
1730 && ! set_noop_p (pat)
1731 /* Don't GCSE if it has attached REG_EQUIV note.
1732 At this point, only function parameters should have
1733 REG_EQUIV notes and if the argument slot is used somewhere
1734 explicitly, it means address of parameter has been taken,
1735 so we should not extend the lifetime of the pseudo. */
1736 && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
1738 /* An expression is not anticipatable if its operands are
1739 modified before this insn or if this is not the only SET in
1740 this insn. The latter condition does not have to mean that
1741 SRC itself is not anticipatable, but we just will not be
1742 able to handle code motion of insns with multiple sets. */
1743 int antic_p = oprs_anticipatable_p (src, insn)
1744 && !multiple_sets (insn);
1745 /* An expression is not available if its operands are
1746 subsequently modified, including this insn. It's also not
1747 available if this is a branch, because we can't insert
1748 a set after the branch. */
1749 int avail_p = (oprs_available_p (src, insn)
1750 && ! JUMP_P (insn));
1752 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
1755 /* Record sets for constant/copy propagation. */
1756 else if (table->set_p
1757 && regno >= FIRST_PSEUDO_REGISTER
1758 && ((REG_P (src)
1759      && REGNO (src) >= FIRST_PSEUDO_REGISTER
1760 && can_copy_p (GET_MODE (dest))
1761 && REGNO (src) != regno)
1762 || gcse_constant_p (src))
1763 /* A copy is not available if its src or dest is subsequently
1764 modified. Here we want to search from INSN+1 on, but
1765 oprs_available_p searches from INSN on. */
1766 && (insn == BB_END (BLOCK_FOR_INSN (insn))
1767 || (tmp = next_nonnote_insn (insn)) == NULL_RTX
1768 || BLOCK_FOR_INSN (tmp) != BLOCK_FOR_INSN (insn)
1769 || oprs_available_p (pat, tmp)))
1770 insert_set_in_table (pat, insn, table);
1772 /* In case of store we want to consider the memory value as available in
1773    the REG stored in that memory. This makes it possible to remove
1774    redundant loads due to stores to the same location. */
1775 else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
1777 unsigned int regno = REGNO (src);
1779 /* Do not do this for constant/copy propagation. */
1781 /* Only record sets of pseudo-regs in the hash table. */
1782 && regno >= FIRST_PSEUDO_REGISTER
1783 /* Don't GCSE something if we can't do a reg/reg copy. */
1784 && can_copy_p (GET_MODE (src))
1785 /* GCSE commonly inserts instructions after the insn. We can't
1786    do that easily for EH_REGION notes so disable GCSE on these
1787    for now. */
1788 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1789 /* Is SET_DEST something we want to gcse? */
1790 && want_to_gcse_p (dest)
1791 /* Don't CSE a nop. */
1792 && ! set_noop_p (pat)
1793 /* Don't GCSE if it has attached REG_EQUIV note.
1794 At this point, only function parameters should have
1795 REG_EQUIV notes and if the argument slot is used somewhere
1796 explicitly, it means address of parameter has been taken,
1797 so we should not extend the lifetime of the pseudo. */
1798 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
1799 || ! MEM_P (XEXP (note, 0))))
1801 /* Stores are never anticipatable. */
1802 int antic_p = 0;
1803 /* An expression is not available if its operands are
1804 subsequently modified, including this insn. It's also not
1805 available if this is a branch, because we can't insert
1806 a set after the branch. */
1807 int avail_p = oprs_available_p (dest, insn)
1808               && ! JUMP_P (insn);
1810 /* Record the memory expression (DEST) in the hash table. */
1811 insert_expr_in_table (dest, GET_MODE (dest), insn,
1812 antic_p, avail_p, table);
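/* A sketch of what recording DEST buys under -fgcse-las (hypothetical
   pseudos):

     (set (mem (reg 110)) (reg 100))   ;; store
     ...
     (set (reg 120) (mem (reg 110)))   ;; load, redundant with the store

   with the MEM recorded as available in reg 100, the later load can be
   satisfied by a register copy from reg 100 instead of re-reading memory.  */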
1818 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1819 struct hash_table *table ATTRIBUTE_UNUSED)
1821 /* Currently nothing to do. */
1825 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1826 struct hash_table *table ATTRIBUTE_UNUSED)
1828 /* Currently nothing to do. */
1831 /* Process INSN and add hash table entries as appropriate.
1833 Only available expressions that set a single pseudo-reg are recorded.
1835 Single sets in a PARALLEL could be handled, but it's an extra complication
1836 that isn't dealt with right now. The trick is handling the CLOBBERs that
1837 are also in the PARALLEL. Later.
1839 If TABLE->set_p is nonzero, this is for the assignment hash table,
1840 otherwise it is for the expression hash table. */
1843 hash_scan_insn (rtx insn, struct hash_table *table)
1845 rtx pat = PATTERN (insn);
1846 int i;
1848 /* Pick out the sets of INSN and for other forms of instructions record
1849 what's been modified. */
1851 if (GET_CODE (pat) == SET)
1852 hash_scan_set (pat, insn, table);
1853 else if (GET_CODE (pat) == PARALLEL)
1854 for (i = 0; i < XVECLEN (pat, 0); i++)
1856 rtx x = XVECEXP (pat, 0, i);
1858 if (GET_CODE (x) == SET)
1859 hash_scan_set (x, insn, table);
1860 else if (GET_CODE (x) == CLOBBER)
1861 hash_scan_clobber (x, insn, table);
1862 else if (GET_CODE (x) == CALL)
1863 hash_scan_call (x, insn, table);
1866 else if (GET_CODE (pat) == CLOBBER)
1867 hash_scan_clobber (pat, insn, table);
1868 else if (GET_CODE (pat) == CALL)
1869 hash_scan_call (pat, insn, table);
1873 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
1876 /* Flattened out table, so it's printed in proper order. */
1877 struct expr **flat_table;
1878 unsigned int *hash_val;
1879 struct expr *expr;
1881 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
1882 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
1884 for (i = 0; i < (int) table->size; i++)
1885 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1887 flat_table[expr->bitmap_index] = expr;
1888 hash_val[expr->bitmap_index] = i;
1891 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
1892 name, table->size, table->n_elems);
1894 for (i = 0; i < (int) table->n_elems; i++)
1895 if (flat_table[i] != 0)
1897 expr = flat_table[i];
1898 fprintf (file, "Index %d (hash value %d)\n ",
1899 expr->bitmap_index, hash_val[i]);
1900 print_rtl (file, expr->expr);
1901 fprintf (file, "\n");
1904 fprintf (file, "\n");
1910 /* Record register first/last/block set information for REGNO in INSN.
1912 first_set records the first place in the block where the register
1913 is set and is used to compute "anticipatability".
1915 last_set records the last place in the block where the register
1916 is set and is used to compute "availability".
1918 last_bb records the block for which first_set and last_set are
1919 valid, as a quick test to invalidate them.
1921 reg_set_in_block records whether the register is set in the block
1922 and is used to compute "transparency". */
1925 record_last_reg_set_info (rtx insn, int regno)
1927 struct reg_avail_info *info = &reg_avail_info[regno];
1928 int cuid = INSN_CUID (insn);
1930 info->last_set = cuid;
1931 if (info->last_bb != current_bb)
1933 info->last_bb = current_bb;
1934 info->first_set = cuid;
1935 SET_BIT (reg_set_in_block[current_bb->index], regno);
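/* E.g. if reg 60 is set twice in the current block, at cuids 10 and
   15, the first call records first_set = last_set = 10 and caches the
   block in last_bb; the second call then merely advances last_set to
   15.  (Numbers invented for the example.)  */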
1940 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1941 Note we store a pair of elements in the list, so they have to be
1942 taken off pairwise. */
1945 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx unused1 ATTRIBUTE_UNUSED,
1946 void *v_insn)
1948 rtx dest_addr, insn;
1951 while (GET_CODE (dest) == SUBREG
1952 || GET_CODE (dest) == ZERO_EXTRACT
1953 || GET_CODE (dest) == STRICT_LOW_PART)
1954 dest = XEXP (dest, 0);
1956 /* If DEST is not a MEM, then it will not conflict with a load. Note
1957 that function calls are assumed to clobber memory, but are handled
1958 elsewhere. */
1963 dest_addr = get_addr (XEXP (dest, 0));
1964 dest_addr = canon_rtx (dest_addr);
1965 insn = (rtx) v_insn;
1966 bb = BLOCK_NUM (insn);
1968 canon_modify_mem_list[bb] =
1969 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
1970 canon_modify_mem_list[bb] =
1971 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
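/* Thus after a store through (mem:SI (reg/f:SI 61)), the list for the
   block has the MEM at its head and the canonicalized address directly
   behind it; compute_transp below takes them back off two at a time in
   that same order.  */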
1974 /* Record memory modification information for INSN. We do not actually care
1975 about the memory location(s) that are set, or even how they are set (consider
1976 a CALL_INSN). We merely need to record which insns modify memory. */
1979 record_last_mem_set_info (rtx insn)
1981 int bb = BLOCK_NUM (insn);
1983 /* load_killed_in_block_p will handle the case of calls clobbering
1984 everything. */
1985 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
1986 bitmap_set_bit (modify_mem_list_set, bb);
1990 /* Note that traversals of this loop (other than for free-ing)
1991 will break after encountering a CALL_INSN. So, there's no
1992 need to insert a pair of items, as canon_list_insert does. */
1993 canon_modify_mem_list[bb] =
1994 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
1995 bitmap_set_bit (blocks_with_calls, bb);
1998 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2001 /* Called from compute_hash_table via note_stores to handle one
2002 SET or CLOBBER in an insn. DATA is really the instruction in which
2003 the SET is taking place. */
2006 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
2008 rtx last_set_insn = (rtx) data;
2010 if (GET_CODE (dest) == SUBREG)
2011 dest = SUBREG_REG (dest);
2014 record_last_reg_set_info (last_set_insn, REGNO (dest));
2015 else if (MEM_P (dest)
2016 /* Ignore pushes, they clobber nothing. */
2017 && ! push_operand (dest, GET_MODE (dest)))
2018 record_last_mem_set_info (last_set_insn);
2021 /* Top level function to create an expression or assignment hash table.
2023 Expression entries are placed in the hash table if
2024 - they are of the form (set (pseudo-reg) src),
2025 - src is something we want to perform GCSE on,
2026 - none of the operands are subsequently modified in the block
2028 Assignment entries are placed in the hash table if
2029 - they are of the form (set (pseudo-reg) src),
2030 - src is something we want to perform const/copy propagation on,
2031 - none of the operands or target are subsequently modified in the block
2033 Currently src must be a pseudo-reg or a const_int.
2035 TABLE is the table computed. */
2038 compute_hash_table_work (struct hash_table *table)
2042 /* While we compute the hash table we also compute a bit array of which
2043 registers are set in which blocks.
2044 ??? This isn't needed during const/copy propagation, but it's cheap to
2045 compute. Later. */
2046 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2048 /* Re-cache any INSN_LIST nodes we have allocated. */
2049 clear_modify_mem_tables ();
2050 /* Some working arrays used to track first and last set in each block. */
2051 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2053 for (i = 0; i < max_gcse_regno; ++i)
2054 reg_avail_info[i].last_bb = NULL;
2056 FOR_EACH_BB (current_bb)
2061 /* First pass over the instructions records information used to
2062 determine when registers and memory are first and last set.
2063 ??? hard-reg reg_set_in_block computation
2064 could be moved to compute_sets since they currently don't change. */
2066 FOR_BB_INSNS (current_bb, insn)
2068 if (! INSN_P (insn))
2073 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2074 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2075 record_last_reg_set_info (insn, regno);
2080 note_stores (PATTERN (insn), record_last_set_info, insn);
2083 /* Insert implicit sets in the hash table. */
2084 if (table->set_p
2085 && implicit_sets[current_bb->index] != NULL_RTX)
2086 hash_scan_set (implicit_sets[current_bb->index],
2087 BB_HEAD (current_bb), table);
2089 /* The next pass builds the hash table. */
2090 FOR_BB_INSNS (current_bb, insn)
2092 hash_scan_insn (insn, table);
2095 free (reg_avail_info);
2096 reg_avail_info = NULL;
2099 /* Allocate space for the set/expr hash TABLE.
2100 N_INSNS is the number of instructions in the function.
2101 It is used to determine the number of buckets to use.
2102 SET_P determines whether set or expression table will
2103 be created. */
2106 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2110 table->size = n_insns / 4;
2111 if (table->size < 11)
2112 table->size = 11;
2114 /* Attempt to maintain efficient use of hash table.
2115 Making it an odd number is simplest for now.
2116 ??? Later take some measurements. */
2117 table->size |= 1;
2118 n = table->size * sizeof (struct expr *);
2119 table->table = gmalloc (n);
2120 table->set_p = set_p;
2123 /* Free things allocated by alloc_hash_table. */
2126 free_hash_table (struct hash_table *table)
2128 free (table->table);
2131 /* Compute the hash TABLE for doing copy/const propagation or
2132 expression hash table. */
2135 compute_hash_table (struct hash_table *table)
2137 /* Initialize count of number of entries in hash table. */
2138 table->n_elems = 0;
2139 memset (table->table, 0, table->size * sizeof (struct expr *));
2141 compute_hash_table_work (table);
2144 /* Expression tracking support. */
2146 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2147 table entry, or NULL if not found. */
2149 static struct expr *
2150 lookup_set (unsigned int regno, struct hash_table *table)
2152 unsigned int hash = hash_set (regno, table->size);
2155 expr = table->table[hash];
2157 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2158 expr = expr->next_same_hash;
2163 /* Return the next entry for REGNO in list EXPR. */
2165 static struct expr *
2166 next_set (unsigned int regno, struct expr *expr)
2168 do
2169 expr = expr->next_same_hash;
2170 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
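/* Together, lookup_set and next_set let a caller walk every recorded
   set of a register:

       for (set = lookup_set (regno, &set_hash_table);
            set != NULL;
            set = next_set (regno, set))
         ...

   This is the pattern used by find_avail_set and find_bypass_set
   below.  */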
2175 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2176 types may be mixed. */
2179 free_insn_expr_list_list (rtx *listp)
2183 for (list = *listp; list ; list = next)
2185 next = XEXP (list, 1);
2186 if (GET_CODE (list) == EXPR_LIST)
2187 free_EXPR_LIST_node (list);
2189 free_INSN_LIST_node (list);
2195 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2197 clear_modify_mem_tables (void)
2202 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2204 free_INSN_LIST_list (modify_mem_list + i);
2205 free_insn_expr_list_list (canon_modify_mem_list + i);
2207 bitmap_clear (modify_mem_list_set);
2208 bitmap_clear (blocks_with_calls);
2211 /* Release memory used by modify_mem_list_set. */
2214 free_modify_mem_tables (void)
2216 clear_modify_mem_tables ();
2217 free (modify_mem_list);
2218 free (canon_modify_mem_list);
2219 modify_mem_list = 0;
2220 canon_modify_mem_list = 0;
2223 /* Reset tables used to keep track of what's still available [since the
2224 start of the block]. */
2227 reset_opr_set_tables (void)
2229 /* Maintain a bitmap of which regs have been set since beginning of
2230 the block. */
2231 CLEAR_REG_SET (reg_set_bitmap);
2233 /* Also keep a record of the last instruction to modify memory.
2234 For now this is very trivial, we only record whether any memory
2235 location has been modified. */
2236 clear_modify_mem_tables ();
2239 /* Return nonzero if the operands of X are not set before INSN in
2240 INSN's basic block. */
2243 oprs_not_set_p (const_rtx x, const_rtx insn)
2252 code = GET_CODE (x);
2269 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2270 INSN_CUID (insn), x, 0))
2273 return oprs_not_set_p (XEXP (x, 0), insn);
2276 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2282 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2286 /* If we are about to do the last recursive call
2287 needed at this level, change it into iteration.
2288 This function is called enough to be worth it. */
2290 return oprs_not_set_p (XEXP (x, i), insn);
2292 if (! oprs_not_set_p (XEXP (x, i), insn))
2295 else if (fmt[i] == 'E')
2296 for (j = 0; j < XVECLEN (x, i); j++)
2297 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2304 /* Mark things set by a CALL. */
2307 mark_call (rtx insn)
2309 if (! RTL_CONST_OR_PURE_CALL_P (insn))
2310 record_last_mem_set_info (insn);
2313 /* Mark things set by a SET. */
2316 mark_set (rtx pat, rtx insn)
2318 rtx dest = SET_DEST (pat);
2320 while (GET_CODE (dest) == SUBREG
2321 || GET_CODE (dest) == ZERO_EXTRACT
2322 || GET_CODE (dest) == STRICT_LOW_PART)
2323 dest = XEXP (dest, 0);
2326 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2327 else if (MEM_P (dest))
2328 record_last_mem_set_info (insn);
2330 if (GET_CODE (SET_SRC (pat)) == CALL)
2334 /* Record things set by a CLOBBER. */
2337 mark_clobber (rtx pat, rtx insn)
2339 rtx clob = XEXP (pat, 0);
2341 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2342 clob = XEXP (clob, 0);
2345 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2347 record_last_mem_set_info (insn);
2350 /* Record things set by INSN.
2351 This data is used by oprs_not_set_p. */
2354 mark_oprs_set (rtx insn)
2356 rtx pat = PATTERN (insn);
2359 if (GET_CODE (pat) == SET)
2360 mark_set (pat, insn);
2361 else if (GET_CODE (pat) == PARALLEL)
2362 for (i = 0; i < XVECLEN (pat, 0); i++)
2364 rtx x = XVECEXP (pat, 0, i);
2366 if (GET_CODE (x) == SET)
2368 else if (GET_CODE (x) == CLOBBER)
2369 mark_clobber (x, insn);
2370 else if (GET_CODE (x) == CALL)
2374 else if (GET_CODE (pat) == CLOBBER)
2375 mark_clobber (pat, insn);
2376 else if (GET_CODE (pat) == CALL)
2381 /* Compute copy/constant propagation working variables. */
2383 /* Local properties of assignments. */
2384 static sbitmap *cprop_pavloc;
2385 static sbitmap *cprop_absaltered;
2387 /* Global properties of assignments (computed from the local properties). */
2388 static sbitmap *cprop_avin;
2389 static sbitmap *cprop_avout;
2391 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2392 basic blocks. N_SETS is the number of sets. */
2395 alloc_cprop_mem (int n_blocks, int n_sets)
2397 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2398 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2400 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2401 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2404 /* Free vars used by copy/const propagation. */
2407 free_cprop_mem (void)
2409 sbitmap_vector_free (cprop_pavloc);
2410 sbitmap_vector_free (cprop_absaltered);
2411 sbitmap_vector_free (cprop_avin);
2412 sbitmap_vector_free (cprop_avout);
2415 /* For each block, compute whether X is transparent. X is either an
2416 expression or an assignment [though we don't care which, for this context
2417 an assignment is treated as an expression]. For each block where an
2418 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2419 bit in BMAP. */
2422 compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
2430 /* repeat is used to turn tail-recursion into iteration since GCC
2431 can't do it when there's no return value. */
2437 code = GET_CODE (x);
2443 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2446 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2447 SET_BIT (bmap[bb->index], indx);
2451 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2452 SET_BIT (bmap[r->bb_index], indx);
2457 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2460 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2461 RESET_BIT (bmap[bb->index], indx);
2465 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2466 RESET_BIT (bmap[r->bb_index], indx);
2473 if (! MEM_READONLY_P (x))
2478 /* First handle all the blocks with calls. We don't need to
2479 do any list walking for them. */
2480 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2483 SET_BIT (bmap[bb_index], indx);
2485 RESET_BIT (bmap[bb_index], indx);
2488 /* Now iterate over the blocks which have memory modifications
2489 but which do not have any calls. */
2490 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
2494 rtx list_entry = canon_modify_mem_list[bb_index];
2498 rtx dest, dest_addr;
2500 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2501 Examine each hunk of memory that is modified. */
2503 dest = XEXP (list_entry, 0);
2504 list_entry = XEXP (list_entry, 1);
2505 dest_addr = XEXP (list_entry, 0);
2507 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2508 x, rtx_addr_varies_p))
2511 SET_BIT (bmap[bb_index], indx);
2513 RESET_BIT (bmap[bb_index], indx);
2516 list_entry = XEXP (list_entry, 1);
2541 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2545 /* If we are about to do the last recursive call
2546 needed at this level, change it into iteration.
2547 This function is called enough to be worth it. */
2554 compute_transp (XEXP (x, i), indx, bmap, set_p);
2556 else if (fmt[i] == 'E')
2557 for (j = 0; j < XVECLEN (x, i); j++)
2558 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
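/* As an example, for X = (plus:SI (reg:SI 60) (mem:SI (reg/f:SI 61)))
   with SET_P == 0, the REG case resets the INDX bit in every block
   that sets reg 60, and the MEM case resets it in every block that
   contains a call as well as in every block whose recorded stores may
   alias the load.  Register numbers are invented for the example.  */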
2562 /* Top level routine to do the dataflow analysis needed by copy/const
2566 compute_cprop_data (void)
2568 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2569 compute_available (cprop_pavloc, cprop_absaltered,
2570 cprop_avout, cprop_avin);
2573 /* Copy/constant propagation. */
2575 /* Maximum number of register uses in an insn that we handle. */
2578 /* Table of uses found in an insn.
2579 Allocated statically to avoid alloc/free complexity and overhead. */
2580 static struct reg_use reg_use_table[MAX_USES];
2582 /* Index into `reg_use_table' while building it. */
2583 static int reg_use_count;
2585 /* Set up a list of register numbers used in INSN. The found uses are stored
2586 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2587 and contains the number of uses in the table upon exit.
2589 ??? If a register appears multiple times we will record it multiple times.
2590 This doesn't hurt anything but it will slow things down. */
2593 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2600 /* repeat is used to turn tail-recursion into iteration since GCC
2601 can't do it when there's no return value. */
2606 code = GET_CODE (x);
2609 if (reg_use_count == MAX_USES)
2612 reg_use_table[reg_use_count].reg_rtx = x;
2616 /* Recursively scan the operands of this expression. */
2618 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2622 /* If we are about to do the last recursive call
2623 needed at this level, change it into iteration.
2624 This function is called enough to be worth it. */
2631 find_used_regs (&XEXP (x, i), data);
2633 else if (fmt[i] == 'E')
2634 for (j = 0; j < XVECLEN (x, i); j++)
2635 find_used_regs (&XVECEXP (x, i, j), data);
2639 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2640 Returns nonzero if successful. */
2643 try_replace_reg (rtx from, rtx to, rtx insn)
2645 rtx note = find_reg_equal_equiv_note (insn);
2648 rtx set = single_set (insn);
2650 /* Usually we substitute easy stuff, so we won't copy everything.
2651 We, however, need to take care not to duplicate non-trivial CONST
2652 expressions. */
2655 validate_replace_src_group (from, to, insn);
2656 if (num_changes_pending () && apply_change_group ())
2659 /* Try to simplify SET_SRC if we have substituted a constant. */
2660 if (success && set && CONSTANT_P (to))
2662 src = simplify_rtx (SET_SRC (set));
2665 validate_change (insn, &SET_SRC (set), src, 0);
2668 /* If there is already a REG_EQUAL note, update the expression in it
2669 with our replacement. */
2670 if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
2671 set_unique_reg_note (insn, REG_EQUAL,
2672 simplify_replace_rtx (XEXP (note, 0), from,
2674 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2676 /* If above failed and this is a single set, try to simplify the source of
2677 the set given our substitution. We could perhaps try this for multiple
2678 SETs, but it probably won't buy us anything. */
2679 src = simplify_replace_rtx (SET_SRC (set), from, to);
2681 if (!rtx_equal_p (src, SET_SRC (set))
2682 && validate_change (insn, &SET_SRC (set), src, 0))
2685 /* If we've failed to do replacement, have a single SET, don't already
2686 have a note, and have no special SET, add a REG_EQUAL note to not
2687 lose information. */
2688 if (!success && note == 0 && set != 0
2689 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
2690 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2691 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2694 /* A REG_EQUAL note may get simplified into a plain register.
2695 We don't allow that, so remove the note. This case ought
2696 not to happen, because the code above ought to have synthesized
2697 a reg-reg move, but be on the safe side. */
2698 if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
2699 remove_note (insn, note);
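/* For instance, substituting (const_int 7) for reg 60 in

       (set (reg:SI 61) (plus:SI (reg:SI 60) (const_int 1)))

   first validates the raw replacement and then lets simplify_rtx fold
   the source down to (const_int 8).  (Operands invented for the
   example.)  */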
2704 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
2705 NULL if no such set is found. */
2707 static struct expr *
2708 find_avail_set (int regno, rtx insn)
2710 /* SET1 contains the last set found that can be returned to the caller for
2711 use in a substitution. */
2712 struct expr *set1 = 0;
2714 /* Loops are not possible here. To get a loop we would need two sets
2715 available at the start of the block containing INSN. i.e. we would
2716 need two sets like this available at the start of the block:
2718 (set (reg X) (reg Y))
2719 (set (reg Y) (reg X))
2721 This can not happen since the set of (reg Y) would have killed the
2722 set of (reg X) making it unavailable at the start of this block. */
2726 struct expr *set = lookup_set (regno, &set_hash_table);
2728 /* Find a set that is available at the start of the block
2729 which contains INSN. */
2732 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2734 set = next_set (regno, set);
2737 /* If no available set was found we've reached the end of the
2738 (possibly empty) copy chain. */
2742 gcc_assert (GET_CODE (set->expr) == SET);
2744 src = SET_SRC (set->expr);
2746 /* We know the set is available.
2747 Now check that SRC is ANTLOC (i.e. none of the source operands
2748 have changed since the start of the block).
2750 If the source operand changed, we may still use it for the next
2751 iteration of this loop, but we may not use it for substitutions. */
2753 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2756 /* If the source of the set is anything except a register, then
2757 we have reached the end of the copy chain. */
2761 /* Follow the copy chain, i.e. start another iteration of the loop
2762 and see if we have an available copy into SRC. */
2763 regno = REGNO (src);
2766 /* SET1 holds the last set that was available and anticipatable at
2767 INSN. */
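/* As an example of the chain following: if both

       (set (reg X) (reg Y))
       (set (reg Y) (const_int 7))

   are available at the start of INSN's block and unchanged at INSN,
   a use of reg X steps to reg Y, then reaches the constant, so the
   caller substitutes 7 for reg X.  */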
2771 /* Subroutine of cprop_insn that tries to propagate constants into
2772 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2773 it is the instruction that immediately precedes JUMP, and must be a
2774 single SET of a register. FROM is what we will try to replace,
2775 SRC is the constant we will try to substitute for it. Returns nonzero
2776 if a change was made. */
2779 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2781 rtx new, set_src, note_src;
2782 rtx set = pc_set (jump);
2783 rtx note = find_reg_equal_equiv_note (jump);
2787 note_src = XEXP (note, 0);
2788 if (GET_CODE (note_src) == EXPR_LIST)
2789 note_src = NULL_RTX;
2791 else note_src = NULL_RTX;
2793 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2794 set_src = note_src ? note_src : SET_SRC (set);
2796 /* First substitute the SETCC condition into the JUMP instruction,
2797 then substitute the given values into this expanded JUMP. */
2798 if (setcc != NULL_RTX
2799 && !modified_between_p (from, setcc, jump)
2800 && !modified_between_p (src, setcc, jump))
2803 rtx setcc_set = single_set (setcc);
2804 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2805 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2806 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2807 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2813 new = simplify_replace_rtx (set_src, from, src);
2815 /* If no simplification can be made, then try the next register. */
2816 if (rtx_equal_p (new, SET_SRC (set)))
2819 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
2824 /* Ensure the value computed inside the jump insn is equivalent
2825 to the one computed by setcc. */
2826 if (setcc && modified_in_p (new, setcc))
2828 if (! validate_unshare_change (jump, &SET_SRC (set), new, 0))
2830 /* When (some) constants are not valid in a comparison, and there
2831 are two registers to be replaced by constants before the entire
2832 comparison can be folded into a constant, we need to keep
2833 intermediate information in REG_EQUAL notes. For targets with
2834 separate compare insns, such notes are added by try_replace_reg.
2835 When we have a combined compare-and-branch instruction, however,
2836 we need to attach a note to the branch itself to make this
2837 optimization work. */
2839 if (!rtx_equal_p (new, note_src))
2840 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2844 /* Remove REG_EQUAL note after simplification. */
2846 remove_note (jump, note);
2850 /* Delete the cc0 setter. */
2851 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2852 delete_insn (setcc);
2855 run_jump_opt_after_gcse = 1;
2857 global_const_prop_count++;
2858 if (dump_file != NULL)
2861 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2862 REGNO (from), INSN_UID (jump));
2863 print_rtl (dump_file, src);
2864 fprintf (dump_file, "\n");
2866 purge_dead_edges (bb);
2868 /* If a conditional jump has been changed into an unconditional jump, remove
2869 the jump and make the edge fallthru - this is always called in
2870 cfglayout mode. */
2871 if (new != pc_rtx && simplejump_p (jump))
2876 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ei_next (&ei))
2877 if (e->dest != EXIT_BLOCK_PTR
2878 && BB_HEAD (e->dest) == JUMP_LABEL (jump))
2880 e->flags |= EDGE_FALLTHRU;
2890 constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
2894 /* Check for reg or cc0 setting instructions followed by
2895 conditional branch instructions first. */
2897 && (sset = single_set (insn)) != NULL
2899 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2901 rtx dest = SET_DEST (sset);
2902 if ((REG_P (dest) || CC0_P (dest))
2903 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2907 /* Handle normal insns next. */
2908 if (NONJUMP_INSN_P (insn)
2909 && try_replace_reg (from, to, insn))
2912 /* Try to propagate a CONST_INT into a conditional jump.
2913 We're pretty specific about what we will handle in this
2914 code; we can extend this as necessary over time.
2916 Right now the insn in question must look like
2917 (set (pc) (if_then_else ...)) */
2918 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2919 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2923 /* Perform constant and copy propagation on INSN.
2924 The result is nonzero if a change was made. */
2927 cprop_insn (rtx insn, int alter_jumps)
2929 struct reg_use *reg_used;
2937 note_uses (&PATTERN (insn), find_used_regs, NULL);
2939 note = find_reg_equal_equiv_note (insn);
2941 /* We may win even when propagating constants into notes. */
2943 find_used_regs (&XEXP (note, 0), NULL);
2945 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2946 reg_used++, reg_use_count--)
2948 unsigned int regno = REGNO (reg_used->reg_rtx);
2952 /* Ignore registers created by GCSE; the tables were sized before
2953 GCSE created them, so no information about them is available. */
2954 if (regno >= max_gcse_regno)
2957 /* If the register has already been set in this block, there's
2958 nothing we can do. */
2959 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2962 /* Find an assignment that sets reg_used and is available
2963 at the start of the block. */
2964 set = find_avail_set (regno, insn);
2969 /* ??? We might be able to handle PARALLELs. Later. */
2970 gcc_assert (GET_CODE (pat) == SET);
2972 src = SET_SRC (pat);
2974 /* Constant propagation. */
2975 if (gcse_constant_p (src))
2977 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2980 global_const_prop_count++;
2981 if (dump_file != NULL)
2983 fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
2984 fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
2985 print_rtl (dump_file, src);
2986 fprintf (dump_file, "\n");
2988 if (INSN_DELETED_P (insn))
2992 else if (REG_P (src)
2993 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2994 && REGNO (src) != regno)
2996 if (try_replace_reg (reg_used->reg_rtx, src, insn))
2999 global_copy_prop_count++;
3000 if (dump_file != NULL)
3002 fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
3003 regno, INSN_UID (insn));
3004 fprintf (dump_file, " with reg %d\n", REGNO (src));
3007 /* The original insn setting reg_used may or may not now be
3008 deletable. We leave the deletion to flow. */
3009 /* FIXME: If it turns out that the insn isn't deletable,
3010 then we may have unnecessarily extended register lifetimes
3011 and made things worse. */
3019 /* Like find_used_regs, but avoid recording uses that appear in
3020 input-output contexts such as zero_extract or pre_dec. This
3021 restricts the cases we consider to those for which local cprop
3022 can legitimately make replacements. */
3025 local_cprop_find_used_regs (rtx *xptr, void *data)
3032 switch (GET_CODE (x))
3036 case STRICT_LOW_PART:
3045 /* Can only legitimately appear this early in the context of
3046 stack pushes for function arguments, but handle all of the
3047 codes nonetheless. */
3051 /* Setting a subreg of a register larger than word_mode leaves
3052 the non-written words unchanged. */
3053 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3061 find_used_regs (xptr, data);
3064 /* Try to perform local const/copy propagation on X in INSN.
3065 If ALTER_JUMPS is false, changing jump insns is not allowed. */
3068 do_local_cprop (rtx x, rtx insn, bool alter_jumps)
3070 rtx newreg = NULL, newcnst = NULL;
3072 /* Rule out USE instructions and ASM statements as we don't want to
3073 change the hard registers mentioned. */
3075 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3076 || (GET_CODE (PATTERN (insn)) != USE
3077 && asm_noperands (PATTERN (insn)) < 0)))
3079 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3080 struct elt_loc_list *l;
3084 for (l = val->locs; l; l = l->next)
3086 rtx this_rtx = l->loc;
3089 if (gcse_constant_p (this_rtx))
3091 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3092 /* Don't copy propagate if it has attached REG_EQUIV note.
3093 At this point only function parameters should have
3094 REG_EQUIV notes, and if the argument slot is used somewhere
3095 explicitly, it means the address of the parameter has been taken,
3096 so we should not extend the lifetime of the pseudo. */
3097 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3098 || ! MEM_P (XEXP (note, 0))))
3101 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3103 if (dump_file != NULL)
3105 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3107 fprintf (dump_file, "insn %d with constant ",
3109 print_rtl (dump_file, newcnst);
3110 fprintf (dump_file, "\n");
3112 local_const_prop_count++;
3115 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3117 if (dump_file != NULL)
3120 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3121 REGNO (x), INSN_UID (insn));
3122 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
3124 local_copy_prop_count++;
3131 /* Do local const/copy propagation (i.e. within each basic block).
3132 If ALTER_JUMPS is true, allow propagating into jump insns, which
3133 could modify the CFG. */
3136 local_cprop_pass (bool alter_jumps)
3140 struct reg_use *reg_used;
3141 bool changed = false;
3143 cselib_init (false);
3146 FOR_BB_INSNS (bb, insn)
3150 rtx note = find_reg_equal_equiv_note (insn);
3154 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
3157 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3159 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3160 reg_used++, reg_use_count--)
3162 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps))
3168 if (INSN_DELETED_P (insn))
3171 while (reg_use_count);
3173 cselib_process_insn (insn);
3176 /* Forget everything at the end of a basic block. */
3177 cselib_clear_table ();
3182 /* Global analysis may get into infinite loops for unreachable blocks. */
3183 if (changed && alter_jumps)
3185 delete_unreachable_blocks ();
3186 free_reg_set_mem ();
3187 alloc_reg_set_mem (max_reg_num ());
3192 /* Forward propagate copies. This includes copies and constants. Return
3193 nonzero if a change was made. */
3196 cprop (int alter_jumps)
3202 /* Note we start at block 1. */
3203 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3205 if (dump_file != NULL)
3206 fprintf (dump_file, "\n");
3211 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3213 /* Reset tables used to keep track of what's still valid [since the
3214 start of the block]. */
3215 reset_opr_set_tables ();
3217 FOR_BB_INSNS (bb, insn)
3220 changed |= cprop_insn (insn, alter_jumps);
3222 /* Keep track of everything modified by this insn. */
3223 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3224 call mark_oprs_set if we turned the insn into a NOTE. */
3225 if (! NOTE_P (insn))
3226 mark_oprs_set (insn);
3230 if (dump_file != NULL)
3231 fprintf (dump_file, "\n");
3236 /* Similar to get_condition, only the resulting condition must be
3237 valid at JUMP, instead of at EARLIEST.
3239 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3240 settle for the condition variable in the jump instruction being integral.
3241 We prefer to be able to record the value of a user variable, rather than
3242 the value of a temporary used in a condition. This could be solved by
3243 recording the value of *every* register scanned by canonicalize_condition,
3244 but this would require some code reorganization. */
3247 fis_get_condition (rtx jump)
3249 return get_condition (jump, NULL, false, true);
3252 /* Check the comparison COND to see if we can safely form an implicit set from
3253 it. COND is either an EQ or NE comparison. */
3256 implicit_set_cond_p (const_rtx cond)
3258 const enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3259 const_rtx cst = XEXP (cond, 1);
3261 /* We can't perform this optimization if either operand might be or might
3262 contain a signed zero. */
3263 if (HONOR_SIGNED_ZEROS (mode))
3265 /* It is sufficient to check if CST is or contains a zero. We must
3266 handle float, complex, and vector. If any subpart is a zero, then
3267 the optimization can't be performed. */
3268 /* ??? The complex and vector checks are not implemented yet. We just
3269 always return zero for them. */
3270 if (GET_CODE (cst) == CONST_DOUBLE)
3273 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3274 if (REAL_VALUES_EQUAL (d, dconst0))
3281 return gcse_constant_p (cst);
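/* E.g. "if (x == 2)" on an integer pseudo allows the implicit set
   "x = 2", but "if (d == 0.0)" does not when the mode honors signed
   zeros: d might be -0.0, which compares equal to 0.0 yet has a
   different representation.  */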
3284 /* Find the implicit sets of a function. An "implicit set" is a constraint
3285 on the value of a variable, implied by a conditional jump. For example,
3286 following "if (x == 2)", the then branch may be optimized as though the
3287 conditional performed an "explicit set", in this example, "x = 2". This
3288 function records the set patterns that are implicit at the start of each
3289 basic block. */
3292 find_implicit_sets (void)
3294 basic_block bb, dest;
3300 /* Check for more than one successor. */
3301 if (EDGE_COUNT (bb->succs) > 1)
3303 cond = fis_get_condition (BB_END (bb));
3306 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3307 && REG_P (XEXP (cond, 0))
3308 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3309 && implicit_set_cond_p (cond))
3311 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3312 : FALLTHRU_EDGE (bb)->dest;
3314 if (dest && single_pred_p (dest)
3315 && dest != EXIT_BLOCK_PTR)
3317 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3318 XEXP (cond, 1));
3319 implicit_sets[dest->index] = new;
3322 fprintf (dump_file, "Implicit set of reg %d in ",
3323 REGNO (XEXP (cond, 0)));
3324 fprintf (dump_file, "basic block %d\n", dest->index);
3332 fprintf (dump_file, "Found %d implicit sets\n", count);
3335 /* Perform one copy/constant propagation pass.
3336 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3337 propagation into conditional jumps. If BYPASS_JUMPS is true,
3338 perform conditional jump bypassing optimizations. */
3341 one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
3345 global_const_prop_count = local_const_prop_count = 0;
3346 global_copy_prop_count = local_copy_prop_count = 0;
3349 local_cprop_pass (cprop_jumps);
3351 /* Determine implicit sets. */
3352 implicit_sets = XCNEWVEC (rtx, last_basic_block);
3353 find_implicit_sets ();
3355 alloc_hash_table (max_cuid, &set_hash_table, 1);
3356 compute_hash_table (&set_hash_table);
3358 /* Free implicit_sets before peak usage. */
3359 free (implicit_sets);
3360 implicit_sets = NULL;
3363 dump_hash_table (dump_file, "SET", &set_hash_table);
3364 if (set_hash_table.n_elems > 0)
3366 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3367 compute_cprop_data ();
3368 changed = cprop (cprop_jumps);
3370 changed |= bypass_conditional_jumps ();
3374 free_hash_table (&set_hash_table);
3378 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
3379 current_function_name (), pass, bytes_used);
3380 fprintf (dump_file, "%d local const props, %d local copy props, ",
3381 local_const_prop_count, local_copy_prop_count);
3382 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
3383 global_const_prop_count, global_copy_prop_count);
3385 /* Global analysis may get into infinite loops for unreachable blocks. */
3386 if (changed && cprop_jumps)
3387 delete_unreachable_blocks ();
3392 /* Bypass conditional jumps. */
3394 /* The value of last_basic_block at the beginning of the jump_bypass
3395 pass. The use of redirect_edge_and_branch_force may introduce new
3396 basic blocks, but the data flow analysis is only valid for basic
3397 block indices less than bypass_last_basic_block. */
3399 static int bypass_last_basic_block;
3401 /* Find a set of REGNO to a constant that is available at the end of basic
3402 block BB. Returns NULL if no such set is found. Based heavily upon
3403 find_avail_set. */
3405 static struct expr *
3406 find_bypass_set (int regno, int bb)
3408 struct expr *result = 0;
3413 struct expr *set = lookup_set (regno, &set_hash_table);
3417 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3419 set = next_set (regno, set);
3425 gcc_assert (GET_CODE (set->expr) == SET);
3427 src = SET_SRC (set->expr);
3428 if (gcse_constant_p (src))
3434 regno = REGNO (src);
3440 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3441 any of the instructions inserted on an edge. Jump bypassing places
3442 condition code setters on CFG edges using insert_insn_on_edge. This
3443 function is required to check that our data flow analysis is still
3444 valid prior to commit_edge_insertions. */
3447 reg_killed_on_edge (const_rtx reg, const_edge e)
3451 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3452 if (INSN_P (insn) && reg_set_p (reg, insn))
3458 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3459 basic block BB which has more than one predecessor. If not NULL, SETCC
3460 is the first instruction of BB, which is immediately followed by JUMP_INSN
3461 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3462 Returns nonzero if a change was made.
3464 During the jump bypassing pass, we may place copies of SETCC instructions
3465 on CFG edges. The following routine must be careful to pay attention to
3466 these inserted insns when performing its transformations. */
3469 bypass_block (basic_block bb, rtx setcc, rtx jump)
3474 int may_be_loop_header;
3478 insn = (setcc != NULL) ? setcc : jump;
3480 /* Determine set of register uses in INSN. */
3482 note_uses (&PATTERN (insn), find_used_regs, NULL);
3483 note = find_reg_equal_equiv_note (insn);
3485 find_used_regs (&XEXP (note, 0), NULL);
3487 may_be_loop_header = false;
3488 FOR_EACH_EDGE (e, ei, bb->preds)
3489 if (e->flags & EDGE_DFS_BACK)
3491 may_be_loop_header = true;
3496 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3500 if (e->flags & EDGE_COMPLEX)
3506 /* We can't redirect edges from new basic blocks. */
3507 if (e->src->index >= bypass_last_basic_block)
3513 /* The irreducible loops created by redirecting edges entering the
3514 loop from outside would decrease the effectiveness of some of the following
3515 optimizations, so prevent this. */
3516 if (may_be_loop_header
3517 && !(e->flags & EDGE_DFS_BACK))
3523 for (i = 0; i < reg_use_count; i++)
3525 struct reg_use *reg_used = &reg_use_table[i];
3526 unsigned int regno = REGNO (reg_used->reg_rtx);
3527 basic_block dest, old_dest;
3531 if (regno >= max_gcse_regno)
3534 set = find_bypass_set (regno, e->src->index);
3539 /* Check the data flow is valid after edge insertions. */
3540 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3543 src = SET_SRC (pc_set (jump));
3546 src = simplify_replace_rtx (src,
3547 SET_DEST (PATTERN (setcc)),
3548 SET_SRC (PATTERN (setcc)));
3550 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3551 SET_SRC (set->expr));
3553 /* Jump bypassing may have already placed instructions on
3554 edges of the CFG. We can't bypass an outgoing edge that
3555 has instructions associated with it, as these insns won't
3556 get executed if the incoming edge is redirected. */
3560 edest = FALLTHRU_EDGE (bb);
3561 dest = edest->insns.r ? NULL : edest->dest;
3563 else if (GET_CODE (new) == LABEL_REF)
3565 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3566 /* Don't bypass edges containing instructions. */
3567 edest = find_edge (bb, dest);
3568 if (edest && edest->insns.r)
3574 /* Avoid unification of the edge with other edges from original
3575 branch. We would end up emitting the instruction on "both"
3576 edges. */
3578 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3579 && find_edge (e->src, dest))
3585 && dest != EXIT_BLOCK_PTR)
3587 redirect_edge_and_branch_force (e, dest);
3589 /* Copy the register setter to the redirected edge.
3590 Don't copy CC0 setters, as CC0 is dead after jump. */
3593 rtx pat = PATTERN (setcc);
3594 if (!CC0_P (SET_DEST (pat)))
3595 insert_insn_on_edge (copy_insn (pat), e);
3598 if (dump_file != NULL)
3600 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
3601 "in jump_insn %d equals constant ",
3602 regno, INSN_UID (jump));
3603 print_rtl (dump_file, SET_SRC (set->expr));
3604 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
3605 e->src->index, old_dest->index, dest->index);
3618 /* Find basic blocks with more than one predecessor that only contain a
3619 single conditional jump. If the result of the comparison is known at
3620 compile-time from any incoming edge, redirect that edge to the
3621 appropriate target. Returns nonzero if a change was made.
3623 This function is now mis-named, because we also handle indirect jumps. */
3626 bypass_conditional_jumps (void)
3634 /* Note we start at block 1. */
3635 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3638 bypass_last_basic_block = last_basic_block;
3639 mark_dfs_back_edges ();
3642 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3643 EXIT_BLOCK_PTR, next_bb)
3645 /* Check for more than one predecessor. */
3646 if (!single_pred_p (bb))
3649 FOR_BB_INSNS (bb, insn)
3650 if (NONJUMP_INSN_P (insn))
3654 if (GET_CODE (PATTERN (insn)) != SET)
3657 dest = SET_DEST (PATTERN (insn));
3658 if (REG_P (dest) || CC0_P (dest))
3663 else if (JUMP_P (insn))
3665 if ((any_condjump_p (insn) || computed_jump_p (insn))
3666 && onlyjump_p (insn))
3667 changed |= bypass_block (bb, setcc, insn);
3670 else if (INSN_P (insn))
3675 /* If we bypassed any register setting insns, we inserted a
3676 copy on the redirected edge. These need to be committed. */
3678 commit_edge_insertions ();
3683 /* Compute PRE+LCM working variables. */
3685 /* Local properties of expressions. */
3686 /* Nonzero for expressions that are transparent in the block. */
3687 static sbitmap *transp;
3689 /* Nonzero for expressions that are transparent at the end of the block.
3690 This is zero only for expressions killed by an abnormal critical edge
3691 created by a call. */
3692 static sbitmap *transpout;
3694 /* Nonzero for expressions that are computed (available) in the block. */
3695 static sbitmap *comp;
3697 /* Nonzero for expressions that are locally anticipatable in the block. */
3698 static sbitmap *antloc;
3700 /* Nonzero for expressions where this block is an optimal computation
3701 point. */
3702 static sbitmap *pre_optimal;
3704 /* Nonzero for expressions which are redundant in a particular block. */
3705 static sbitmap *pre_redundant;
3707 /* Nonzero for expressions which should be inserted on a specific edge. */
3708 static sbitmap *pre_insert_map;
3710 /* Nonzero for expressions which should be deleted in a specific block. */
3711 static sbitmap *pre_delete_map;
3713 /* Contains the edge_list returned by pre_edge_lcm. */
3714 static struct edge_list *edge_list;
3716 /* Redundant insns. */
3717 static sbitmap pre_redundant_insns;
3719 /* Allocate vars used for PRE analysis. */
3722 alloc_pre_mem (int n_blocks, int n_exprs)
3724 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3725 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3726 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3729 pre_redundant = NULL;
3730 pre_insert_map = NULL;
3731 pre_delete_map = NULL;
3732 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3734 /* pre_insert and pre_delete are allocated later. */
3737 /* Free vars used for PRE analysis. */
3742 sbitmap_vector_free (transp);
3743 sbitmap_vector_free (comp);
3745 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3748 sbitmap_vector_free (pre_optimal);
3750 sbitmap_vector_free (pre_redundant);
3752 sbitmap_vector_free (pre_insert_map);
3754 sbitmap_vector_free (pre_delete_map);
3756 transp = comp = NULL;
3757 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3760 /* Top level routine to do the dataflow analysis needed by PRE. */
3763 compute_pre_data (void)
3765 sbitmap trapping_expr;
3769 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3770 sbitmap_vector_zero (ae_kill, last_basic_block);
3772 /* Collect expressions which might trap. */
3773 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3774 sbitmap_zero (trapping_expr);
3775 for (ui = 0; ui < expr_hash_table.size; ui++)
3778 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3779 if (may_trap_p (e->expr))
3780 SET_BIT (trapping_expr, e->bitmap_index);
3783 /* Compute ae_kill for each basic block using:
3784 ~(TRANSP | COMP). */
3793 /* If the current block is the destination of an abnormal edge, we
3794 kill all trapping expressions because we won't be able to properly
3795 place the instruction on the edge. So make them neither
3796 anticipatable nor transparent. This is fairly conservative. */
3797 FOR_EACH_EDGE (e, ei, bb->preds)
3798 if (e->flags & EDGE_ABNORMAL)
3800 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3801 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3805 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3806 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3809 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
3810 ae_kill, &pre_insert_map, &pre_delete_map);
3811 sbitmap_vector_free (antloc);
3813 sbitmap_vector_free (ae_kill);
3815 sbitmap_free (trapping_expr);
3820 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
3821 block BB.
3823 VISITED is a pointer to a working buffer for tracking which BB's have
3824 been visited. It is NULL for the top-level call.
3826 We treat reaching expressions that go through blocks containing the same
3827 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3828 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3829 2 as not reaching. The intent is to improve the probability of finding
3830 only one reaching expression and to reduce register lifetimes by picking
3831 the closest such expression. */
3834 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3839 FOR_EACH_EDGE (pred, ei, bb->preds)
3841 basic_block pred_bb = pred->src;
3843 if (pred->src == ENTRY_BLOCK_PTR
3844 /* Has this predecessor already been visited? */
3845 || visited[pred_bb->index])
3846 ;/* Nothing to do. */
3848 /* Does this predecessor generate this expression? */
3849 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3851 /* Is this the occurrence we're looking for?
3852 Note that there's only one generating occurrence per block
3853 so we just need to check the block number. */
3854 if (occr_bb == pred_bb)
3857 visited[pred_bb->index] = 1;
3859 /* Ignore this predecessor if it kills the expression. */
3860 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3861 visited[pred_bb->index] = 1;
3863 /* Neither gen nor kill. */
3866 visited[pred_bb->index] = 1;
3867 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3872 /* All paths have been checked. */
3876 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
3877 memory allocated for that function is returned. */
3880 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3883 char *visited = XCNEWVEC (char, last_basic_block);
3885 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3892 /* Given an expr, generate RTL which we can insert at the end of a BB,
3893 or on an edge. Set the block number of any insns generated to
3894 the value of BB. */
3897 process_insert_insn (struct expr *expr)
3899 rtx reg = expr->reaching_reg;
3900 rtx exp = copy_rtx (expr->expr);
3905 /* If the expression is something that's an operand, like a constant,
3906 just copy it to a register. */
3907 if (general_operand (exp, GET_MODE (reg)))
3908 emit_move_insn (reg, exp);
3910 /* Otherwise, make a new insn to compute this expression and make sure the
3911 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3912 expression to make sure we don't have any sharing issues. */
3915 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3917 if (insn_invalid_p (insn))
3928 /* Add EXPR to the end of basic block BB.
3930 This is used by both PRE and code hoisting.
3932 For PRE, we want to verify that the expr is either transparent
3933 or locally anticipatable in the target block. This check makes
3934 no sense for code hoisting. */
3937 insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre)
3939 rtx insn = BB_END (bb);
3941 rtx reg = expr->reaching_reg;
3942 int regno = REGNO (reg);
3945 pat = process_insert_insn (expr);
3946 gcc_assert (pat && INSN_P (pat));
3949 while (NEXT_INSN (pat_end) != NULL_RTX)
3950 pat_end = NEXT_INSN (pat_end);
3952 /* If the last insn is a jump, insert EXPR in front [taking care to
3953 handle cc0, etc. properly]. Similarly we need to take care of trapping
3954 instructions in the presence of non-call exceptions. */
3957 || (NONJUMP_INSN_P (insn)
3958 && (!single_succ_p (bb)
3959 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
3964 /* It should always be the case that we can put these instructions
3965 anywhere in the basic block when performing PRE optimizations.
3966 Check this. */
3967 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
3968 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
3969 || TEST_BIT (transp[bb->index], expr->bitmap_index));
3971 /* If this is a jump table, then we can't insert stuff here. Since
3972 we know the previous real insn must be the tablejump, we insert
3973 the new instruction just before the tablejump. */
3974 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3975 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3976 insn = prev_real_insn (insn);
3979 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
3980 if cc0 isn't set. */
3981 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3983 insn = XEXP (note, 0);
3986 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
3987 if (maybe_cc0_setter
3988 && INSN_P (maybe_cc0_setter)
3989 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
3990 insn = maybe_cc0_setter;
3993 /* FIXME: What if something in cc0/jump uses value set in new insn? */
3994 new_insn = emit_insn_before_noloc (pat, insn, bb);
3997 /* Likewise if the last insn is a call, as will happen in the presence
3998 of exception handling. */
3999 else if (CALL_P (insn)
4000 && (!single_succ_p (bb)
4001 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4003 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4004 we search backward and place the instructions before the first
4005 parameter is loaded. Do this for everyone for consistency and on the
4006 presumption that we'll get better code elsewhere as well.
4008 It should always be the case that we can put these instructions
4009 anywhere in the basic block when performing PRE optimizations.
4010 Check this. */
4012 gcc_assert (!pre
4013 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4014 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4016 /* Since different machines initialize their parameter registers
4017 in different orders, assume nothing. Collect the set of all
4018 parameter registers. */
4019 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4021 /* If we found all the parameter loads, then we want to insert
4022 before the first parameter load.
4024 If we did not find all the parameter loads, then we might have
4025 stopped on the head of the block, which could be a CODE_LABEL.
4026 If we inserted before the CODE_LABEL, then we would be putting
4027 the insn in the wrong basic block. In that case, put the insn
4028 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4029 while (LABEL_P (insn)
4030 || NOTE_INSN_BASIC_BLOCK_P (insn))
4031 insn = NEXT_INSN (insn);
4033 new_insn = emit_insn_before_noloc (pat, insn, bb);
4036 new_insn = emit_insn_after_noloc (pat, insn, bb);
4042 add_label_notes (PATTERN (pat), new_insn);
4043 note_stores (PATTERN (pat), record_set_info, pat);
4047 pat = NEXT_INSN (pat);
4050 gcse_create_count++;
4054 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
4055 bb->index, INSN_UID (new_insn));
4056 fprintf (dump_file, "copying expression %d to reg %d\n",
4057 expr->bitmap_index, regno);
4061 /* Insert partially redundant expressions on edges in the CFG to make
4062 the expressions fully redundant. */
4065 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4067 int e, i, j, num_edges, set_size, did_insert = 0;
4070 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4071 if it reaches any of the deleted expressions. */
4073 set_size = pre_insert_map[0]->size;
4074 num_edges = NUM_EDGES (edge_list);
4075 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4076 sbitmap_vector_zero (inserted, num_edges);
4078 for (e = 0; e < num_edges; e++)
4081 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4083 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4085 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4087 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4088 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4090 struct expr *expr = index_map[j];
4093 /* Now look at each deleted occurrence of this expression. */
4094 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4096 if (! occr->deleted_p)
4099 /* Insert this expression on this edge if it would
4100 reach the deleted occurrence in BB. */
4101 if (!TEST_BIT (inserted[e], j))
4104 edge eg = INDEX_EDGE (edge_list, e);
4106 /* We can't insert anything on an abnormal and
4107 critical edge, so we insert the insn at the end of
4108 the previous block. There are several alternatives
4109 detailed in Morgan's book, P277 (sec 10.5), for
4110 handling this situation. This one is easiest for
4111 now. */
4113 if (eg->flags & EDGE_ABNORMAL)
4114 insert_insn_end_basic_block (index_map[j], bb, 0);
4117 insn = process_insert_insn (index_map[j]);
4118 insert_insn_on_edge (insn, eg);
4123 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
4125 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4126 fprintf (dump_file, "copy expression %d\n",
4127 expr->bitmap_index);
4130 update_ld_motion_stores (expr);
4131 SET_BIT (inserted[e], j);
4133 gcse_create_count++;
4140 sbitmap_vector_free (inserted);
4144 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4145 Given "old_reg <- expr" (INSN), instead of adding after it
4146 reaching_reg <- old_reg
4147 it's better to do the following:
4148 reaching_reg <- expr
4149 old_reg <- reaching_reg
4150 because this way copy propagation can discover additional PRE
4151 opportunities. But if this fails, we try the old way.
4152 When "expr" is a store, i.e.
4153 given "MEM <- old_reg", instead of adding after it
4154 reaching_reg <- old_reg
4155 it's better to add it before as follows:
4156 reaching_reg <- old_reg
4157 MEM <- reaching_reg. */
4160 pre_insert_copy_insn (struct expr *expr, rtx insn)
4162 rtx reg = expr->reaching_reg;
4163 int regno = REGNO (reg);
4164 int indx = expr->bitmap_index;
4165 rtx pat = PATTERN (insn);
4166 rtx set, first_set, new_insn;
4170 /* This block matches the logic in hash_scan_insn. */
4171 switch (GET_CODE (pat))
4178 /* Search through the parallel looking for the set whose
4179 source was the expression that we're interested in. */
4180 first_set = NULL_RTX;
4182 for (i = 0; i < XVECLEN (pat, 0); i++)
4184 rtx x = XVECEXP (pat, 0, i);
4185 if (GET_CODE (x) == SET)
4187 /* If the source was a REG_EQUAL or REG_EQUIV note, we
4188 may not find an equivalent expression, but in this
4189 case the PARALLEL will have a single set. */
4190 if (first_set == NULL_RTX)
4192 if (expr_equiv_p (SET_SRC (x), expr->expr))
4200 gcc_assert (first_set);
4201 if (set == NULL_RTX)
4209 if (REG_P (SET_DEST (set)))
4211 old_reg = SET_DEST (set);
4212 /* Check if we can modify the set destination in the original insn. */
4213 if (validate_change (insn, &SET_DEST (set), reg, 0))
4215 new_insn = gen_move_insn (old_reg, reg);
4216 new_insn = emit_insn_after (new_insn, insn);
4218 /* Keep register set table up to date. */
4219 record_one_set (regno, insn);
4223 new_insn = gen_move_insn (reg, old_reg);
4224 new_insn = emit_insn_after (new_insn, insn);
4226 /* Keep register set table up to date. */
4227 record_one_set (regno, new_insn);
4230 else /* This is possible only in case of a store to memory. */
4232 old_reg = SET_SRC (set);
4233 new_insn = gen_move_insn (reg, old_reg);
4235 /* Check if we can modify the set source in the original insn. */
4236 if (validate_change (insn, &SET_SRC (set), reg, 0))
4237 new_insn = emit_insn_before (new_insn, insn);
4239 new_insn = emit_insn_after (new_insn, insn);
4241 /* Keep register set table up to date. */
4242 record_one_set (regno, new_insn);
4245 gcse_create_count++;
4249 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4250 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4251 INSN_UID (insn), regno);
4254 /* Copy available expressions that reach the redundant expression
4255 to `reaching_reg'. */
4258 pre_insert_copies (void)
4260 unsigned int i, added_copy;
4265 /* For each available expression in the table, copy the result to
4266 `reaching_reg' if the expression reaches a deleted one.
4268 ??? The current algorithm is rather brute force.
4269 Need to do some profiling. */
4271 for (i = 0; i < expr_hash_table.size; i++)
4272 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4274 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4275 we don't want to insert a copy here because the expression may not
4276 really be redundant. So only insert an insn if the expression was
4277 deleted. This test also avoids further processing if the
4278 expression wasn't deleted anywhere. */
4279 if (expr->reaching_reg == NULL)
4282 /* Set when we add a copy for that expression. */
4285 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4287 if (! occr->deleted_p)
4290 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4292 rtx insn = avail->insn;
4294 /* No need to handle this one if handled already. */
4295 if (avail->copied_p)
4298 /* Don't handle this one if it's a redundant one. */
4299 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4302 /* Or if the expression doesn't reach the deleted one. */
4303 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4305 BLOCK_FOR_INSN (occr->insn)))
4310 /* Copy the result of avail to reaching_reg. */
4311 pre_insert_copy_insn (expr, insn);
4312 avail->copied_p = 1;
4317 update_ld_motion_stores (expr);
/* Emit move from SRC to DEST noting the equivalence with expression computed
   in INSN.  */
static rtx
gcse_emit_move_after (rtx src, rtx dest, rtx insn)
{
  rtx new_insn, note, eqv;
  rtx set = single_set (insn), set2;

  /* This should never fail since we're creating a reg->reg copy
     we've verified to be valid.  */
  new_insn = emit_insn_after (gen_move_insn (dest, src), insn);

  /* Note the equivalence for local CSE pass.  */
  set2 = single_set (new_insn);
  if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
    return new_insn;
  if ((note = find_reg_equal_equiv_note (insn)))
    eqv = XEXP (note, 0);
  else
    eqv = SET_SRC (set);

  set_unique_reg_note (new_insn, REG_EQUAL, copy_insn_1 (eqv));

  return new_insn;
}
4350 /* Delete redundant computations.
4351 Deletion is done by changing the insn to copy the `reaching_reg' of
4352 the expression into the result of the SET. It is left to later passes
4353 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4355 Returns nonzero if a change is made. */
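/* A source-level sketch (an illustration under assumed input, not compiler
   output; p, x, y, a, b and the temporary t are hypothetical) of what
   pre_delete and pre_edge_insert achieve together:

	if (p)			if (p)
	  x = a + b;		  { t = a + b; x = t; }
	else		==>	else
	  ;			  t = a + b;	(inserted on the edge)
	y = a + b;		y = t;		(redundant insn becomes a copy)

   Only the copies are created here; propagating or eliminating them is
   left to the later passes named above.  */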
4366 for (i = 0; i < expr_hash_table.size; i++)
4367 for (expr = expr_hash_table.table[i];
4369 expr = expr->next_same_hash)
4371 int indx = expr->bitmap_index;
	/* We only need to search antic_occr since we require
	   ANTLOC != 0.  */
4376 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4378 rtx insn = occr->insn;
4380 basic_block bb = BLOCK_FOR_INSN (insn);
4382 /* We only delete insns that have a single_set. */
4383 if (TEST_BIT (pre_delete_map[bb->index], indx)
4384 && (set = single_set (insn)) != 0
4385 && dbg_cnt (pre_insn))
4387 /* Create a pseudo-reg to store the result of reaching
4388 expressions into. Get the mode for the new pseudo from
4389 the mode of the original destination pseudo. */
4390 if (expr->reaching_reg == NULL)
4391 expr->reaching_reg = gen_reg_rtx_and_attrs (SET_DEST (set));
4393 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4395 occr->deleted_p = 1;
4396 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4403 "PRE: redundant insn %d (expression %d) in ",
4404 INSN_UID (insn), indx);
4405 fprintf (dump_file, "bb %d, reaching reg is %d\n",
4406 bb->index, REGNO (expr->reaching_reg));
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.
   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */
4439 int did_insert, changed;
4440 struct expr **index_map;
4443 /* Compute a mapping from expression number (`bitmap_index') to
4444 hash table entry. */
4446 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4447 for (i = 0; i < expr_hash_table.size; i++)
4448 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4449 index_map[expr->bitmap_index] = expr;
4451 /* Reset bitmap used to track which insns are redundant. */
4452 pre_redundant_insns = sbitmap_alloc (max_cuid);
4453 sbitmap_zero (pre_redundant_insns);
4455 /* Delete the redundant insns first so that
4456 - we know what register to use for the new insns and for the other
4457 ones with reaching expressions
4458 - we know which insns are redundant when we go to create copies */
4460 changed = pre_delete ();
4461 did_insert = pre_edge_insert (edge_list, index_map);
4463 /* In other places with reaching expressions, copy the expression to the
4464 specially allocated pseudo-reg that reaches the redundant expr. */
4465 pre_insert_copies ();
4468 commit_edge_insertions ();
4473 sbitmap_free (pre_redundant_insns);
4477 /* Top level routine to perform one PRE GCSE pass.
4479 Return nonzero if a change was made. */
4482 one_pre_gcse_pass (int pass)
4486 gcse_subst_count = 0;
4487 gcse_create_count = 0;
4489 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4490 add_noreturn_fake_exit_edges ();
4492 compute_ld_motion_mems ();
4494 compute_hash_table (&expr_hash_table);
4495 trim_ld_motion_mems ();
4497 dump_hash_table (dump_file, "Expression", &expr_hash_table);
4499 if (expr_hash_table.n_elems > 0)
4501 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4502 compute_pre_data ();
4503 changed |= pre_gcse ();
4504 free_edge_list (edge_list);
4509 remove_fake_exit_edges ();
4510 free_hash_table (&expr_hash_table);
4514 fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4515 current_function_name (), pass, bytes_used);
4516 fprintf (dump_file, "%d substs, %d insns created\n",
4517 gcse_subst_count, gcse_create_count);
/* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them
   to INSN.  If such notes are added to an insn which references a
   CODE_LABEL, the LABEL_NUSES count is incremented.  We have to add
   that note, because the following loop optimization pass requires it.  */
4529 /* ??? If there was a jump optimization pass after gcse and before loop,
4530 then we would not need to do this here, because jump would add the
4531 necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes. */
4534 add_label_notes (rtx x, rtx insn)
4536 enum rtx_code code = GET_CODE (x);
4540 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4542 /* This code used to ignore labels that referred to dispatch tables to
4543 avoid flow generating (slightly) worse code.
4545 We no longer ignore such label references (see LABEL_REF handling in
4546 mark_jump_label for additional information). */
      /* There's no reason for current users to emit jump-insns with
	 such a LABEL_REF, so we don't have to handle REG_LABEL_TARGET
	 notes.  */
4551 gcc_assert (!JUMP_P (insn));
      REG_NOTES (insn)
	= gen_rtx_INSN_LIST (REG_LABEL_OPERAND, XEXP (x, 0),
			     REG_NOTES (insn));
4555 if (LABEL_P (XEXP (x, 0)))
4556 LABEL_NUSES (XEXP (x, 0))++;
4561 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4564 add_label_notes (XEXP (x, i), insn);
4565 else if (fmt[i] == 'E')
4566 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4567 add_label_notes (XVECEXP (x, i, j), insn);
4571 /* Compute transparent outgoing information for each block.
4573 An expression is transparent to an edge unless it is killed by
4574 the edge itself. This can only happen with abnormal control flow,
4575 when the edge is traversed through a call. This happens with
4576 non-local labels and exceptions.
4578 This would not be necessary if we split the edge. While this is
4579 normally impossible for abnormal critical edges, with some effort
4580 it should be possible with exception handling, since we still have
4581 control over which handler should be invoked. But due to increased
4582 EH table sizes, this may not be worthwhile. */
4585 compute_transpout (void)
4591 sbitmap_vector_ones (transpout, last_basic_block);
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (! CALL_P (BB_END (bb)))
	continue;
4601 for (i = 0; i < expr_hash_table.size; i++)
4602 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4603 if (MEM_P (expr->expr))
4605 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4606 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb->index], expr->bitmap_index);
4617 /* Code Hoisting variables and subroutines. */
4619 /* Very busy expressions. */
4620 static sbitmap *hoist_vbein;
4621 static sbitmap *hoist_vbeout;
4623 /* Hoistable expressions. */
4624 static sbitmap *hoist_exprs;
/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.
4630 It's unclear if tail merging could be run in parallel with
4631 code hoisting. It would be nice. */
4633 /* Allocate vars used for code hoisting analysis. */
4636 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4638 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4639 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4640 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4642 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4643 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4644 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4645 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4648 /* Free vars used for code hoisting analysis. */
4651 free_code_hoist_mem (void)
4653 sbitmap_vector_free (antloc);
4654 sbitmap_vector_free (transp);
4655 sbitmap_vector_free (comp);
4657 sbitmap_vector_free (hoist_vbein);
4658 sbitmap_vector_free (hoist_vbeout);
4659 sbitmap_vector_free (hoist_exprs);
4660 sbitmap_vector_free (transpout);
4662 free_dominance_info (CDI_DOMINATORS);
4665 /* Compute the very busy expressions at entry/exit from each block.
4667 An expression is very busy if all paths from a given point
4668 compute the expression. */
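/* Restated as dataflow equations (a transcription of the loop below,
   not a new analysis):

     VBEOUT(bb) = intersection over successors S of bb of VBEIN(S)
     VBEIN(bb)  = ANTLOC(bb) | (TRANSP(bb) & VBEOUT(bb))

   starting from empty sets and iterated until no bit changes.  */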
4671 compute_code_hoist_vbeinout (void)
4673 int changed, passes;
4676 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4677 sbitmap_vector_zero (hoist_vbein, last_basic_block);
      /* We scan the blocks in the reverse order to speed up
	 the convergence.  */
4688 FOR_EACH_BB_REVERSE (bb)
4690 if (bb->next_bb != EXIT_BLOCK_PTR)
4691 sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
4692 hoist_vbein, bb->index);
	  changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index],
					      antloc[bb->index],
					      hoist_vbeout[bb->index],
					      transp[bb->index]);
4704 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
4707 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4710 compute_code_hoist_data (void)
4712 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4713 compute_transpout ();
4714 compute_code_hoist_vbeinout ();
4715 calculate_dominance_info (CDI_DOMINATORS);
4717 fprintf (dump_file, "\n");
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all* paths.  */
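/* As an illustration (assumed example, not from the sources): if some
   path from EXPR_BB to BB assigns to an operand of the expression, the
   expression is not invariant along that path; requiring computation or
   transparency on *every* path prevents hoisting it to EXPR_BB, whereas
   an *any*-path test, as used for PRE reachability, would permit it.  */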
4734 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4738 int visited_allocated_locally = 0;
4741 if (visited == NULL)
4743 visited_allocated_locally = 1;
4744 visited = XCNEWVEC (char, last_basic_block);
4747 FOR_EACH_EDGE (pred, ei, bb->preds)
4749 basic_block pred_bb = pred->src;
4751 if (pred->src == ENTRY_BLOCK_PTR)
4753 else if (pred_bb == expr_bb)
4755 else if (visited[pred_bb->index])
4758 /* Does this predecessor generate this expression? */
4759 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4761 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4767 visited[pred_bb->index] = 1;
	  if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
					   pred_bb, visited))
	    break;
  if (visited_allocated_locally)
    free (visited);
4776 return (pred == NULL);
4779 /* Actually perform code hoisting. */
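/* A source-level sketch of the effect (an illustration under assumed
   input, not compiler output; p, x, y, a, b and the temporary t are
   hypothetical):

	if (p)			t = a + b;
	  x = a + b;		if (p)
	else		==>	  x = t;
	  y = a + b;		else
				  y = t;

   The expression is placed at the end of the dominating block and each
   dominated computation becomes a copy from the new pseudo.  */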
4784 basic_block bb, dominated;
4785 VEC (basic_block, heap) *domby;
4787 struct expr **index_map;
4790 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4792 /* Compute a mapping from expression number (`bitmap_index') to
4793 hash table entry. */
4795 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4796 for (i = 0; i < expr_hash_table.size; i++)
4797 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4798 index_map[expr->bitmap_index] = expr;
  /* Walk over each basic block looking for potentially hoistable
     expressions; nothing gets hoisted from the entry block.  */
4805 int insn_inserted_p;
4807 domby = get_dominated_by (CDI_DOMINATORS, bb);
4808 /* Examine each expression that is very busy at the exit of this
4809 block. These are the potentially hoistable expressions. */
4810 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4814 if (TEST_BIT (hoist_vbeout[bb->index], i)
4815 && TEST_BIT (transpout[bb->index], i))
4817 /* We've found a potentially hoistable expression, now
4818 we look at every block BB dominates to see if it
4819 computes the expression. */
4820 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
4822 /* Ignore self dominance. */
4823 if (bb == dominated)
4825 /* We've found a dominated block, now see if it computes
4826 the busy expression and whether or not moving that
4827 expression to the "beginning" of that block is safe. */
4828 if (!TEST_BIT (antloc[dominated->index], i))
4831 /* Note if the expression would reach the dominated block
		 unimpaired if it was placed at the end of BB.
4834 Keep track of how many times this expression is hoistable
4835 from a dominated block into BB. */
4836 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4840 /* If we found more than one hoistable occurrence of this
4841 expression, then note it in the bitmap of expressions to
4842 hoist. It makes no sense to hoist things which are computed
4843 in only one BB, and doing so tends to pessimize register
4844 allocation. One could increase this value to try harder
4845 to avoid any possible code expansion due to register
4846 allocation issues; however experiments have shown that
4847 the vast majority of hoistable expressions are only movable
4848 from two successors, so raising this threshold is likely
4849 to nullify any benefit we get from code hoisting. */
4852 SET_BIT (hoist_exprs[bb->index], i);
4857 /* If we found nothing to hoist, then quit now. */
4860 VEC_free (basic_block, heap, domby);
4864 /* Loop over all the hoistable expressions. */
4865 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4867 /* We want to insert the expression into BB only once, so
4868 note when we've inserted it. */
4869 insn_inserted_p = 0;
4871 /* These tests should be the same as the tests above. */
4872 if (TEST_BIT (hoist_exprs[bb->index], i))
4874 /* We've found a potentially hoistable expression, now
4875 we look at every block BB dominates to see if it
4876 computes the expression. */
4877 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
4879 /* Ignore self dominance. */
4880 if (bb == dominated)
4883 /* We've found a dominated block, now see if it computes
4884 the busy expression and whether or not moving that
4885 expression to the "beginning" of that block is safe. */
4886 if (!TEST_BIT (antloc[dominated->index], i))
4889 /* The expression is computed in the dominated block and
4890 it would be safe to compute it at the start of the
4891 dominated block. Now we have to determine if the
4892 expression would reach the dominated block if it was
4893 placed at the end of BB. */
4894 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4896 struct expr *expr = index_map[i];
4897 struct occr *occr = expr->antic_occr;
4901 /* Find the right occurrence of this expression. */
	      while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
		occr = occr->next;

	      gcc_assert (occr);
	      insn = occr->insn;
4907 set = single_set (insn);
4910 /* Create a pseudo-reg to store the result of reaching
4911 expressions into. Get the mode for the new pseudo
4912 from the mode of the original destination pseudo. */
4913 if (expr->reaching_reg == NULL)
4915 = gen_reg_rtx_and_attrs (SET_DEST (set));
4917 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4919 occr->deleted_p = 1;
4920 if (!insn_inserted_p)
4922 insert_insn_end_basic_block (index_map[i], bb, 0);
4923 insn_inserted_p = 1;
4929 VEC_free (basic_block, heap, domby);
4935 /* Top level routine to perform one code hoisting (aka unification) pass
4937 Return nonzero if a change was made. */
4940 one_code_hoisting_pass (void)
4944 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4945 compute_hash_table (&expr_hash_table);
    dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);
4949 if (expr_hash_table.n_elems > 0)
4951 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
4952 compute_code_hoist_data ();
4954 free_code_hoist_mem ();
4957 free_hash_table (&expr_hash_table);
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to be
   taught how to move a load when it is killed only by a store to itself.

	    int i;
	    float a[10];

	    void foo (float scale)
	    {
	      for (i = 0; i < 10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

     The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge-based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

     Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
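/* Schematically (an illustration, not actual compiler output; t stands
   for the pseudo chosen as the reaching register for 'i'):

	for (t = 0; t < 10; t++)
	  a[t] *= scale;
	i = t;

   The hoisted load of 'i' becomes dead once the loop works on t, and the
   single remaining store of 'i' sits past the loop exit.  */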
4988 pre_ldst_expr_hash (const void *p)
4990 int do_not_record_p = 0;
4991 const struct ls_expr *x = p;
4992 return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
4996 pre_ldst_expr_eq (const void *p1, const void *p2)
4998 const struct ls_expr *ptr1 = p1, *ptr2 = p2;
4999 return expr_equiv_p (ptr1->pattern, ptr2->pattern);
5002 /* This will search the ldst list for a matching expression. If it
5003 doesn't find one, we create one and initialize it. */
static struct ls_expr *
ldst_entry (rtx x)
{
5008 int do_not_record_p = 0;
5009 struct ls_expr * ptr;
5014 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5015 NULL, /*have_reg_qty=*/false);
5018 slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
  if (*slot)
    return (struct ls_expr *) *slot;
5022 ptr = XNEW (struct ls_expr);
5024 ptr->next = pre_ldst_mems;
5027 ptr->pattern_regs = NULL_RTX;
5028 ptr->loads = NULL_RTX;
5029 ptr->stores = NULL_RTX;
5030 ptr->reaching_reg = NULL_RTX;
5033 ptr->hash_index = hash;
5034 pre_ldst_mems = ptr;
5040 /* Free up an individual ldst entry. */
5043 free_ldst_entry (struct ls_expr * ptr)
5045 free_INSN_LIST_list (& ptr->loads);
5046 free_INSN_LIST_list (& ptr->stores);
5051 /* Free up all memory associated with the ldst list. */
5054 free_ldst_mems (void)
5057 htab_delete (pre_ldst_table);
5058 pre_ldst_table = NULL;
5060 while (pre_ldst_mems)
5062 struct ls_expr * tmp = pre_ldst_mems;
5064 pre_ldst_mems = pre_ldst_mems->next;
5066 free_ldst_entry (tmp);
5069 pre_ldst_mems = NULL;
5072 /* Dump debugging info about the ldst list. */
5075 print_ldst_list (FILE * file)
5077 struct ls_expr * ptr;
5079 fprintf (file, "LDST list: \n");
5081 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5083 fprintf (file, " Pattern (%3d): ", ptr->index);
5085 print_rtl (file, ptr->pattern);
5087 fprintf (file, "\n Loads : ");
5090 print_rtl (file, ptr->loads);
5092 fprintf (file, "(nil)");
5094 fprintf (file, "\n Stores : ");
5097 print_rtl (file, ptr->stores);
5099 fprintf (file, "(nil)");
5101 fprintf (file, "\n\n");
5104 fprintf (file, "\n");
/* Return the entry for X in the list of ldst-only expressions, or NULL
   if there is no valid entry.  */
5109 static struct ls_expr *
5110 find_rtx_in_ldst (rtx x)
5114 if (!pre_ldst_table)
5117 slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
5118 if (!slot || ((struct ls_expr *)*slot)->invalid)
5123 /* Assign each element of the list of mems a monotonically increasing value. */
5126 enumerate_ldsts (void)
5128 struct ls_expr * ptr;
5131 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5137 /* Return first item in the list. */
5139 static inline struct ls_expr *
5140 first_ls_expr (void)
5142 return pre_ldst_mems;
5145 /* Return the next item in the list after the specified one. */
5147 static inline struct ls_expr *
next_ls_expr (struct ls_expr * ptr)
{
  return ptr->next;
}
5153 /* Load Motion for loads which only kill themselves. */
5155 /* Return true if x is a simple MEM operation, with no registers or
5156 side effects. These are the types of loads we consider for the
5157 ld_motion list, otherwise we let the usual aliasing take care of it. */
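/* Illustrative cases (assumed examples, not an exhaustive list): a
   fixed-address reference such as (mem:SI (symbol_ref ("i"))) passes the
   tests below, while a volatile MEM, a BLKmode MEM, or a MEM whose
   address mentions the stack pointer is rejected.  */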
5160 simple_mem (const_rtx x)
5165 if (MEM_VOLATILE_P (x))
5168 if (GET_MODE (x) == BLKmode)
  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;
5177 if (side_effects_p (x))
5180 /* Do not consider function arguments passed on stack. */
5181 if (reg_mentioned_p (stack_pointer_rtx, x))
5184 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias themselves.  If we miss a load, we will
   get the wrong value since gcse might common it and we won't know to
   fix it up.  */
5199 invalidate_any_buried_refs (rtx x)
5203 struct ls_expr * ptr;
5205 /* Invalidate it in the list. */
5206 if (MEM_P (x) && simple_mem (x))
      ptr = ldst_entry (x);
      ptr->invalid = 1;
5212 /* Recursively process the insn. */
5213 fmt = GET_RTX_FORMAT (GET_CODE (x));
5215 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5218 invalidate_any_buried_refs (XEXP (x, i));
5219 else if (fmt[i] == 'E')
5220 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5221 invalidate_any_buried_refs (XVECEXP (x, i, j));
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  'Simple'
   means MEM loads and stores to symbols, with no side effects and no
   registers in the expression.  For a MEM destination, we also check that
   the insn is still valid if we replace the destination with a REG, as is
   done in update_ld_motion_stores.  Any uses/defs which do not match these
   criteria are invalidated and trimmed out
5234 compute_ld_motion_mems (void)
5236 struct ls_expr * ptr;
5240 pre_ldst_mems = NULL;
5241 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5242 pre_ldst_expr_eq, NULL);
5246 FOR_BB_INSNS (bb, insn)
5250 if (GET_CODE (PATTERN (insn)) == SET)
5252 rtx src = SET_SRC (PATTERN (insn));
5253 rtx dest = SET_DEST (PATTERN (insn));
5255 /* Check for a simple LOAD... */
5256 if (MEM_P (src) && simple_mem (src))
5258 ptr = ldst_entry (src);
5260 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5266 /* Make sure there isn't a buried load somewhere. */
5267 invalidate_any_buried_refs (src);
	      /* Check for stores.  Don't worry about aliased ones; they
		 will block any movement we might do later.  We only care
		 about this exact pattern since those are the only
		 circumstances in which we would ignore the aliasing info.  */
5274 if (MEM_P (dest) && simple_mem (dest))
5276 ptr = ldst_entry (dest);
		  if (! MEM_P (src)
		      && GET_CODE (src) != ASM_OPERANDS
5280 /* Check for REG manually since want_to_gcse_p
5281 returns 0 for all REGs. */
5282 && can_assign_to_reg_p (src))
5283 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5289 invalidate_any_buried_refs (PATTERN (insn));
5295 /* Remove any references that have been either invalidated or are not in the
5296 expression list for pre gcse. */
5299 trim_ld_motion_mems (void)
5301 struct ls_expr * * last = & pre_ldst_mems;
5302 struct ls_expr * ptr = pre_ldst_mems;
5308 /* Delete if entry has been made invalid. */
5311 /* Delete if we cannot find this mem in the expression list. */
5312 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5314 for (expr = expr_hash_table.table[hash];
5316 expr = expr->next_same_hash)
5317 if (expr_equiv_p (expr->expr, ptr->pattern))
5321 expr = (struct expr *) 0;
5325 /* Set the expression field if we are keeping it. */
5333 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5334 free_ldst_entry (ptr);
5339 /* Show the world what we've found. */
5340 if (dump_file && pre_ldst_mems != NULL)
5341 print_ldst_list (dump_file);
5344 /* This routine will take an expression which we are replacing with
5345 a reaching register, and update any stores that are needed if
5346 that expression is in the ld_motion list. Stores are updated by
5347 copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */
5352 update_ld_motion_stores (struct expr * expr)
5354 struct ls_expr * mem_ptr;
5356 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
      /* We can try to find just the REACHED stores, but it shouldn't
	 matter to set the reaching reg everywhere...  some might be
	 dead and should be eliminated later.  */
5362 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5363 where reg is the reaching reg used in the load. We checked in
5364 compute_ld_motion_mems that we can replace (set mem expr) with
5365 (set reg expr) in that insn. */
5366 rtx list = mem_ptr->stores;
5368 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5370 rtx insn = XEXP (list, 0);
5371 rtx pat = PATTERN (insn);
5372 rtx src = SET_SRC (pat);
5373 rtx reg = expr->reaching_reg;
5376 /* If we've already copied it, continue. */
5377 if (expr->reaching_reg == src)
5382 fprintf (dump_file, "PRE: store updated with reaching reg ");
5383 print_rtl (dump_file, expr->reaching_reg);
5384 fprintf (dump_file, ":\n ");
5385 print_inline_rtx (dump_file, insn, 8);
5386 fprintf (dump_file, "\n");
	  copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
5390 new = emit_insn_before (copy, insn);
5391 record_one_set (REGNO (reg), new);
5392 SET_SRC (pat) = reg;
5393 df_insn_rescan (insn);
	  /* Un-recognize this pattern since it's probably different now.  */
5396 INSN_CODE (insn) = -1;
5397 gcse_create_count++;
5402 /* Store motion code. */
5404 #define ANTIC_STORE_LIST(x) ((x)->loads)
5405 #define AVAIL_STORE_LIST(x) ((x)->stores)
5406 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5408 /* This is used to communicate the target bitvector we want to use in the
5409 reg_set_info routine when called via the note_stores mechanism. */
5410 static int * regvec;
5412 /* And current insn, for the same routine. */
5413 static rtx compute_store_table_current_insn;
5415 /* Used in computing the reverse edge graph bit vectors. */
5416 static sbitmap * st_antloc;
5418 /* Global holding the number of store expressions we are dealing with. */
5419 static int num_stores;
/* Check whether we need to mark a register as set.  Called from
   note_stores.  */
5425 reg_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
5428 sbitmap bb_reg = data;
5430 if (GET_CODE (dest) == SUBREG)
5431 dest = SUBREG_REG (dest);
  if (REG_P (dest))
    {
      regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
      if (bb_reg)
	SET_BIT (bb_reg, REGNO (dest));
    }
/* Clear any mark that says that this insn sets DEST.  Called from
   note_stores.  */
5445 reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
5448 int *dead_vec = data;
5450 if (GET_CODE (dest) == SUBREG)
5451 dest = SUBREG_REG (dest);
  if (REG_P (dest)
      && dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5455 dead_vec[REGNO (dest)] = 0;
/* Return zero if some of the registers in list X are killed because
   of a set recorded in the array REGS_SET, indexed by register number.  */
5462 store_ops_ok (const_rtx x, int *regs_set)
  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
	return false;
    }
5476 /* Returns a list of registers mentioned in X. */
5478 extract_mentioned_regs (rtx x)
5480 return extract_mentioned_regs_helper (x, NULL_RTX);
/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */
5486 extract_mentioned_regs_helper (rtx x, rtx accum)
5492 /* Repeat is used to turn tail-recursion into iteration. */
5498 code = GET_CODE (x);
5502 return alloc_EXPR_LIST (0, x, accum);
5514 /* We do not run this function with arguments having side effects. */
5534 i = GET_RTX_LENGTH (code) - 1;
5535 fmt = GET_RTX_FORMAT (code);
5541 rtx tem = XEXP (x, i);
5543 /* If we are about to do the last recursive call
5544 needed at this level, change it into iteration. */
5551 accum = extract_mentioned_regs_helper (tem, accum);
5553 else if (fmt[i] == 'E')
5557 for (j = 0; j < XVECLEN (x, i); j++)
5558 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to which it fails to be
      available is stored in reaching_reg (LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
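/* A worked illustration on a hypothetical block (not from the sources):

	MEM[x] = a;	first store of the expression: added to
			ANTIC_STORE_LIST if nothing above kills it
	r = MEM[y];	if this load aliases MEM[x], availability dies
			here and the insn is remembered via
			LAST_AVAIL_CHECK_FAILURE
	MEM[x] = b;	not anticipatable (it is not the first), but it
			becomes the head of AVAIL_STORE_LIST if nothing
			below kills it

   Only the first store of an expression in a block can be anticipatable,
   and only the last surviving one is available at the block end.  */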
5588 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5590 struct ls_expr * ptr;
5592 int check_anticipatable, check_available;
5593 basic_block bb = BLOCK_FOR_INSN (insn);
  set = single_set (insn);
  if (!set)
    return;
5599 dest = SET_DEST (set);
5601 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5602 || GET_MODE (dest) == BLKmode)
5605 if (side_effects_p (dest))
  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
5611 if (flag_non_call_exceptions && may_trap_p (dest))
5614 /* Even if the destination cannot trap, the source may. In this case we'd
5615 need to handle updating the REG_EH_REGION note. */
5616 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instructions.  */
5624 if (!can_assign_to_reg_p (SET_SRC (set)))
5627 ptr = ldst_entry (dest);
5628 if (!ptr->pattern_regs)
5629 ptr->pattern_regs = extract_mentioned_regs (dest);
5631 /* Do not check for anticipatability if we either found one anticipatable
5632 store already, or tested for one and found out that it was killed. */
5633 check_anticipatable = 0;
5634 if (!ANTIC_STORE_LIST (ptr))
5635 check_anticipatable = 1;
5638 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
5641 check_anticipatable = 1;
5643 if (check_anticipatable)
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL_RTX;
      else
	tmp = insn;
5649 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5650 ANTIC_STORE_LIST (ptr));
5653 /* It is not necessary to check whether store is available if we did
5654 it successfully before; if we failed before, do not bother to check
5655 until we reach the insn that caused us to fail. */
5656 check_available = 0;
5657 if (!AVAIL_STORE_LIST (ptr))
5658 check_available = 1;
5661 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5662 if (BLOCK_FOR_INSN (tmp) != bb)
5663 check_available = 1;
5665 if (check_available)
      /* Check whether we have already reached the insn at which the check
	 failed last time.  */
5669 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5671 for (tmp = BB_END (bb);
5672 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;

	  if (tmp == insn)
	    check_available = 0;
5679 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
5683 if (!check_available)
5684 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5687 /* Find available and anticipatable stores. */
5690 compute_store_table (void)
5696 int *last_set_in, *already_set;
5697 struct ls_expr * ptr, **prev_next_ptr_ptr;
5699 max_gcse_regno = max_reg_num ();
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
					   max_gcse_regno);
5703 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
5705 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5706 pre_ldst_expr_eq, NULL);
5707 last_set_in = XCNEWVEC (int, max_gcse_regno);
5708 already_set = XNEWVEC (int, max_gcse_regno);
5710 /* Find all the stores we care about. */
5713 /* First compute the registers set in this block. */
5714 regvec = last_set_in;
5716 FOR_BB_INSNS (bb, insn)
5718 if (! INSN_P (insn))
5723 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5724 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5726 last_set_in[regno] = INSN_UID (insn);
5727 SET_BIT (reg_set_in_block[bb->index], regno);
5731 pat = PATTERN (insn);
5732 compute_store_table_current_insn = insn;
5733 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
5736 /* Now find the stores. */
5737 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5738 regvec = already_set;
5739 FOR_BB_INSNS (bb, insn)
5741 if (! INSN_P (insn))
5746 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5747 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5748 already_set[regno] = 1;
5751 pat = PATTERN (insn);
5752 note_stores (pat, reg_set_info, NULL);
5754 /* Now that we've marked regs, look for stores. */
5755 find_moveable_store (insn, already_set, last_set_in);
5757 /* Unmark regs that are no longer set. */
5758 compute_store_table_current_insn = insn;
5759 note_stores (pat, reg_clear_last_set, last_set_in);
5762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5763 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5764 && last_set_in[regno] == INSN_UID (insn))
5765 last_set_in[regno] = 0;
5769 #ifdef ENABLE_CHECKING
5770 /* last_set_in should now be all-zero. */
5771 for (regno = 0; regno < max_gcse_regno; regno++)
    gcc_assert (!last_set_in[regno]);
#endif
5775 /* Clear temporary marks. */
5776 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5778 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5779 if (ANTIC_STORE_LIST (ptr)
5780 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5781 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5785 /* Remove the stores that are not available anywhere, as there will
5786 be no opportunity to optimize them. */
5787 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5789 ptr = *prev_next_ptr_ptr)
5791 if (!AVAIL_STORE_LIST (ptr))
5793 *prev_next_ptr_ptr = ptr->next;
5794 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5795 free_ldst_entry (ptr);
5798 prev_next_ptr_ptr = &ptr->next;
5801 ret = enumerate_ldsts ();
5805 fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
5806 print_ldst_list (dump_file);
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */
5819 load_kills_store (const_rtx x, const_rtx store_pattern, int after)
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
			    rtx_addr_varies_p);
5828 /* Go through the entire insn X, looking for any loads which might alias
5829 STORE_PATTERN. Return true if found.
5830 AFTER is true if we are checking the case when STORE_PATTERN occurs
5831 after the insn X. */
5834 find_loads (const_rtx x, const_rtx store_pattern, int after)
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }
5852 /* Recursively process the insn. */
5853 fmt = GET_RTX_FORMAT (GET_CODE (x));
5855 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5858 ret |= find_loads (XEXP (x, i), store_pattern, after);
5859 else if (fmt[i] == 'E')
5860 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5861 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5867 store_killed_in_pat (const_rtx x, const_rtx pat, int after)
5869 if (GET_CODE (pat) == SET)
5871 rtx dest = SET_DEST (pat);
5873 if (GET_CODE (dest) == ZERO_EXTRACT)
5874 dest = XEXP (dest, 0);
5876 /* Check for memory stores to aliased objects. */
      if (MEM_P (dest)
	  && !expr_equiv_p (dest, x))
5882 if (output_dependence (dest, x))
5887 if (output_dependence (x, dest))
5893 if (find_loads (pat, x, after))
5899 /* Check if INSN kills the store pattern X (is aliased with it).
5900 AFTER is true if we are checking the case when store X occurs
5901 after the insn. Return true if it does. */
5904 store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
5906 const_rtx reg, base, note, pat;
5913 /* A normal or pure call might read from pattern,
5914 but a const call will not. */
      if (!RTL_CONST_CALL_P (insn))
	return true;
      /* But even a const call reads its parameters.  Check whether the
	 base of some of the registers used in the mem is the stack
	 pointer.  */
5920 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5922 base = find_base_term (XEXP (reg, 0));
	  if (!base
	      || (GET_CODE (base) == ADDRESS
		  && GET_MODE (base) == Pmode
		  && XEXP (base, 0) == stack_pointer_rtx))
	    return true;
5933 pat = PATTERN (insn);
5934 if (GET_CODE (pat) == SET)
5936 if (store_killed_in_pat (x, pat, after))
5939 else if (GET_CODE (pat) == PARALLEL)
5943 for (i = 0; i < XVECLEN (pat, 0); i++)
5944 if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
5947 else if (find_loads (PATTERN (insn), x, after))
5950 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5951 location aliased with X, then this insn kills X. */
5952 note = find_reg_equal_equiv_note (insn);
5955 note = XEXP (note, 0);
5957 /* However, if the note represents a must alias rather than a may
5958 alias relationship, then it does not kill X. */
5959 if (expr_equiv_p (note, x))
5962 /* See if there are any aliased loads in the note. */
5963 return find_loads (note, x, after);
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn in which it occurs.  */
5972 store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
5973 int *regs_set_after, rtx *fail_insn)
5975 rtx last = BB_END (bb), act;
5977 if (!store_ops_ok (x_regs, regs_set_after))
5979 /* We do not know where it will happen. */
5981 *fail_insn = NULL_RTX;
5985 /* Scan from the end, so that fail_insn is determined correctly. */
5986 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
5987 if (store_killed_in_insn (x, x_regs, act, false))
5997 /* Returns true if the expression X is loaded or clobbered on or before INSN
5998 within basic block BB. X_REGS is list of registers mentioned in X.
5999 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
6001 store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
6002 int *regs_set_before)
6004 rtx first = BB_HEAD (bb);
6006 if (!store_ops_ok (x_regs, regs_set_before))
6009 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
6010 if (store_killed_in_insn (x, x_regs, insn, true))
/* Fill in the available, anticipatable, transparent and kill vectors
   (ae_gen, st_antloc, transp, ae_kill), based on the lists of available
   and anticipatable stores.  */
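/* Note the direction reversal relative to expression PRE: since stores
   move towards the exit, ANTIC_STORE_LIST feeds st_antloc and
   AVAIL_STORE_LIST feeds ae_gen, and store_motion hands the four vectors
   to pre_edge_rev_lcm, which solves the LCM equations on the reversed
   flowgraph.  */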
6019 build_store_vectors (void)
6022 int *regs_set_in_block;
6024 struct ls_expr * ptr;
6027 /* Build the gen_vector. This is any store in the table which is not killed
6028 by aliasing later in its block. */
6029 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
6030 sbitmap_vector_zero (ae_gen, last_basic_block);
6032 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
6033 sbitmap_vector_zero (st_antloc, last_basic_block);
6035 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6037 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6039 insn = XEXP (st, 0);
6040 bb = BLOCK_FOR_INSN (insn);
	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
6046 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6048 rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
6050 fprintf (dump_file, "Removing redundant store:\n");
6051 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6054 SET_BIT (ae_gen[bb->index], ptr->index);
6057 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6059 insn = XEXP (st, 0);
6060 bb = BLOCK_FOR_INSN (insn);
6061 SET_BIT (st_antloc[bb->index], ptr->index);
6065 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6066 sbitmap_vector_zero (ae_kill, last_basic_block);
6068 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6069 sbitmap_vector_zero (transp, last_basic_block);
6070 regs_set_in_block = XNEWVEC (int, max_gcse_regno);
6074 for (regno = 0; regno < max_gcse_regno; regno++)
6075 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6077 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6079 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6080 bb, regs_set_in_block, NULL))
6082 /* It should not be necessary to consider the expression
6083 killed if it is both anticipatable and available. */
6084 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6085 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6086 SET_BIT (ae_kill[bb->index], ptr->index);
6089 SET_BIT (transp[bb->index], ptr->index);
6093 free (regs_set_in_block);
6097 dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
6098 dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
6099 dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
6100 dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
6104 /* Insert an instruction at the beginning of a basic block, and update
6105 the BB_HEAD if needed. */
6108 insert_insn_start_basic_block (rtx insn, basic_block bb)
6110 /* Insert at start of successor block. */
6111 rtx prev = PREV_INSN (BB_HEAD (bb));
6112 rtx before = BB_HEAD (bb);
6115 if (! LABEL_P (before)
6116 && !NOTE_INSN_BASIC_BLOCK_P (before))
6119 if (prev == BB_END (bb))
6121 before = NEXT_INSN (before);
6124 insn = emit_insn_after_noloc (insn, prev, bb);
6128 fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
6130 print_inline_rtx (dump_file, insn, 6);
6131 fprintf (dump_file, "\n");
6135 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6136 the memory reference, and E is the edge to insert it on. Returns nonzero
6137 if an edge insertion was performed. */
6140 insert_store (struct ls_expr * expr, edge e)
  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
6149 if (expr->reaching_reg == NULL_RTX)
6152 if (e->flags & EDGE_FAKE)
6155 reg = expr->reaching_reg;
6156 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6158 /* If we are inserting this expression on ALL predecessor edges of a BB,
6159 insert it at the start of the BB, and reset the insert bits on the other
6160 edges so we don't try to insert it on the other edges. */
6162 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6163 if (!(tmp->flags & EDGE_FAKE))
6165 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6167 gcc_assert (index != EDGE_INDEX_NO_EDGE);
6168 if (! TEST_BIT (pre_insert_map[index], expr->index))
6172 /* If tmp is NULL, we found an insertion on every edge, blank the
6173 insertion vector for these edges, and insert at the start of the BB. */
6174 if (!tmp && bb != EXIT_BLOCK_PTR)
6176 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6178 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6179 RESET_BIT (pre_insert_map[index], expr->index);
6181 insert_insn_start_basic_block (insn, bb);
  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one did not exist before.  */
6187 gcc_assert (!(e->flags & EDGE_ABNORMAL));
6189 insert_insn_on_edge (insn, e);
6193 fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6194 e->src->index, e->dest->index);
6195 print_inline_rtx (dump_file, insn, 6);
6196 fprintf (dump_file, "\n");
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.  The walk below is a
   depth-first search over the blocks reachable from BB, driven by an
   explicit stack of edge iterators.

   This could be rather expensive.  */
6208 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6210 edge_iterator *stack, ei;
6213 sbitmap visited = sbitmap_alloc (last_basic_block);
6214 rtx last, insn, note;
6215 rtx mem = smexpr->pattern;
6217 stack = XNEWVEC (edge_iterator, n_basic_blocks);
6219 ei = ei_start (bb->succs);
6221 sbitmap_zero (visited);
6223 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6231 sbitmap_free (visited);
6234 act = ei_edge (stack[--sp]);
6238 if (bb == EXIT_BLOCK_PTR
6239 || TEST_BIT (visited, bb->index))
6243 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6246 SET_BIT (visited, bb->index);
6248 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6250 for (last = ANTIC_STORE_LIST (smexpr);
6251 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6252 last = XEXP (last, 1))
6254 last = XEXP (last, 0);
6257 last = NEXT_INSN (BB_END (bb));
6259 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6262 note = find_reg_equal_equiv_note (insn);
6263 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6267 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6269 remove_note (insn, note);
6274 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6276 if (EDGE_COUNT (bb->succs) > 0)
6280 ei = ei_start (bb->succs);
6281 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6286 /* This routine will replace a store with a SET to a specified register. */
6289 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6291 rtx insn, mem, note, set, ptr;
6293 mem = smexpr->pattern;
6294 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6296 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6297 if (XEXP (ptr, 0) == del)
6299 XEXP (ptr, 0) = insn;
6303 /* Move the notes from the deleted insn to its replacement. */
6304 REG_NOTES (insn) = REG_NOTES (del);
6306 /* Emit the insn AFTER all the notes are transferred.
6307 This is cheaper since we avoid df rescanning for the note change. */
6308 insn = emit_insn_after (insn, del);
6313 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6314 print_inline_rtx (dump_file, del, 6);
6315 fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n ");
6316 print_inline_rtx (dump_file, insn, 6);
6317 fprintf (dump_file, "\n");
  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate if they are reached by this definition,
     so drop them.  */
6325 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6328 set = single_set (insn);
6331 if (expr_equiv_p (SET_DEST (set), mem))
6333 note = find_reg_equal_equiv_note (insn);
6334 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6338 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6340 remove_note (insn, note);
6342 remove_reachable_equiv_notes (bb, smexpr);
6346 /* Delete a store, but copy the value that would have been stored into
6347 the reaching_reg for later storing. */
6350 delete_store (struct ls_expr * expr, basic_block bb)
6354 if (expr->reaching_reg == NULL_RTX)
6355 expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);
6357 reg = expr->reaching_reg;
6359 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6362 if (BLOCK_FOR_INSN (del) == bb)
6364 /* We know there is only one since we deleted redundant
6365 ones during the available computation. */
6366 replace_store_insn (reg, del, bb, expr);
6372 /* Free memory used by store motion. */
6375 free_store_memory (void)
6380 sbitmap_vector_free (ae_gen);
6382 sbitmap_vector_free (ae_kill);
6384 sbitmap_vector_free (transp);
6386 sbitmap_vector_free (st_antloc);
6388 sbitmap_vector_free (pre_insert_map);
6390 sbitmap_vector_free (pre_delete_map);
6391 if (reg_set_in_block)
6392 sbitmap_vector_free (reg_set_in_block);
6394 ae_gen = ae_kill = transp = st_antloc = NULL;
6395 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6398 /* Perform store motion. Much like gcse, except we move expressions the
6399 other way by looking at the flowgraph in reverse. */
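/* Sketch of the driver below: compute_store_table collects candidate
   stores, build_store_vectors derives the local dataflow properties,
   pre_edge_rev_lcm solves the reverse LCM problem yielding
   pre_insert_map (stores to add on edges) and pre_delete_map (stores to
   remove from blocks), and insert_store/delete_store apply the result
   before commit_edge_insertions.  */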
6406 struct ls_expr * ptr;
6407 int update_flow = 0;
6411 fprintf (dump_file, "before store motion\n");
6412 print_rtl (dump_file, get_insns ());
6415 init_alias_analysis ();
6417 /* Find all the available and anticipatable stores. */
6418 num_stores = compute_store_table ();
6419 if (num_stores == 0)
6421 htab_delete (pre_ldst_table);
6422 pre_ldst_table = NULL;
6423 sbitmap_vector_free (reg_set_in_block);
6424 end_alias_analysis ();
6428 /* Now compute kill & transp vectors. */
6429 build_store_vectors ();
6430 add_noreturn_fake_exit_edges ();
6431 connect_infinite_loops_to_exit ();
6433 edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);
6437 /* Now we want to insert the new stores which are going to be needed. */
6438 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
      /* If any of the edges we have above are abnormal, we can't move this
	 store.  */
6442 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6443 if (TEST_BIT (pre_insert_map[x], ptr->index)
6444 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6449 if (dump_file != NULL)
6451 "Can't replace store %d: abnormal edge from %d to %d\n",
6452 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6453 INDEX_EDGE (edge_list, x)->dest->index);
      /* Delete the stores this expression makes redundant, and insert the
	 replacement stores computed above.  */
6460 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6461 delete_store (ptr, bb);
6463 for (x = 0; x < NUM_EDGES (edge_list); x++)
6464 if (TEST_BIT (pre_insert_map[x], ptr->index))
6465 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6469 commit_edge_insertions ();
6471 free_store_memory ();
6472 free_edge_list (edge_list);
6473 remove_fake_exit_edges ();
6474 end_alias_analysis ();
6478 /* Entry point for jump bypassing optimization pass. */
6485 /* We do not construct an accurate cfg in functions which call
6486 setjmp, so just punt to be safe. */
6487 if (cfun->calls_setjmp)
6490 /* Identify the basic block information for this function, including
6491 successors and predecessors. */
6492 max_gcse_regno = max_reg_num ();
6495 dump_flow_info (dump_file, dump_flags);
6497 /* Return if there's nothing to do, or it is too expensive. */
6498 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
6499 || is_too_expensive (_ ("jump bypassing disabled")))
6502 gcc_obstack_init (&gcse_obstack);
6505 /* We need alias. */
6506 init_alias_analysis ();
6508 /* Record where pseudo-registers are set. This data is kept accurate
6509 during each pass. ??? We could also record hard-reg information here
6510 [since it's unchanging], however it is currently done during hash table
6513 It may be tempting to compute MEM set information here too, but MEM sets
6514 will be subject to code motion one day and thus we need to compute
6515 information about memory sets when we build the hash tables. */
6517 alloc_reg_set_mem (max_gcse_regno);
6520 max_gcse_regno = max_reg_num ();
6522 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
6527 fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
6528 current_function_name (), n_basic_blocks);
6529 fprintf (dump_file, "%d bytes\n\n", bytes_used);
6532 obstack_free (&gcse_obstack, NULL);
6533 free_reg_set_mem ();
6535 /* We are finished with alias. */
6536 end_alias_analysis ();
6541 /* Return true if the graph is too expensive to optimize. PASS is the
6542 optimization about to be performed. */
6545 is_too_expensive (const char *pass)
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     thresholding the number of blocks, use something with more
     graceful degradation.  For example, with the limit below a
     function with 1,000 basic blocks is only rejected once it has
     more than 24,000 edges.  */
6556 if (n_edges > 20000 + n_basic_blocks * 4)
6558 warning (OPT_Wdisabled_optimization,
6559 "%s: %d basic blocks and %d edges/basic block",
6560 pass, n_basic_blocks, n_edges / n_basic_blocks);
6565 /* If allocating memory for the cprop bitmap would take up too much
6566 storage it's better just to disable the optimization. */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6571 warning (OPT_Wdisabled_optimization,
6572 "%s: %d basic blocks and %d registers",
6573 pass, n_basic_blocks, max_reg_num ());
6582 gate_handle_jump_bypass (void)
6584 return optimize > 0 && flag_gcse
6585 && dbg_cnt (jump_bypass);
6588 /* Perform jump bypassing and control flow optimizations. */
6590 rest_of_handle_jump_bypass (void)
6592 delete_unreachable_blocks ();
6593 if (bypass_jumps ())
6595 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6596 rebuild_jump_labels (get_insns ());
6602 struct rtl_opt_pass pass_jump_bypass =
6606 "bypass", /* name */
6607 gate_handle_jump_bypass, /* gate */
6608 rest_of_handle_jump_bypass, /* execute */
6611 0, /* static_pass_number */
6612 TV_BYPASS, /* tv_id */
6613 0, /* properties_required */
6614 0, /* properties_provided */
6615 0, /* properties_destroyed */
6616 0, /* todo_flags_start */
6618 TODO_ggc_collect | TODO_verify_flow /* todo_flags_finish */
6624 gate_handle_gcse (void)
6626 return optimize > 0 && flag_gcse
6632 rest_of_handle_gcse (void)
  int save_csb, save_cfj;
  int tem2 = 0, tem;
6636 tem = gcse_main (get_insns ());
6637 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6638 rebuild_jump_labels (get_insns ());
6639 save_csb = flag_cse_skip_blocks;
6640 save_cfj = flag_cse_follow_jumps;
6641 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
6645 if (flag_expensive_optimizations)
6647 timevar_push (TV_CSE);
6648 tem2 = cse_main (get_insns (), max_reg_num ());
6649 df_finish_pass (false);
6650 purge_all_dead_edges ();
6651 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6652 timevar_pop (TV_CSE);
6653 cse_not_expected = !flag_rerun_cse_after_loop;
  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up again.  */
6658 if (tem || tem2 == 2)
6660 timevar_push (TV_JUMP);
6661 rebuild_jump_labels (get_insns ());
6663 timevar_pop (TV_JUMP);
6668 flag_cse_skip_blocks = save_csb;
6669 flag_cse_follow_jumps = save_cfj;
6673 struct rtl_opt_pass pass_gcse =
6678 gate_handle_gcse, /* gate */
6679 rest_of_handle_gcse, /* execute */
6682 0, /* static_pass_number */
6683 TV_GCSE, /* tv_id */
6684 0, /* properties_required */
6685 0, /* properties_provided */
6686 0, /* properties_destroyed */
6687 0, /* todo_flags_start */
6688 TODO_df_finish | TODO_verify_rtl_sharing |
6690 TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
6695 #include "gt-gcse.h"