1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
4 2006, 2007, 2008 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 - reordering of memory allocation and freeing to be more space efficient
24 - do rough calc of how many regs are needed in each block, and a rough
25 calc of how many regs are available in each class and use that to
26 throttle back the code in cases where RTX_COST is minimal.
27 - a store to the same address as a load does not kill the load if the
28 source of the store is also the destination of the load. Handling this
29 allows more load motion, particularly out of loops.
30 - ability to realloc sbitmap vectors would allow one initial computation
31 of reg_set_in_block with only subsequent additions, rather than
32 recomputing it for each pass
36 /* References searched while implementing this.
38 Compilers Principles, Techniques and Tools
42 Global Optimization by Suppression of Partial Redundancies
44 communications of the acm, Vol. 22, Num. 2, Feb. 1979
46 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Stanford Ph.D. thesis, Dec. 1983
50 A Fast Algorithm for Code Movement Optimization
52 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
54 A Solution to a Problem with Morel and Renvoise's
55 Global Optimization by Suppression of Partial Redundancies
56 K-H Drechsler, M.P. Stadel
57 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
59 Practical Adaptation of the Global Optimization
60 Algorithm of Morel and Renvoise
62 ACM TOPLAS, Vol. 13, Num. 2. Apr. 1991
64 Efficiently Computing Static Single Assignment Form and the Control
66 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
67 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 J. Knoop, O. Ruthing, B. Steffen
71 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
73 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
74 Time for Reducible Flow Control
76 ACM Letters on Programming Languages and Systems,
77 Vol. 2, Num. 1-4, Mar-Dec 1993
79 An Efficient Representation for Sparse Sets
80 Preston Briggs, Linda Torczon
81 ACM Letters on Programming Languages and Systems,
82 Vol. 2, Num. 1-4, Mar-Dec 1993
84 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
85 K-H Drechsler, M.P. Stadel
86 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
88 Partial Dead Code Elimination
89 J. Knoop, O. Ruthing, B. Steffen
90 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
92 Effective Partial Redundancy Elimination
93 P. Briggs, K.D. Cooper
94 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
96 The Program Structure Tree: Computing Control Regions in Linear Time
97 R. Johnson, D. Pearson, K. Pingali
98 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
100 Optimal Code Motion: Theory and Practice
101 J. Knoop, O. Ruthing, B. Steffen
102 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
104 The power of assignment motion
105 J. Knoop, O. Ruthing, B. Steffen
106 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
108 Global code motion / global value numbering
110 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
112 Value Driven Redundancy Elimination
114 Rice University Ph.D. thesis, Apr. 1996
118 Massively Scalar Compiler Project, Rice University, Sep. 1996
120 High Performance Compilers for Parallel Computing
124 Advanced Compiler Design and Implementation
126 Morgan Kaufmann, 1997
128 Building an Optimizing Compiler
132 People wishing to speed up the code here should read:
133 Elimination Algorithms for Data Flow Analysis
134 B.G. Ryder, M.C. Paull
135 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
137 How to Analyze Large Programs Efficiently and Informatively
138 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
139 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
141 People wishing to do something different can find various possibilities
142 in the above papers and elsewhere.
147 #include "coretypes.h"
155 #include "hard-reg-set.h"
158 #include "insn-config.h"
160 #include "basic-block.h"
162 #include "function.h"
171 #include "tree-pass.h"
176 /* Propagate flow information through back edges and thus enable PRE's
177 moving loop invariant calculations out of loops.
179 Originally this tended to create worse overall code, but several
180 improvements during the development of PRE seem to have made following
181 back edges generally a win.
183 Note much of the loop invariant code motion done here would normally
184 be done by loop.c, which has more heuristics for when to move invariants
185 out of loops. At some point we might need to move some of those
186 heuristics into gcse.c. */
188 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
189 are a superset of those done by GCSE.
191 We perform the following steps:
193 1) Compute basic block information.
195 2) Compute table of places where registers are set.
197 3) Perform copy/constant propagation.
199 4) Perform global cse using lazy code motion if not optimizing
200 for size, or code hoisting if we are.
202 5) Perform another pass of copy/constant propagation.
204 Two passes of copy/constant propagation are done because the first one
205 enables more GCSE and the second one helps to clean up the copies that
206 GCSE creates. This is needed more for PRE than for Classic because Classic
207 GCSE will try to use an existing register containing the common
208 subexpression rather than create a new one. This is harder to do for PRE
209 because of the code motion (which Classic GCSE doesn't do).
211 Expressions we are interested in GCSE-ing are of the form
212 (set (pseudo-reg) (expression)).
213 Function want_to_gcse_p says what these are.
215 PRE handles moving invariant expressions out of loops (by treating them as
216 partially redundant).
218 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
219 assignment) based GVN (global value numbering). L. T. Simpson's paper
220 (Rice University) on value numbering is a useful reference for this.
222 **********************
224 We used to support multiple passes but there are diminishing returns in
225 doing so. The first pass usually makes 90% of the changes that are doable.
226 A second pass can make a few more changes made possible by the first pass.
227 Experiments show any further passes don't make enough changes to justify
230 A study of spec92 using an unlimited number of passes:
231 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
232 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
233 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
235 It was found doing copy propagation between each pass enables further
238 PRE is quite expensive in complicated functions because the DFA can take
239 a while to converge. Hence we only perform one pass. The parameter
240 max-gcse-passes can be modified if one wants to experiment.
242 **********************
244 The steps for PRE are:
246 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
248 2) Perform the data flow analysis for PRE.
250 3) Delete the redundant instructions
252 4) Insert the required copies [if any] that make the partially
253 redundant instructions fully redundant.
255 5) For other reaching expressions, insert an instruction to copy the value
256 to a newly created pseudo that will reach the redundant instruction.
258 The deletion is done first so that when we do insertions we
259 know which pseudo reg to use.
261 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
262 argue it is not. The number of iterations for the algorithm to converge
263 is typically 2-4 so I don't view it as that expensive (relatively speaking).
265 PRE GCSE depends heavily on the second CSE pass to clean up the copies
266 we create. To make an expression reach the place where it's redundant,
267 the result of the expression is copied to a new register, and the redundant
268 expression is deleted by replacing it with this new register. Classic GCSE
269 doesn't have this problem as much as it computes the reaching defs of
270 each register in each block and thus can try to use an existing
273 /* GCSE global vars. */
275 /* Note whether or not we should run jump optimization after gcse. We
276 want to do this for two cases.
278 * If we changed any jumps via cprop.
280 * If we added any labels via edge splitting. */
281 static int run_jump_opt_after_gcse;
283 /* An obstack for our working variables. */
284 static struct obstack gcse_obstack;
286 struct reg_use {rtx reg_rtx; };
288 /* Hash table of expressions. */
292 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
294 /* Index in the available expression bitmaps. */
296 /* Next entry with the same hash. */
297 struct expr *next_same_hash;
298 /* List of anticipatable occurrences in basic blocks in the function.
299 An "anticipatable occurrence" is one that is the first occurrence in the
300 basic block, the operands are not modified in the basic block prior
301 to the occurrence and the output is not used between the start of
302 the block and the occurrence. */
303 struct occr *antic_occr;
304 /* List of available occurrence in basic blocks in the function.
305 An "available occurrence" is one that is the last occurrence in the
306 basic block and the operands are not modified by following statements in
307 the basic block [including this insn]. */
308 struct occr *avail_occr;
309 /* Non-null if the computation is PRE redundant.
310 The value is the newly created pseudo-reg to record a copy of the
311 expression in all the places that reach the redundant copy. */
315 /* Occurrence of an expression.
316 There is one per basic block. If a pattern appears more than once the
317 last appearance is used [or first for anticipatable expressions]. */
321 /* Next occurrence of this expression. */
323 /* The insn that computes the expression. */
325 /* Nonzero if this [anticipatable] occurrence has been deleted. */
327 /* Nonzero if this [available] occurrence has been copied to
329 /* ??? This is mutually exclusive with deleted_p, so they could share
334 /* Expression and copy propagation hash tables.
335 Each hash table is an array of buckets.
336 ??? It is known that if it were an array of entries, structure elements
337 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
338 not clear whether in the final analysis a sufficient amount of memory would
339 be saved as the size of the available expression bitmaps would be larger
340 [one could build a mapping table without holes afterwards though].
341 Someday I'll perform the computation and figure it out. */
346 This is an array of `expr_hash_table_size' elements. */
349 /* Size of the hash table, in elements. */
352 /* Number of hash table elements. */
353 unsigned int n_elems;
355 /* Whether the table is expression of copy propagation one. */
359 /* Expression hash table. */
360 static struct hash_table expr_hash_table;
362 /* Copy propagation hash table. */
363 static struct hash_table set_hash_table;
365 /* Mapping of uids to cuids.
366 Only real insns get cuids. */
367 static int *uid_cuid;
369 /* Highest UID in UID_CUID. */
372 /* Get the cuid of an insn. */
373 #ifdef ENABLE_CHECKING
374 #define INSN_CUID(INSN) \
375 (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
377 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
380 /* Number of cuids. */
383 /* Maximum register number in function prior to doing gcse + 1.
384 Registers created during this pass have regno >= max_gcse_regno.
385 This is named with "gcse" to not collide with global of same name. */
386 static unsigned int max_gcse_regno;
388 /* Table of registers that are modified.
390 For each register, each element is a list of places where the pseudo-reg
393 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
394 requires knowledge of which blocks kill which regs [and thus could use
395 a bitmap instead of the lists `reg_set_table' uses].
397 `reg_set_table' and could be turned into an array of bitmaps (num-bbs x
398 num-regs) [however perhaps it may be useful to keep the data as is]. One
399 advantage of recording things this way is that `reg_set_table' is fairly
400 sparse with respect to pseudo regs but for hard regs could be fairly dense
401 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
402 up functions like compute_transp since in the case of pseudo-regs we only
403 need to iterate over the number of times a pseudo-reg is set, not over the
404 number of basic blocks [clearly there is a bit of a slow down in the cases
405 where a pseudo is set more than once in a block, however it is believed
406 that the net effect is to speed things up]. This isn't done for hard-regs
407 because recording call-clobbered hard-regs in `reg_set_table' at each
408 function call can consume a fair bit of memory, and iterating over
409 hard-regs stored this way in compute_transp will be more expensive. */
411 typedef struct reg_set
413 /* The next setting of this register. */
414 struct reg_set *next;
415 /* The index of the block where it was set. */
419 static reg_set **reg_set_table;
421 /* Size of `reg_set_table'.
422 The table starts out at max_gcse_regno + slop, and is enlarged as
424 static int reg_set_table_size;
426 /* Amount to grow `reg_set_table' by when it's full. */
427 #define REG_SET_TABLE_SLOP 100
429 /* This is a list of expressions which are MEMs and will be used by load
431 Load motion tracks MEMs which aren't killed by
432 anything except itself. (i.e., loads and stores to a single location).
433 We can then allow movement of these MEM refs with a little special
434 allowance. (all stores copy the same value to the reaching reg used
435 for the loads). This means all values used to store into memory must have
436 no side effects so we can re-issue the setter value.
437 Store Motion uses this structure as an expression table to track stores
438 which look interesting, and might be moveable towards the exit block. */
442 struct expr * expr; /* Gcse expression reference for LM. */
443 rtx pattern; /* Pattern of this mem. */
444 rtx pattern_regs; /* List of registers mentioned by the mem. */
445 rtx loads; /* INSN list of loads seen. */
446 rtx stores; /* INSN list of stores seen. */
447 struct ls_expr * next; /* Next in the list. */
448 int invalid; /* Invalid for some reason. */
449 int index; /* If it maps to a bitmap index. */
450 unsigned int hash_index; /* Index when in a hash table. */
451 rtx reaching_reg; /* Register to use when re-writing. */
454 /* Array of implicit set patterns indexed by basic block index. */
455 static rtx *implicit_sets;
457 /* Head of the list of load/store memory refs. */
458 static struct ls_expr * pre_ldst_mems = NULL;
460 /* Hashtable for the load/store memory refs. */
461 static htab_t pre_ldst_table = NULL;
463 /* Bitmap containing one bit for each register in the program.
464 Used when performing GCSE to track which registers have been set since
465 the start of the basic block. */
466 static regset reg_set_bitmap;
468 /* For each block, a bitmap of registers set in the block.
469 This is used by compute_transp.
470 It is computed during hash table computation and not by compute_sets
471 as it includes registers added since the last pass (or between cprop and
472 gcse) and it's currently not easy to realloc sbitmap vectors. */
473 static sbitmap *reg_set_in_block;
475 /* Array, indexed by basic block number for a list of insns which modify
476 memory within that block. */
477 static rtx * modify_mem_list;
478 static bitmap modify_mem_list_set;
480 /* This array parallels modify_mem_list, but is kept canonicalized. */
481 static rtx * canon_modify_mem_list;
483 /* Bitmap indexed by block numbers to record which blocks contain
485 static bitmap blocks_with_calls;
487 /* Various variables for statistics gathering. */
489 /* Memory used in a pass.
490 This isn't intended to be absolutely precise. Its intent is only
491 to keep an eye on memory usage. */
492 static int bytes_used;
494 /* GCSE substitutions made. */
495 static int gcse_subst_count;
496 /* Number of copy instructions created. */
497 static int gcse_create_count;
498 /* Number of local constants propagated. */
499 static int local_const_prop_count;
500 /* Number of local copies propagated. */
501 static int local_copy_prop_count;
502 /* Number of global constants propagated. */
503 static int global_const_prop_count;
504 /* Number of global copies propagated. */
505 static int global_copy_prop_count;
507 /* For available exprs */
508 static sbitmap *ae_kill, *ae_gen;
510 static void compute_can_copy (void);
511 static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
512 static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
513 static void *grealloc (void *, size_t);
514 static void *gcse_alloc (unsigned long);
515 static void alloc_gcse_mem (void);
516 static void free_gcse_mem (void);
517 static void alloc_reg_set_mem (int);
518 static void free_reg_set_mem (void);
519 static void record_one_set (int, rtx);
520 static void record_set_info (rtx, const_rtx, void *);
521 static void compute_sets (void);
522 static void hash_scan_insn (rtx, struct hash_table *, int);
523 static void hash_scan_set (rtx, rtx, struct hash_table *);
524 static void hash_scan_clobber (rtx, rtx, struct hash_table *);
525 static void hash_scan_call (rtx, rtx, struct hash_table *);
526 static int want_to_gcse_p (rtx);
527 static bool can_assign_to_reg_p (rtx);
528 static bool gcse_constant_p (const_rtx);
529 static int oprs_unchanged_p (const_rtx, const_rtx, int);
530 static int oprs_anticipatable_p (const_rtx, const_rtx);
531 static int oprs_available_p (const_rtx, const_rtx);
532 static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
533 struct hash_table *);
534 static void insert_set_in_table (rtx, rtx, struct hash_table *);
535 static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
536 static unsigned int hash_set (int, int);
537 static int expr_equiv_p (const_rtx, const_rtx);
538 static void record_last_reg_set_info (rtx, int);
539 static void record_last_mem_set_info (rtx);
540 static void record_last_set_info (rtx, const_rtx, void *);
541 static void compute_hash_table (struct hash_table *);
542 static void alloc_hash_table (int, struct hash_table *, int);
543 static void free_hash_table (struct hash_table *);
544 static void compute_hash_table_work (struct hash_table *);
545 static void dump_hash_table (FILE *, const char *, struct hash_table *);
546 static struct expr *lookup_set (unsigned int, struct hash_table *);
547 static struct expr *next_set (unsigned int, struct expr *);
548 static void reset_opr_set_tables (void);
549 static int oprs_not_set_p (const_rtx, const_rtx);
550 static void mark_call (rtx);
551 static void mark_set (rtx, rtx);
552 static void mark_clobber (rtx, rtx);
553 static void mark_oprs_set (rtx);
554 static void alloc_cprop_mem (int, int);
555 static void free_cprop_mem (void);
556 static void compute_transp (const_rtx, int, sbitmap *, int);
557 static void compute_transpout (void);
558 static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
559 struct hash_table *);
560 static void compute_cprop_data (void);
561 static void find_used_regs (rtx *, void *);
562 static int try_replace_reg (rtx, rtx, rtx);
563 static struct expr *find_avail_set (int, rtx);
564 static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
565 static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
566 static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
567 static void canon_list_insert (rtx, const_rtx, void *);
568 static int cprop_insn (rtx, int);
569 static int cprop (int);
570 static void find_implicit_sets (void);
571 static int one_cprop_pass (int, bool, bool);
572 static bool constprop_register (rtx, rtx, rtx, bool);
573 static struct expr *find_bypass_set (int, int);
574 static bool reg_killed_on_edge (const_rtx, const_edge);
575 static int bypass_block (basic_block, rtx, rtx);
576 static int bypass_conditional_jumps (void);
577 static void alloc_pre_mem (int, int);
578 static void free_pre_mem (void);
579 static void compute_pre_data (void);
580 static int pre_expr_reaches_here_p (basic_block, struct expr *,
582 static void insert_insn_end_basic_block (struct expr *, basic_block, int);
583 static void pre_insert_copy_insn (struct expr *, rtx);
584 static void pre_insert_copies (void);
585 static int pre_delete (void);
586 static int pre_gcse (void);
587 static int one_pre_gcse_pass (int);
588 static void add_label_notes (rtx, rtx);
589 static void alloc_code_hoist_mem (int, int);
590 static void free_code_hoist_mem (void);
591 static void compute_code_hoist_vbeinout (void);
592 static void compute_code_hoist_data (void);
593 static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
594 static void hoist_code (void);
595 static int one_code_hoisting_pass (void);
596 static rtx process_insert_insn (struct expr *);
597 static int pre_edge_insert (struct edge_list *, struct expr **);
598 static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
599 basic_block, char *);
600 static struct ls_expr * ldst_entry (rtx);
601 static void free_ldst_entry (struct ls_expr *);
602 static void free_ldst_mems (void);
603 static void print_ldst_list (FILE *);
604 static struct ls_expr * find_rtx_in_ldst (rtx);
605 static int enumerate_ldsts (void);
606 static inline struct ls_expr * first_ls_expr (void);
607 static inline struct ls_expr * next_ls_expr (struct ls_expr *);
608 static int simple_mem (const_rtx);
609 static void invalidate_any_buried_refs (rtx);
610 static void compute_ld_motion_mems (void);
611 static void trim_ld_motion_mems (void);
612 static void update_ld_motion_stores (struct expr *);
613 static void reg_set_info (rtx, const_rtx, void *);
614 static void reg_clear_last_set (rtx, const_rtx, void *);
615 static bool store_ops_ok (const_rtx, int *);
616 static rtx extract_mentioned_regs (rtx);
617 static rtx extract_mentioned_regs_helper (rtx, rtx);
618 static void find_moveable_store (rtx, int *, int *);
619 static int compute_store_table (void);
620 static bool load_kills_store (const_rtx, const_rtx, int);
621 static bool find_loads (const_rtx, const_rtx, int);
622 static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
623 static bool store_killed_after (const_rtx, const_rtx, const_rtx, const_basic_block, int *, rtx *);
624 static bool store_killed_before (const_rtx, const_rtx, const_rtx, const_basic_block, int *);
625 static void build_store_vectors (void);
626 static void insert_insn_start_basic_block (rtx, basic_block);
627 static int insert_store (struct ls_expr *, edge);
628 static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
629 static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
630 static void delete_store (struct ls_expr *, basic_block);
631 static void free_store_memory (void);
632 static void store_motion (void);
633 static void free_insn_expr_list_list (rtx *);
634 static void clear_modify_mem_tables (void);
635 static void free_modify_mem_tables (void);
636 static rtx gcse_emit_move_after (rtx, rtx, rtx);
637 static void local_cprop_find_used_regs (rtx *, void *);
638 static bool do_local_cprop (rtx, rtx, bool, rtx*);
639 static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
640 static void local_cprop_pass (bool);
641 static bool is_too_expensive (const char *);
644 /* Entry point for global common subexpression elimination.
645 F is the first instruction in the function. Return nonzero if a
649 gcse_main (rtx f ATTRIBUTE_UNUSED)
652 /* Bytes used at start of pass. */
653 int initial_bytes_used;
654 /* Maximum number of bytes used by a pass. */
656 /* Point to release obstack data from for each pass. */
657 char *gcse_obstack_bottom;
659 /* We do not construct an accurate cfg in functions which call
660 setjmp, so just punt to be safe. */
661 if (cfun->calls_setjmp)
664 /* Assume that we do not need to run jump optimizations after gcse. */
665 run_jump_opt_after_gcse = 0;
667 /* Identify the basic block information for this function, including
668 successors and predecessors. */
669 max_gcse_regno = max_reg_num ();
671 df_note_add_problem ();
675 dump_flow_info (dump_file, dump_flags);
677 /* Return if there's nothing to do, or it is too expensive. */
678 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
679 || is_too_expensive (_("GCSE disabled")))
682 gcc_obstack_init (&gcse_obstack);
686 init_alias_analysis ();
687 /* Record where pseudo-registers are set. This data is kept accurate
688 during each pass. ??? We could also record hard-reg information here
689 [since it's unchanging], however it is currently done during hash table
692 It may be tempting to compute MEM set information here too, but MEM sets
693 will be subject to code motion one day and thus we need to compute
694 information about memory sets when we build the hash tables. */
696 alloc_reg_set_mem (max_gcse_regno);
700 initial_bytes_used = bytes_used;
702 gcse_obstack_bottom = gcse_alloc (1);
704 while (changed && pass < MAX_GCSE_PASSES)
708 fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);
710 /* Initialize bytes_used to the space for the pred/succ lists,
711 and the reg_set_table data. */
712 bytes_used = initial_bytes_used;
714 /* Each pass may create new registers, so recalculate each time. */
715 max_gcse_regno = max_reg_num ();
719 /* Don't allow constant propagation to modify jumps
721 if (dbg_cnt (cprop1))
723 timevar_push (TV_CPROP1);
724 changed = one_cprop_pass (pass + 1, false, false);
725 timevar_pop (TV_CPROP1);
732 timevar_push (TV_PRE);
733 changed |= one_pre_gcse_pass (pass + 1);
734 /* We may have just created new basic blocks. Release and
735 recompute various things which are sized on the number of
739 free_modify_mem_tables ();
740 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
741 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
744 alloc_reg_set_mem (max_reg_num ());
746 run_jump_opt_after_gcse = 1;
747 timevar_pop (TV_PRE);
750 if (max_pass_bytes < bytes_used)
751 max_pass_bytes = bytes_used;
753 /* Free up memory, then reallocate for code hoisting. We can
754 not re-use the existing allocated memory because the tables
755 will not have info for the insns or registers created by
756 partial redundancy elimination. */
759 /* It does not make sense to run code hoisting unless we are optimizing
760 for code size -- it rarely makes programs faster, and can make
761 them bigger if we did partial redundancy elimination (when optimizing
762 for space, we don't run the partial redundancy algorithms). */
765 timevar_push (TV_HOIST);
766 max_gcse_regno = max_reg_num ();
768 changed |= one_code_hoisting_pass ();
771 if (max_pass_bytes < bytes_used)
772 max_pass_bytes = bytes_used;
773 timevar_pop (TV_HOIST);
778 fprintf (dump_file, "\n");
782 obstack_free (&gcse_obstack, gcse_obstack_bottom);
786 /* Do one last pass of copy propagation, including cprop into
787 conditional jumps. */
789 if (dbg_cnt (cprop2))
791 max_gcse_regno = max_reg_num ();
794 /* This time, go ahead and allow cprop to alter jumps. */
795 timevar_push (TV_CPROP2);
796 one_cprop_pass (pass + 1, true, true);
797 timevar_pop (TV_CPROP2);
803 fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
804 current_function_name (), n_basic_blocks);
805 fprintf (dump_file, "%d pass%s, %d bytes\n\n",
806 pass, pass > 1 ? "es" : "", max_pass_bytes);
809 obstack_free (&gcse_obstack, NULL);
812 /* We are finished with alias. */
813 end_alias_analysis ();
815 if (!optimize_size && flag_gcse_sm)
817 timevar_push (TV_LSM);
819 timevar_pop (TV_LSM);
822 /* Record where pseudo-registers are set. */
823 return run_jump_opt_after_gcse;
826 /* Misc. utilities. */
828 /* Nonzero for each mode that supports (set (reg) (reg)).
829 This is trivially true for integer and floating point values.
830 It may or may not be true for condition codes. */
831 static char can_copy[(int) NUM_MACHINE_MODES];
833 /* Compute which modes support reg/reg copy operations. */
836 compute_can_copy (void)
839 #ifndef AVOID_CCMODE_COPIES
842 memset (can_copy, 0, NUM_MACHINE_MODES);
845 for (i = 0; i < NUM_MACHINE_MODES; i++)
846 if (GET_MODE_CLASS (i) == MODE_CC)
848 #ifdef AVOID_CCMODE_COPIES
851 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
852 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
853 if (recog (PATTERN (insn), insn, NULL) >= 0)
863 /* Returns whether the mode supports reg/reg copy operations. */
866 can_copy_p (enum machine_mode mode)
868 static bool can_copy_init_p = false;
870 if (! can_copy_init_p)
873 can_copy_init_p = true;
876 return can_copy[mode] != 0;
879 /* Cover function to xmalloc to record bytes allocated. */
882 gmalloc (size_t size)
885 return xmalloc (size);
888 /* Cover function to xcalloc to record bytes allocated. */
891 gcalloc (size_t nelem, size_t elsize)
893 bytes_used += nelem * elsize;
894 return xcalloc (nelem, elsize);
897 /* Cover function to xrealloc.
898 We don't record the additional size since we don't know it.
899 It won't affect memory usage stats much anyway. */
902 grealloc (void *ptr, size_t size)
904 return xrealloc (ptr, size);
907 /* Cover function to obstack_alloc. */
910 gcse_alloc (unsigned long size)
913 return obstack_alloc (&gcse_obstack, size);
916 /* Allocate memory for the cuid mapping array,
917 and reg/memory set tracking tables.
919 This is called at the start of each pass. */
922 alloc_gcse_mem (void)
928 /* Find the largest UID and create a mapping from UIDs to CUIDs.
929 CUIDs are like UIDs except they increase monotonically, have no gaps,
930 and only apply to real insns.
931 (Actually, there are gaps, for insn that are not inside a basic block.
932 but we should never see those anyway, so this is OK.) */
934 max_uid = get_max_uid ();
935 uid_cuid = gcalloc (max_uid + 1, sizeof (int));
938 FOR_BB_INSNS (bb, insn)
941 uid_cuid[INSN_UID (insn)] = i++;
943 uid_cuid[INSN_UID (insn)] = i;
948 /* Allocate vars to track sets of regs. */
949 reg_set_bitmap = BITMAP_ALLOC (NULL);
951 /* Allocate vars to track sets of regs, memory per block. */
952 reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
953 /* Allocate array to keep a list of insns which modify memory in each
955 modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
956 canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
957 modify_mem_list_set = BITMAP_ALLOC (NULL);
958 blocks_with_calls = BITMAP_ALLOC (NULL);
961 /* Free memory allocated by alloc_gcse_mem. */
968 BITMAP_FREE (reg_set_bitmap);
970 sbitmap_vector_free (reg_set_in_block);
971 free_modify_mem_tables ();
972 BITMAP_FREE (modify_mem_list_set);
973 BITMAP_FREE (blocks_with_calls);
976 /* Compute the local properties of each recorded expression.
978 Local properties are those that are defined by the block, irrespective of
981 An expression is transparent in a block if its operands are not modified
984 An expression is computed (locally available) in a block if it is computed
985 at least once and expression would contain the same value if the
986 computation was moved to the end of the block.
988 An expression is locally anticipatable in a block if it is computed at
989 least once and expression would contain the same value if the computation
990 was moved to the beginning of the block.
992 We call this routine for cprop, pre and code hoisting. They all compute
993 basically the same information and thus can easily share this code.
995 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
996 properties. If NULL, then it is not necessary to compute or record that
999 TABLE controls which hash table to look at. If it is set hash table,
1000 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1004 compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
1005 struct hash_table *table)
1009 /* Initialize any bitmaps that were passed in. */
1013 sbitmap_vector_zero (transp, last_basic_block);
1015 sbitmap_vector_ones (transp, last_basic_block);
1019 sbitmap_vector_zero (comp, last_basic_block);
1021 sbitmap_vector_zero (antloc, last_basic_block);
1023 for (i = 0; i < table->size; i++)
1027 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1029 int indx = expr->bitmap_index;
1032 /* The expression is transparent in this block if it is not killed.
1033 We start by assuming all are transparent [none are killed], and
1034 then reset the bits for those that are. */
1036 compute_transp (expr->expr, indx, transp, table->set_p);
1038 /* The occurrences recorded in antic_occr are exactly those that
1039 we want to set to nonzero in ANTLOC. */
1041 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1043 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1045 /* While we're scanning the table, this is a good place to
1047 occr->deleted_p = 0;
1050 /* The occurrences recorded in avail_occr are exactly those that
1051 we want to set to nonzero in COMP. */
1053 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1055 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1057 /* While we're scanning the table, this is a good place to
1062 /* While we're scanning the table, this is a good place to
1064 expr->reaching_reg = 0;
1069 /* Register set information.
1071 `reg_set_table' records where each register is set or otherwise
1074 static struct obstack reg_set_obstack;
1077 alloc_reg_set_mem (int n_regs)
1079 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1080 reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));
1082 gcc_obstack_init (®_set_obstack);
1086 free_reg_set_mem (void)
1088 free (reg_set_table);
1089 obstack_free (®_set_obstack, NULL);
1092 /* Record REGNO in the reg_set table. */
1095 record_one_set (int regno, rtx insn)
1097 /* Allocate a new reg_set element and link it onto the list. */
1098 struct reg_set *new_reg_info;
1100 /* If the table isn't big enough, enlarge it. */
1101 if (regno >= reg_set_table_size)
1103 int new_size = regno + REG_SET_TABLE_SLOP;
1105 reg_set_table = grealloc (reg_set_table,
1106 new_size * sizeof (struct reg_set *));
1107 memset (reg_set_table + reg_set_table_size, 0,
1108 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1109 reg_set_table_size = new_size;
1112 new_reg_info = obstack_alloc (®_set_obstack, sizeof (struct reg_set));
1113 bytes_used += sizeof (struct reg_set);
1114 new_reg_info->bb_index = BLOCK_NUM (insn);
1115 new_reg_info->next = reg_set_table[regno];
1116 reg_set_table[regno] = new_reg_info;
1119 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1120 an insn. The DATA is really the instruction in which the SET is
1124 record_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
1126 rtx record_set_insn = (rtx) data;
1128 if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1129 record_one_set (REGNO (dest), record_set_insn);
1132 /* Scan the function and record each set of each pseudo-register.
1134 This is called once, at the start of the gcse pass. See the comments for
1135 `reg_set_table' for further documentation. */
1144 FOR_BB_INSNS (bb, insn)
1146 note_stores (PATTERN (insn), record_set_info, insn);
1149 /* Hash table support. */
1151 struct reg_avail_info
1153 basic_block last_bb;
1158 static struct reg_avail_info *reg_avail_info;
1159 static basic_block current_bb;
1162 /* See whether X, the source of a set, is something we want to consider for
1166 want_to_gcse_p (rtx x)
1169 /* On register stack architectures, don't GCSE constants from the
1170 constant pool, as the benefits are often swamped by the overhead
1171 of shuffling the register stack between basic blocks. */
1172 if (IS_STACK_MODE (GET_MODE (x)))
1173 x = avoid_constant_pool_reference (x);
1176 switch (GET_CODE (x))
1188 return can_assign_to_reg_p (x);
1192 /* Used internally by can_assign_to_reg_p. */
1194 static GTY(()) rtx test_insn;
1196 /* Return true if we can assign X to a pseudo register. */
1199 can_assign_to_reg_p (rtx x)
1201 int num_clobbers = 0;
1204 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1205 if (general_operand (x, GET_MODE (x)))
1207 else if (GET_MODE (x) == VOIDmode)
1210 /* Otherwise, check if we can make a valid insn from it. First initialize
1211 our test insn if we haven't already. */
1215 = make_insn_raw (gen_rtx_SET (VOIDmode,
1216 gen_rtx_REG (word_mode,
1217 FIRST_PSEUDO_REGISTER * 2),
1219 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1222 /* Now make an insn like the one we would make when GCSE'ing and see if
1224 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1225 SET_SRC (PATTERN (test_insn)) = x;
1226 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1227 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1230 /* Return nonzero if the operands of expression X are unchanged from the
1231 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1232 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1235 oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
1244 code = GET_CODE (x);
1249 struct reg_avail_info *info = ®_avail_info[REGNO (x)];
1251 if (info->last_bb != current_bb)
1254 return info->last_set < INSN_CUID (insn);
1256 return info->first_set >= INSN_CUID (insn);
1260 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1264 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1291 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1295 /* If we are about to do the last recursive call needed at this
1296 level, change it into iteration. This function is called enough
1299 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1301 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1304 else if (fmt[i] == 'E')
1305 for (j = 0; j < XVECLEN (x, i); j++)
1306 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1313 /* Used for communication between mems_conflict_for_gcse_p and
1314 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1315 conflict between two memory references. */
1316 static int gcse_mems_conflict_p;
1318 /* Used for communication between mems_conflict_for_gcse_p and
1319 load_killed_in_block_p. A memory reference for a load instruction,
1320 mems_conflict_for_gcse_p will see if a memory store conflicts with
1321 this memory load. */
1322 static const_rtx gcse_mem_operand;
1324 /* DEST is the output of an instruction. If it is a memory reference, and
1325 possibly conflicts with the load found in gcse_mem_operand, then set
1326 gcse_mems_conflict_p to a nonzero value. */
1329 mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
1330 void *data ATTRIBUTE_UNUSED)
1332 while (GET_CODE (dest) == SUBREG
1333 || GET_CODE (dest) == ZERO_EXTRACT
1334 || GET_CODE (dest) == STRICT_LOW_PART)
1335 dest = XEXP (dest, 0);
1337 /* If DEST is not a MEM, then it will not conflict with the load. Note
1338 that function calls are assumed to clobber memory, but are handled
1343 /* If we are setting a MEM in our list of specially recognized MEMs,
1344 don't mark as killed this time. */
1346 if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
1348 if (!find_rtx_in_ldst (dest))
1349 gcse_mems_conflict_p = 1;
1353 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1355 gcse_mems_conflict_p = 1;
1358 /* Return nonzero if the expression in X (a memory reference) is killed
1359 in block BB before or after the insn with the CUID in UID_LIMIT.
1360 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1363 To check the entire block, set UID_LIMIT to max_uid + 1 and
1367 load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x, int avail_p)
1369 rtx list_entry = modify_mem_list[bb->index];
1371 /* If this is a readonly then we aren't going to be changing it. */
1372 if (MEM_READONLY_P (x))
1378 /* Ignore entries in the list that do not apply. */
1380 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1382 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1384 list_entry = XEXP (list_entry, 1);
1388 setter = XEXP (list_entry, 0);
1390 /* If SETTER is a call everything is clobbered. Note that calls
1391 to pure functions are never put on the list, so we need not
1392 worry about them. */
1393 if (CALL_P (setter))
1396 /* SETTER must be an INSN of some kind that sets memory. Call
1397 note_stores to examine each hunk of memory that is modified.
1399 The note_stores interface is pretty limited, so we have to
1400 communicate via global variables. Yuk. */
1401 gcse_mem_operand = x;
1402 gcse_mems_conflict_p = 0;
1403 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1404 if (gcse_mems_conflict_p)
1406 list_entry = XEXP (list_entry, 1);
1411 /* Return nonzero if the operands of expression X are unchanged from
1412 the start of INSN's basic block up to but not including INSN. */
1415 oprs_anticipatable_p (const_rtx x, const_rtx insn)
1417 return oprs_unchanged_p (x, insn, 0);
1420 /* Return nonzero if the operands of expression X are unchanged from
1421 INSN to the end of INSN's basic block. */
1424 oprs_available_p (const_rtx x, const_rtx insn)
1426 return oprs_unchanged_p (x, insn, 1);
1429 /* Hash expression X.
1431 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1432 indicating if a volatile operand is found or if the expression contains
1433 something we don't want to insert in the table. HASH_TABLE_SIZE is
1434 the current size of the hash table to be probed. */
1437 hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
1438 int hash_table_size)
1442 *do_not_record_p = 0;
1444 hash = hash_rtx (x, mode, do_not_record_p,
1445 NULL, /*have_reg_qty=*/false);
1446 return hash % hash_table_size;
1449 /* Hash a set of register REGNO.
1451 Sets are hashed on the register that is set. This simplifies the PRE copy
1454 ??? May need to make things more elaborate. Later, as necessary. */
1457 hash_set (int regno, int hash_table_size)
1462 return hash % hash_table_size;
1465 /* Return nonzero if exp1 is equivalent to exp2. */
1468 expr_equiv_p (const_rtx x, const_rtx y)
1470 return exp_equiv_p (x, y, 0, true);
1473 /* Insert expression X in INSN in the hash TABLE.
1474 If it is already present, record it as the last occurrence in INSN's
1477 MODE is the mode of the value X is being stored into.
1478 It is only used if X is a CONST_INT.
1480 ANTIC_P is nonzero if X is an anticipatable expression.
1481 AVAIL_P is nonzero if X is an available expression. */
1484 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1485 int avail_p, struct hash_table *table)
1487 int found, do_not_record_p;
1489 struct expr *cur_expr, *last_expr = NULL;
1490 struct occr *antic_occr, *avail_occr;
1492 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1494 /* Do not insert expression in table if it contains volatile operands,
1495 or if hash_expr determines the expression is something we don't want
1496 to or can't handle. */
1497 if (do_not_record_p)
1500 cur_expr = table->table[hash];
1503 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1505 /* If the expression isn't found, save a pointer to the end of
1507 last_expr = cur_expr;
1508 cur_expr = cur_expr->next_same_hash;
1513 cur_expr = gcse_alloc (sizeof (struct expr));
1514 bytes_used += sizeof (struct expr);
1515 if (table->table[hash] == NULL)
1516 /* This is the first pattern that hashed to this index. */
1517 table->table[hash] = cur_expr;
1519 /* Add EXPR to end of this hash chain. */
1520 last_expr->next_same_hash = cur_expr;
1522 /* Set the fields of the expr element. */
1524 cur_expr->bitmap_index = table->n_elems++;
1525 cur_expr->next_same_hash = NULL;
1526 cur_expr->antic_occr = NULL;
1527 cur_expr->avail_occr = NULL;
1530 /* Now record the occurrence(s). */
1533 antic_occr = cur_expr->antic_occr;
1535 if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1539 /* Found another instance of the expression in the same basic block.
1540 Prefer the currently recorded one. We want the first one in the
1541 block and the block is scanned from start to end. */
1542 ; /* nothing to do */
1545 /* First occurrence of this expression in this basic block. */
1546 antic_occr = gcse_alloc (sizeof (struct occr));
1547 bytes_used += sizeof (struct occr);
1548 antic_occr->insn = insn;
1549 antic_occr->next = cur_expr->antic_occr;
1550 antic_occr->deleted_p = 0;
1551 cur_expr->antic_occr = antic_occr;
1557 avail_occr = cur_expr->avail_occr;
1559 if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
1561 /* Found another instance of the expression in the same basic block.
1562 Prefer this occurrence to the currently recorded one. We want
1563 the last one in the block and the block is scanned from start
1565 avail_occr->insn = insn;
1569 /* First occurrence of this expression in this basic block. */
1570 avail_occr = gcse_alloc (sizeof (struct occr));
1571 bytes_used += sizeof (struct occr);
1572 avail_occr->insn = insn;
1573 avail_occr->next = cur_expr->avail_occr;
1574 avail_occr->deleted_p = 0;
1575 cur_expr->avail_occr = avail_occr;
1580 /* Insert pattern X in INSN in the hash table.
1581 X is a SET of a reg to either another reg or a constant.
1582 If it is already present, record it as the last occurrence in INSN's
1586 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
1590 struct expr *cur_expr, *last_expr = NULL;
1591 struct occr *cur_occr;
1593 gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));
1595 hash = hash_set (REGNO (SET_DEST (x)), table->size);
1597 cur_expr = table->table[hash];
1600 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1602 /* If the expression isn't found, save a pointer to the end of
1604 last_expr = cur_expr;
1605 cur_expr = cur_expr->next_same_hash;
1610 cur_expr = gcse_alloc (sizeof (struct expr));
1611 bytes_used += sizeof (struct expr);
1612 if (table->table[hash] == NULL)
1613 /* This is the first pattern that hashed to this index. */
1614 table->table[hash] = cur_expr;
1616 /* Add EXPR to end of this hash chain. */
1617 last_expr->next_same_hash = cur_expr;
1619 /* Set the fields of the expr element.
1620 We must copy X because it can be modified when copy propagation is
1621 performed on its operands. */
1622 cur_expr->expr = copy_rtx (x);
1623 cur_expr->bitmap_index = table->n_elems++;
1624 cur_expr->next_same_hash = NULL;
1625 cur_expr->antic_occr = NULL;
1626 cur_expr->avail_occr = NULL;
1629 /* Now record the occurrence. */
1630 cur_occr = cur_expr->avail_occr;
1632 if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
1634 /* Found another instance of the expression in the same basic block.
1635 Prefer this occurrence to the currently recorded one. We want
1636 the last one in the block and the block is scanned from start
1638 cur_occr->insn = insn;
1642 /* First occurrence of this expression in this basic block. */
1643 cur_occr = gcse_alloc (sizeof (struct occr));
1644 bytes_used += sizeof (struct occr);
1646 cur_occr->insn = insn;
1647 cur_occr->next = cur_expr->avail_occr;
1648 cur_occr->deleted_p = 0;
1649 cur_expr->avail_occr = cur_occr;
1653 /* Determine whether the rtx X should be treated as a constant for
1654 the purposes of GCSE's constant propagation. */
1657 gcse_constant_p (const_rtx x)
1659 /* Consider a COMPARE of two integers constant. */
1660 if (GET_CODE (x) == COMPARE
1661 && GET_CODE (XEXP (x, 0)) == CONST_INT
1662 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1665 /* Consider a COMPARE of the same registers is a constant
1666 if they are not floating point registers. */
1667 if (GET_CODE(x) == COMPARE
1668 && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
1669 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
1670 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
1671 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
1674 return CONSTANT_P (x);
1677 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
1681 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
1683 rtx src = SET_SRC (pat);
1684 rtx dest = SET_DEST (pat);
1687 if (GET_CODE (src) == CALL)
1688 hash_scan_call (src, insn, table);
1690 else if (REG_P (dest))
1692 unsigned int regno = REGNO (dest);
1695 /* See if a REG_EQUAL note shows this equivalent to a simpler expression.
1697 This allows us to do a single GCSE pass and still eliminate
1698 redundant constants, addresses or other expressions that are
1699 constructed with multiple instructions.
1701 However, keep the original SRC if INSN is a simple reg-reg move. In
1702 In this case, there will almost always be a REG_EQUAL note on the
1703 insn that sets SRC. By recording the REG_EQUAL value here as SRC
1704 for INSN, we miss copy propagation opportunities and we perform the
1705 same PRE GCSE operation repeatedly on the same REG_EQUAL value if we
1706 do more than one PRE GCSE pass.
1708 Note that this does not impede profitable constant propagations. We
1709 "look through" reg-reg sets in lookup_avail_set. */
1710 note = find_reg_equal_equiv_note (insn);
1712 && REG_NOTE_KIND (note) == REG_EQUAL
1715 ? gcse_constant_p (XEXP (note, 0))
1716 : want_to_gcse_p (XEXP (note, 0))))
1717 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
1719 /* Only record sets of pseudo-regs in the hash table. */
1721 && regno >= FIRST_PSEUDO_REGISTER
1722 /* Don't GCSE something if we can't do a reg/reg copy. */
1723 && can_copy_p (GET_MODE (dest))
1724 /* GCSE commonly inserts instruction after the insn. We can't
1725 do that easily for EH_REGION notes so disable GCSE on these
1727 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1728 /* Is SET_SRC something we want to gcse? */
1729 && want_to_gcse_p (src)
1730 /* Don't CSE a nop. */
1731 && ! set_noop_p (pat)
1732 /* Don't GCSE if it has attached REG_EQUIV note.
1733 At this point this only function parameters should have
1734 REG_EQUIV notes and if the argument slot is used somewhere
1735 explicitly, it means address of parameter has been taken,
1736 so we should not extend the lifetime of the pseudo. */
1737 && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
1739 /* An expression is not anticipatable if its operands are
1740 modified before this insn or if this is not the only SET in
1741 this insn. The latter condition does not have to mean that
1742 SRC itself is not anticipatable, but we just will not be
1743 able to handle code motion of insns with multiple sets. */
1744 int antic_p = oprs_anticipatable_p (src, insn)
1745 && !multiple_sets (insn);
1746 /* An expression is not available if its operands are
1747 subsequently modified, including this insn. It's also not
1748 available if this is a branch, because we can't insert
1749 a set after the branch. */
1750 int avail_p = (oprs_available_p (src, insn)
1751 && ! JUMP_P (insn));
1753 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
1756 /* Record sets for constant/copy propagation. */
1757 else if (table->set_p
1758 && regno >= FIRST_PSEUDO_REGISTER
1760 && REGNO (src) >= FIRST_PSEUDO_REGISTER
1761 && can_copy_p (GET_MODE (dest))
1762 && REGNO (src) != regno)
1763 || gcse_constant_p (src))
1764 /* A copy is not available if its src or dest is subsequently
1765 modified. Here we want to search from INSN+1 on, but
1766 oprs_available_p searches from INSN on. */
1767 && (insn == BB_END (BLOCK_FOR_INSN (insn))
1768 || (tmp = next_nonnote_insn (insn)) == NULL_RTX
1769 || BLOCK_FOR_INSN (tmp) != BLOCK_FOR_INSN (insn)
1770 || oprs_available_p (pat, tmp)))
1771 insert_set_in_table (pat, insn, table);
1773 /* In case of store we want to consider the memory value as available in
1774 the REG stored in that memory. This makes it possible to remove
1775 redundant loads from due to stores to the same location. */
1776 else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
1778 unsigned int regno = REGNO (src);
1780 /* Do not do this for constant/copy propagation. */
1782 /* Only record sets of pseudo-regs in the hash table. */
1783 && regno >= FIRST_PSEUDO_REGISTER
1784 /* Don't GCSE something if we can't do a reg/reg copy. */
1785 && can_copy_p (GET_MODE (src))
1786 /* GCSE commonly inserts instruction after the insn. We can't
1787 do that easily for EH_REGION notes so disable GCSE on these
1789 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1790 /* Is SET_DEST something we want to gcse? */
1791 && want_to_gcse_p (dest)
1792 /* Don't CSE a nop. */
1793 && ! set_noop_p (pat)
1794 /* Don't GCSE if it has attached REG_EQUIV note.
1795 At this point this only function parameters should have
1796 REG_EQUIV notes and if the argument slot is used somewhere
1797 explicitly, it means address of parameter has been taken,
1798 so we should not extend the lifetime of the pseudo. */
1799 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
1800 || ! MEM_P (XEXP (note, 0))))
1802 /* Stores are never anticipatable. */
1804 /* An expression is not available if its operands are
1805 subsequently modified, including this insn. It's also not
1806 available if this is a branch, because we can't insert
1807 a set after the branch. */
1808 int avail_p = oprs_available_p (dest, insn)
1811 /* Record the memory expression (DEST) in the hash table. */
1812 insert_expr_in_table (dest, GET_MODE (dest), insn,
1813 antic_p, avail_p, table);
1819 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1820 struct hash_table *table ATTRIBUTE_UNUSED)
1822 /* Currently nothing to do. */
1826 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
1827 struct hash_table *table ATTRIBUTE_UNUSED)
1829 /* Currently nothing to do. */
1832 /* Process INSN and add hash table entries as appropriate.
1834 Only available expressions that set a single pseudo-reg are recorded.
1836 Single sets in a PARALLEL could be handled, but it's an extra complication
1837 that isn't dealt with right now. The trick is handling the CLOBBERs that
1838 are also in the PARALLEL. Later.
1840 If SET_P is nonzero, this is for the assignment hash table,
1841 otherwise it is for the expression hash table.
1842 If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
1843 not record any expressions. */
1846 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
1848 rtx pat = PATTERN (insn);
1851 if (in_libcall_block)
1854 /* Pick out the sets of INSN and for other forms of instructions record
1855 what's been modified. */
1857 if (GET_CODE (pat) == SET)
1858 hash_scan_set (pat, insn, table);
1859 else if (GET_CODE (pat) == PARALLEL)
1860 for (i = 0; i < XVECLEN (pat, 0); i++)
1862 rtx x = XVECEXP (pat, 0, i);
1864 if (GET_CODE (x) == SET)
1865 hash_scan_set (x, insn, table);
1866 else if (GET_CODE (x) == CLOBBER)
1867 hash_scan_clobber (x, insn, table);
1868 else if (GET_CODE (x) == CALL)
1869 hash_scan_call (x, insn, table);
1872 else if (GET_CODE (pat) == CLOBBER)
1873 hash_scan_clobber (pat, insn, table);
1874 else if (GET_CODE (pat) == CALL)
1875 hash_scan_call (pat, insn, table);
1879 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
1882 /* Flattened out table, so it's printed in proper order. */
1883 struct expr **flat_table;
1884 unsigned int *hash_val;
1887 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
1888 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
1890 for (i = 0; i < (int) table->size; i++)
1891 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1893 flat_table[expr->bitmap_index] = expr;
1894 hash_val[expr->bitmap_index] = i;
1897 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
1898 name, table->size, table->n_elems);
1900 for (i = 0; i < (int) table->n_elems; i++)
1901 if (flat_table[i] != 0)
1903 expr = flat_table[i];
1904 fprintf (file, "Index %d (hash value %d)\n ",
1905 expr->bitmap_index, hash_val[i]);
1906 print_rtl (file, expr->expr);
1907 fprintf (file, "\n");
1910 fprintf (file, "\n");
1916 /* Record register first/last/block set information for REGNO in INSN.
1918 first_set records the first place in the block where the register
1919 is set and is used to compute "anticipatability".
1921 last_set records the last place in the block where the register
1922 is set and is used to compute "availability".
1924 last_bb records the block for which first_set and last_set are
1925 valid, as a quick test to invalidate them.
1927 reg_set_in_block records whether the register is set in the block
1928 and is used to compute "transparency". */
1931 record_last_reg_set_info (rtx insn, int regno)
1933 struct reg_avail_info *info = ®_avail_info[regno];
1934 int cuid = INSN_CUID (insn);
1936 info->last_set = cuid;
1937 if (info->last_bb != current_bb)
1939 info->last_bb = current_bb;
1940 info->first_set = cuid;
1941 SET_BIT (reg_set_in_block[current_bb->index], regno);
1946 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1947 Note we store a pair of elements in the list, so they have to be
1948 taken off pairwise. */
1951 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx unused1 ATTRIBUTE_UNUSED,
1954 rtx dest_addr, insn;
1957 while (GET_CODE (dest) == SUBREG
1958 || GET_CODE (dest) == ZERO_EXTRACT
1959 || GET_CODE (dest) == STRICT_LOW_PART)
1960 dest = XEXP (dest, 0);
1962 /* If DEST is not a MEM, then it will not conflict with a load. Note
1963 that function calls are assumed to clobber memory, but are handled
1969 dest_addr = get_addr (XEXP (dest, 0));
1970 dest_addr = canon_rtx (dest_addr);
1971 insn = (rtx) v_insn;
1972 bb = BLOCK_NUM (insn);
1974 canon_modify_mem_list[bb] =
1975 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
1976 canon_modify_mem_list[bb] =
1977 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
1980 /* Record memory modification information for INSN. We do not actually care
1981 about the memory location(s) that are set, or even how they are set (consider
1982 a CALL_INSN). We merely need to record which insns modify memory. */
1985 record_last_mem_set_info (rtx insn)
1987 int bb = BLOCK_NUM (insn);
1989 /* load_killed_in_block_p will handle the case of calls clobbering
1991 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
1992 bitmap_set_bit (modify_mem_list_set, bb);
1996 /* Note that traversals of this loop (other than for free-ing)
1997 will break after encountering a CALL_INSN. So, there's no
1998 need to insert a pair of items, as canon_list_insert does. */
1999 canon_modify_mem_list[bb] =
2000 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2001 bitmap_set_bit (blocks_with_calls, bb);
2004 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2007 /* Called from compute_hash_table via note_stores to handle one
2008 SET or CLOBBER in an insn. DATA is really the instruction in which
2009 the SET is taking place. */
2012 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
2014 rtx last_set_insn = (rtx) data;
2016 if (GET_CODE (dest) == SUBREG)
2017 dest = SUBREG_REG (dest);
2020 record_last_reg_set_info (last_set_insn, REGNO (dest));
2021 else if (MEM_P (dest)
2022 /* Ignore pushes, they clobber nothing. */
2023 && ! push_operand (dest, GET_MODE (dest)))
2024 record_last_mem_set_info (last_set_insn);
2027 /* Top level function to create an expression or assignment hash table.
2029 Expression entries are placed in the hash table if
2030 - they are of the form (set (pseudo-reg) src),
2031 - src is something we want to perform GCSE on,
2032 - none of the operands are subsequently modified in the block
2034 Assignment entries are placed in the hash table if
2035 - they are of the form (set (pseudo-reg) src),
2036 - src is something we want to perform const/copy propagation on,
2037 - none of the operands or target are subsequently modified in the block
2039 Currently src must be a pseudo-reg or a const_int.
2041 TABLE is the table computed. */
2044 compute_hash_table_work (struct hash_table *table)
2048 /* While we compute the hash table we also compute a bit array of which
2049 registers are set in which blocks.
2050 ??? This isn't needed during const/copy propagation, but it's cheap to
2052 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2054 /* re-Cache any INSN_LIST nodes we have allocated. */
2055 clear_modify_mem_tables ();
2056 /* Some working arrays used to track first and last set in each block. */
2057 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2059 for (i = 0; i < max_gcse_regno; ++i)
2060 reg_avail_info[i].last_bb = NULL;
2062 FOR_EACH_BB (current_bb)
2066 int in_libcall_block;
2068 /* First pass over the instructions records information used to
2069 determine when registers and memory are first and last set.
2070 ??? hard-reg reg_set_in_block computation
2071 could be moved to compute_sets since they currently don't change. */
2073 FOR_BB_INSNS (current_bb, insn)
2075 if (! INSN_P (insn))
2080 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2081 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2082 record_last_reg_set_info (insn, regno);
2087 note_stores (PATTERN (insn), record_last_set_info, insn);
2090 /* Insert implicit sets in the hash table. */
2092 && implicit_sets[current_bb->index] != NULL_RTX)
2093 hash_scan_set (implicit_sets[current_bb->index],
2094 BB_HEAD (current_bb), table);
2096 /* The next pass builds the hash table. */
2097 in_libcall_block = 0;
2098 FOR_BB_INSNS (current_bb, insn)
2101 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2102 in_libcall_block = 1;
2103 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2104 in_libcall_block = 0;
2105 hash_scan_insn (insn, table, in_libcall_block);
2106 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2107 in_libcall_block = 0;
2111 free (reg_avail_info);
2112 reg_avail_info = NULL;
2115 /* Allocate space for the set/expr hash TABLE.
2116 N_INSNS is the number of instructions in the function.
2117 It is used to determine the number of buckets to use.
2118 SET_P determines whether set or expression table will
2122 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2126 table->size = n_insns / 4;
2127 if (table->size < 11)
2130 /* Attempt to maintain efficient use of hash table.
2131 Making it an odd number is simplest for now.
2132 ??? Later take some measurements. */
2134 n = table->size * sizeof (struct expr *);
2135 table->table = gmalloc (n);
2136 table->set_p = set_p;
2139 /* Free things allocated by alloc_hash_table. */
2142 free_hash_table (struct hash_table *table)
2144 free (table->table);
2147 /* Compute the hash TABLE for doing copy/const propagation or
2148 expression hash table. */
2151 compute_hash_table (struct hash_table *table)
2153 /* Initialize count of number of entries in hash table. */
2155 memset (table->table, 0, table->size * sizeof (struct expr *));
2157 compute_hash_table_work (table);
2160 /* Expression tracking support. */
2162 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2163 table entry, or NULL if not found. */
2165 static struct expr *
2166 lookup_set (unsigned int regno, struct hash_table *table)
2168 unsigned int hash = hash_set (regno, table->size);
2171 expr = table->table[hash];
2173 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2174 expr = expr->next_same_hash;
2179 /* Return the next entry for REGNO in list EXPR. */
2181 static struct expr *
2182 next_set (unsigned int regno, struct expr *expr)
2185 expr = expr->next_same_hash;
2186 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2191 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2192 types may be mixed. */
2195 free_insn_expr_list_list (rtx *listp)
2199 for (list = *listp; list ; list = next)
2201 next = XEXP (list, 1);
2202 if (GET_CODE (list) == EXPR_LIST)
2203 free_EXPR_LIST_node (list);
2205 free_INSN_LIST_node (list);
2211 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2213 clear_modify_mem_tables (void)
2218 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2220 free_INSN_LIST_list (modify_mem_list + i);
2221 free_insn_expr_list_list (canon_modify_mem_list + i);
2223 bitmap_clear (modify_mem_list_set);
2224 bitmap_clear (blocks_with_calls);
2227 /* Release memory used by modify_mem_list_set. */
2230 free_modify_mem_tables (void)
2232 clear_modify_mem_tables ();
2233 free (modify_mem_list);
2234 free (canon_modify_mem_list);
2235 modify_mem_list = 0;
2236 canon_modify_mem_list = 0;
2239 /* Reset tables used to keep track of what's still available [since the
2240 start of the block]. */
2243 reset_opr_set_tables (void)
2245 /* Maintain a bitmap of which regs have been set since beginning of
2247 CLEAR_REG_SET (reg_set_bitmap);
2249 /* Also keep a record of the last instruction to modify memory.
2250 For now this is very trivial, we only record whether any memory
2251 location has been modified. */
2252 clear_modify_mem_tables ();
2255 /* Return nonzero if the operands of X are not set before INSN in
2256 INSN's basic block. */
2259 oprs_not_set_p (const_rtx x, const_rtx insn)
2268 code = GET_CODE (x);
2285 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2286 INSN_CUID (insn), x, 0))
2289 return oprs_not_set_p (XEXP (x, 0), insn);
2292 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2298 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2302 /* If we are about to do the last recursive call
2303 needed at this level, change it into iteration.
2304 This function is called enough to be worth it. */
2306 return oprs_not_set_p (XEXP (x, i), insn);
2308 if (! oprs_not_set_p (XEXP (x, i), insn))
2311 else if (fmt[i] == 'E')
2312 for (j = 0; j < XVECLEN (x, i); j++)
2313 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2320 /* Mark things set by a CALL. */
2323 mark_call (rtx insn)
2325 if (! RTL_CONST_OR_PURE_CALL_P (insn))
2326 record_last_mem_set_info (insn);
2329 /* Mark things set by a SET. */
2332 mark_set (rtx pat, rtx insn)
2334 rtx dest = SET_DEST (pat);
2336 while (GET_CODE (dest) == SUBREG
2337 || GET_CODE (dest) == ZERO_EXTRACT
2338 || GET_CODE (dest) == STRICT_LOW_PART)
2339 dest = XEXP (dest, 0);
2342 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2343 else if (MEM_P (dest))
2344 record_last_mem_set_info (insn);
2346 if (GET_CODE (SET_SRC (pat)) == CALL)
2350 /* Record things set by a CLOBBER. */
2353 mark_clobber (rtx pat, rtx insn)
2355 rtx clob = XEXP (pat, 0);
2357 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2358 clob = XEXP (clob, 0);
2361 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2363 record_last_mem_set_info (insn);
2366 /* Record things set by INSN.
2367 This data is used by oprs_not_set_p. */
2370 mark_oprs_set (rtx insn)
2372 rtx pat = PATTERN (insn);
2375 if (GET_CODE (pat) == SET)
2376 mark_set (pat, insn);
2377 else if (GET_CODE (pat) == PARALLEL)
2378 for (i = 0; i < XVECLEN (pat, 0); i++)
2380 rtx x = XVECEXP (pat, 0, i);
2382 if (GET_CODE (x) == SET)
2384 else if (GET_CODE (x) == CLOBBER)
2385 mark_clobber (x, insn);
2386 else if (GET_CODE (x) == CALL)
2390 else if (GET_CODE (pat) == CLOBBER)
2391 mark_clobber (pat, insn);
2392 else if (GET_CODE (pat) == CALL)
2397 /* Compute copy/constant propagation working variables. */
2399 /* Local properties of assignments. */
2400 static sbitmap *cprop_pavloc;
2401 static sbitmap *cprop_absaltered;
2403 /* Global properties of assignments (computed from the local properties). */
2404 static sbitmap *cprop_avin;
2405 static sbitmap *cprop_avout;
2407 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2408 basic blocks. N_SETS is the number of sets. */
2411 alloc_cprop_mem (int n_blocks, int n_sets)
2413 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2414 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2416 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2417 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2420 /* Free vars used by copy/const propagation. */
2423 free_cprop_mem (void)
2425 sbitmap_vector_free (cprop_pavloc);
2426 sbitmap_vector_free (cprop_absaltered);
2427 sbitmap_vector_free (cprop_avin);
2428 sbitmap_vector_free (cprop_avout);
2431 /* For each block, compute whether X is transparent. X is either an
2432 expression or an assignment [though we don't care which, for this context
2433 an assignment is treated as an expression]. For each block where an
2434 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2438 compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
2446 /* repeat is used to turn tail-recursion into iteration since GCC
2447 can't do it when there's no return value. */
2453 code = GET_CODE (x);
2459 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2462 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2463 SET_BIT (bmap[bb->index], indx);
2467 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2468 SET_BIT (bmap[r->bb_index], indx);
2473 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2476 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2477 RESET_BIT (bmap[bb->index], indx);
2481 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2482 RESET_BIT (bmap[r->bb_index], indx);
2489 if (! MEM_READONLY_P (x))
2494 /* First handle all the blocks with calls. We don't need to
2495 do any list walking for them. */
2496 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2499 SET_BIT (bmap[bb_index], indx);
2501 RESET_BIT (bmap[bb_index], indx);
2504 /* Now iterate over the blocks which have memory modifications
2505 but which do not have any calls. */
2506 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
2510 rtx list_entry = canon_modify_mem_list[bb_index];
2514 rtx dest, dest_addr;
2516 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2517 Examine each hunk of memory that is modified. */
2519 dest = XEXP (list_entry, 0);
2520 list_entry = XEXP (list_entry, 1);
2521 dest_addr = XEXP (list_entry, 0);
2523 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2524 x, rtx_addr_varies_p))
2527 SET_BIT (bmap[bb_index], indx);
2529 RESET_BIT (bmap[bb_index], indx);
2532 list_entry = XEXP (list_entry, 1);
2557 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2561 /* If we are about to do the last recursive call
2562 needed at this level, change it into iteration.
2563 This function is called enough to be worth it. */
2570 compute_transp (XEXP (x, i), indx, bmap, set_p);
2572 else if (fmt[i] == 'E')
2573 for (j = 0; j < XVECLEN (x, i); j++)
2574 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
2578 /* Top level routine to do the dataflow analysis needed by copy/const
2582 compute_cprop_data (void)
2584 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2585 compute_available (cprop_pavloc, cprop_absaltered,
2586 cprop_avout, cprop_avin);
2589 /* Copy/constant propagation. */
2591 /* Maximum number of register uses in an insn that we handle. */
2594 /* Table of uses found in an insn.
2595 Allocated statically to avoid alloc/free complexity and overhead. */
2596 static struct reg_use reg_use_table[MAX_USES];
2598 /* Index into `reg_use_table' while building it. */
2599 static int reg_use_count;
2601 /* Set up a list of register numbers used in INSN. The found uses are stored
2602 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2603 and contains the number of uses in the table upon exit.
2605 ??? If a register appears multiple times we will record it multiple times.
2606 This doesn't hurt anything but it will slow things down. */
2609 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2616 /* repeat is used to turn tail-recursion into iteration since GCC
2617 can't do it when there's no return value. */
2622 code = GET_CODE (x);
2625 if (reg_use_count == MAX_USES)
2628 reg_use_table[reg_use_count].reg_rtx = x;
2632 /* Recursively scan the operands of this expression. */
2634 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2638 /* If we are about to do the last recursive call
2639 needed at this level, change it into iteration.
2640 This function is called enough to be worth it. */
2647 find_used_regs (&XEXP (x, i), data);
2649 else if (fmt[i] == 'E')
2650 for (j = 0; j < XVECLEN (x, i); j++)
2651 find_used_regs (&XVECEXP (x, i, j), data);
2655 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2656 Returns nonzero is successful. */
2659 try_replace_reg (rtx from, rtx to, rtx insn)
2661 rtx note = find_reg_equal_equiv_note (insn);
2664 rtx set = single_set (insn);
2666 /* Usually we substitute easy stuff, so we won't copy everything.
2667 We however need to take care to not duplicate non-trivial CONST
2671 validate_replace_src_group (from, to, insn);
2672 if (num_changes_pending () && apply_change_group ())
2675 /* Try to simplify SET_SRC if we have substituted a constant. */
2676 if (success && set && CONSTANT_P (to))
2678 src = simplify_rtx (SET_SRC (set));
2681 validate_change (insn, &SET_SRC (set), src, 0);
2684 /* If there is already a REG_EQUAL note, update the expression in it
2685 with our replacement. */
2686 if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
2687 set_unique_reg_note (insn, REG_EQUAL,
2688 simplify_replace_rtx (XEXP (note, 0), from,
2690 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2692 /* If above failed and this is a single set, try to simplify the source of
2693 the set given our substitution. We could perhaps try this for multiple
2694 SETs, but it probably won't buy us anything. */
2695 src = simplify_replace_rtx (SET_SRC (set), from, to);
2697 if (!rtx_equal_p (src, SET_SRC (set))
2698 && validate_change (insn, &SET_SRC (set), src, 0))
2701 /* If we've failed to do replacement, have a single SET, don't already
2702 have a note, and have no special SET, add a REG_EQUAL note to not
2703 lose information. */
2704 if (!success && note == 0 && set != 0
2705 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
2706 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2707 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2710 /* REG_EQUAL may get simplified into register.
2711 We don't allow that. Remove that note. This code ought
2712 not to happen, because previous code ought to synthesize
2713 reg-reg move, but be on the safe side. */
2714 if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
2715 remove_note (insn, note);
2720 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
2721 NULL no such set is found. */
2723 static struct expr *
2724 find_avail_set (int regno, rtx insn)
2726 /* SET1 contains the last set found that can be returned to the caller for
2727 use in a substitution. */
2728 struct expr *set1 = 0;
2730 /* Loops are not possible here. To get a loop we would need two sets
2731 available at the start of the block containing INSN. i.e. we would
2732 need two sets like this available at the start of the block:
2734 (set (reg X) (reg Y))
2735 (set (reg Y) (reg X))
2737 This can not happen since the set of (reg Y) would have killed the
2738 set of (reg X) making it unavailable at the start of this block. */
2742 struct expr *set = lookup_set (regno, &set_hash_table);
2744 /* Find a set that is available at the start of the block
2745 which contains INSN. */
2748 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2750 set = next_set (regno, set);
2753 /* If no available set was found we've reached the end of the
2754 (possibly empty) copy chain. */
2758 gcc_assert (GET_CODE (set->expr) == SET);
2760 src = SET_SRC (set->expr);
2762 /* We know the set is available.
2763 Now check that SRC is ANTLOC (i.e. none of the source operands
2764 have changed since the start of the block).
2766 If the source operand changed, we may still use it for the next
2767 iteration of this loop, but we may not use it for substitutions. */
2769 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2772 /* If the source of the set is anything except a register, then
2773 we have reached the end of the copy chain. */
2777 /* Follow the copy chain, i.e. start another iteration of the loop
2778 and see if we have an available copy into SRC. */
2779 regno = REGNO (src);
2782 /* SET1 holds the last set that was available and anticipatable at
2787 /* Subroutine of cprop_insn that tries to propagate constants into
2788 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2789 it is the instruction that immediately precedes JUMP, and must be a
2790 single SET of a register. FROM is what we will try to replace,
2791 SRC is the constant we will try to substitute for it. Returns nonzero
2792 if a change was made. */
2795 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2797 rtx new, set_src, note_src;
2798 rtx set = pc_set (jump);
2799 rtx note = find_reg_equal_equiv_note (jump);
2803 note_src = XEXP (note, 0);
2804 if (GET_CODE (note_src) == EXPR_LIST)
2805 note_src = NULL_RTX;
2807 else note_src = NULL_RTX;
2809 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2810 set_src = note_src ? note_src : SET_SRC (set);
2812 /* First substitute the SETCC condition into the JUMP instruction,
2813 then substitute that given values into this expanded JUMP. */
2814 if (setcc != NULL_RTX
2815 && !modified_between_p (from, setcc, jump)
2816 && !modified_between_p (src, setcc, jump))
2819 rtx setcc_set = single_set (setcc);
2820 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2821 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2822 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2823 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2829 new = simplify_replace_rtx (set_src, from, src);
2831 /* If no simplification can be made, then try the next register. */
2832 if (rtx_equal_p (new, SET_SRC (set)))
2835 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
2840 /* Ensure the value computed inside the jump insn to be equivalent
2841 to one computed by setcc. */
2842 if (setcc && modified_in_p (new, setcc))
2844 if (! validate_unshare_change (jump, &SET_SRC (set), new, 0))
2846 /* When (some) constants are not valid in a comparison, and there
2847 are two registers to be replaced by constants before the entire
2848 comparison can be folded into a constant, we need to keep
2849 intermediate information in REG_EQUAL notes. For targets with
2850 separate compare insns, such notes are added by try_replace_reg.
2851 When we have a combined compare-and-branch instruction, however,
2852 we need to attach a note to the branch itself to make this
2853 optimization work. */
2855 if (!rtx_equal_p (new, note_src))
2856 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2860 /* Remove REG_EQUAL note after simplification. */
2862 remove_note (jump, note);
2866 /* Delete the cc0 setter. */
2867 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2868 delete_insn (setcc);
2871 run_jump_opt_after_gcse = 1;
2873 global_const_prop_count++;
2874 if (dump_file != NULL)
2877 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2878 REGNO (from), INSN_UID (jump));
2879 print_rtl (dump_file, src);
2880 fprintf (dump_file, "\n");
2882 purge_dead_edges (bb);
2884 /* If a conditional jump has been changed into unconditional jump, remove
2885 the jump and make the edge fallthru - this is always called in
2887 if (new != pc_rtx && simplejump_p (jump))
2892 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ei_next (&ei))
2893 if (e->dest != EXIT_BLOCK_PTR
2894 && BB_HEAD (e->dest) == JUMP_LABEL (jump))
2896 e->flags |= EDGE_FALLTHRU;
2906 constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
2910 /* Check for reg or cc0 setting instructions followed by
2911 conditional branch instructions first. */
2913 && (sset = single_set (insn)) != NULL
2915 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2917 rtx dest = SET_DEST (sset);
2918 if ((REG_P (dest) || CC0_P (dest))
2919 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2923 /* Handle normal insns next. */
2924 if (NONJUMP_INSN_P (insn)
2925 && try_replace_reg (from, to, insn))
2928 /* Try to propagate a CONST_INT into a conditional jump.
2929 We're pretty specific about what we will handle in this
2930 code, we can extend this as necessary over time.
2932 Right now the insn in question must look like
2933 (set (pc) (if_then_else ...)) */
2934 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2935 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2939 /* Perform constant and copy propagation on INSN.
2940 The result is nonzero if a change was made. */
2943 cprop_insn (rtx insn, int alter_jumps)
2945 struct reg_use *reg_used;
2953 note_uses (&PATTERN (insn), find_used_regs, NULL);
2955 note = find_reg_equal_equiv_note (insn);
2957 /* We may win even when propagating constants into notes. */
2959 find_used_regs (&XEXP (note, 0), NULL);
2961 for (reg_used = ®_use_table[0]; reg_use_count > 0;
2962 reg_used++, reg_use_count--)
2964 unsigned int regno = REGNO (reg_used->reg_rtx);
2968 /* Ignore registers created by GCSE.
2969 We do this because ... */
2970 if (regno >= max_gcse_regno)
2973 /* If the register has already been set in this block, there's
2974 nothing we can do. */
2975 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2978 /* Find an assignment that sets reg_used and is available
2979 at the start of the block. */
2980 set = find_avail_set (regno, insn);
2985 /* ??? We might be able to handle PARALLELs. Later. */
2986 gcc_assert (GET_CODE (pat) == SET);
2988 src = SET_SRC (pat);
2990 /* Constant propagation. */
2991 if (gcse_constant_p (src))
2993 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2996 global_const_prop_count++;
2997 if (dump_file != NULL)
2999 fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
3000 fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
3001 print_rtl (dump_file, src);
3002 fprintf (dump_file, "\n");
3004 if (INSN_DELETED_P (insn))
3008 else if (REG_P (src)
3009 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3010 && REGNO (src) != regno)
3012 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3015 global_copy_prop_count++;
3016 if (dump_file != NULL)
3018 fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
3019 regno, INSN_UID (insn));
3020 fprintf (dump_file, " with reg %d\n", REGNO (src));
3023 /* The original insn setting reg_used may or may not now be
3024 deletable. We leave the deletion to flow. */
3025 /* FIXME: If it turns out that the insn isn't deletable,
3026 then we may have unnecessarily extended register lifetimes
3027 and made things worse. */
3035 /* Like find_used_regs, but avoid recording uses that appear in
3036 input-output contexts such as zero_extract or pre_dec. This
3037 restricts the cases we consider to those for which local cprop
3038 can legitimately make replacements. */
3041 local_cprop_find_used_regs (rtx *xptr, void *data)
3048 switch (GET_CODE (x))
3052 case STRICT_LOW_PART:
3061 /* Can only legitimately appear this early in the context of
3062 stack pushes for function arguments, but handle all of the
3063 codes nonetheless. */
3067 /* Setting a subreg of a register larger than word_mode leaves
3068 the non-written words unchanged. */
3069 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3077 find_used_regs (xptr, data);
3080 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3081 their REG_EQUAL notes need updating. */
3084 do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
3086 rtx newreg = NULL, newcnst = NULL;
3088 /* Rule out USE instructions and ASM statements as we don't want to
3089 change the hard registers mentioned. */
3091 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3092 || (GET_CODE (PATTERN (insn)) != USE
3093 && asm_noperands (PATTERN (insn)) < 0)))
3095 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3096 struct elt_loc_list *l;
3100 for (l = val->locs; l; l = l->next)
3102 rtx this_rtx = l->loc;
3105 /* Don't CSE non-constant values out of libcall blocks. */
3106 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3109 if (gcse_constant_p (this_rtx))
3111 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3112 /* Don't copy propagate if it has attached REG_EQUIV note.
3113 At this point this only function parameters should have
3114 REG_EQUIV notes and if the argument slot is used somewhere
3115 explicitly, it means address of parameter has been taken,
3116 so we should not extend the lifetime of the pseudo. */
3117 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3118 || ! MEM_P (XEXP (note, 0))))
3121 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3123 /* If we find a case where we can't fix the retval REG_EQUAL notes
3124 match the new register, we either have to abandon this replacement
3125 or fix delete_trivially_dead_insns to preserve the setting insn,
3126 or make it delete the REG_EQUAL note, and fix up all passes that
3127 require the REG_EQUAL note there. */
3130 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3131 gcc_assert (adjusted);
3133 if (dump_file != NULL)
3135 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3137 fprintf (dump_file, "insn %d with constant ",
3139 print_rtl (dump_file, newcnst);
3140 fprintf (dump_file, "\n");
3142 local_const_prop_count++;
3145 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3147 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3148 if (dump_file != NULL)
3151 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3152 REGNO (x), INSN_UID (insn));
3153 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
3155 local_copy_prop_count++;
3162 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3163 their REG_EQUAL notes need updating to reflect that OLDREG has been
3164 replaced with NEWVAL in INSN. Return true if all substitutions could
3167 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3171 while ((end = *libcall_sp++))
3173 rtx note = find_reg_equal_equiv_note (end);
3180 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3184 note = find_reg_equal_equiv_note (end);
3187 if (reg_mentioned_p (newval, XEXP (note, 0)))
3190 while ((end = *libcall_sp++));
3194 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3195 df_notes_rescan (end);
3201 #define MAX_NESTED_LIBCALLS 9
3203 /* Do local const/copy propagation (i.e. within each basic block).
3204 If ALTER_JUMPS is true, allow propagating into jump insns, which
3205 could modify the CFG. */
3208 local_cprop_pass (bool alter_jumps)
3212 struct reg_use *reg_used;
3213 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3214 bool changed = false;
3216 cselib_init (false);
3217 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3221 FOR_BB_INSNS (bb, insn)
3225 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3229 gcc_assert (libcall_sp != libcall_stack);
3230 *--libcall_sp = XEXP (note, 0);
3232 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3235 note = find_reg_equal_equiv_note (insn);
3239 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
3242 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3244 for (reg_used = ®_use_table[0]; reg_use_count > 0;
3245 reg_used++, reg_use_count--)
3247 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3254 if (INSN_DELETED_P (insn))
3257 while (reg_use_count);
3259 cselib_process_insn (insn);
3262 /* Forget everything at the end of a basic block. Make sure we are
3263 not inside a libcall, they should never cross basic blocks. */
3264 cselib_clear_table ();
3265 gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
3270 /* Global analysis may get into infinite loops for unreachable blocks. */
3271 if (changed && alter_jumps)
3273 delete_unreachable_blocks ();
3274 free_reg_set_mem ();
3275 alloc_reg_set_mem (max_reg_num ());
3280 /* Forward propagate copies. This includes copies and constants. Return
3281 nonzero if a change was made. */
3284 cprop (int alter_jumps)
3290 /* Note we start at block 1. */
3291 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3293 if (dump_file != NULL)
3294 fprintf (dump_file, "\n");
3299 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3301 /* Reset tables used to keep track of what's still valid [since the
3302 start of the block]. */
3303 reset_opr_set_tables ();
3305 FOR_BB_INSNS (bb, insn)
3308 changed |= cprop_insn (insn, alter_jumps);
3310 /* Keep track of everything modified by this insn. */
3311 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3312 call mark_oprs_set if we turned the insn into a NOTE. */
3313 if (! NOTE_P (insn))
3314 mark_oprs_set (insn);
3318 if (dump_file != NULL)
3319 fprintf (dump_file, "\n");
3324 /* Similar to get_condition, only the resulting condition must be
3325 valid at JUMP, instead of at EARLIEST.
3327 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3328 settle for the condition variable in the jump instruction being integral.
3329 We prefer to be able to record the value of a user variable, rather than
3330 the value of a temporary used in a condition. This could be solved by
3331 recording the value of *every* register scanned by canonicalize_condition,
3332 but this would require some code reorganization. */
3335 fis_get_condition (rtx jump)
3337 return get_condition (jump, NULL, false, true);
3340 /* Check the comparison COND to see if we can safely form an implicit set from
3341 it. COND is either an EQ or NE comparison. */
3344 implicit_set_cond_p (const_rtx cond)
3346 const enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3347 const_rtx cst = XEXP (cond, 1);
3349 /* We can't perform this optimization if either operand might be or might
3350 contain a signed zero. */
3351 if (HONOR_SIGNED_ZEROS (mode))
3353 /* It is sufficient to check if CST is or contains a zero. We must
3354 handle float, complex, and vector. If any subpart is a zero, then
3355 the optimization can't be performed. */
3356 /* ??? The complex and vector checks are not implemented yet. We just
3357 always return zero for them. */
3358 if (GET_CODE (cst) == CONST_DOUBLE)
3361 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3362 if (REAL_VALUES_EQUAL (d, dconst0))
3369 return gcse_constant_p (cst);
3372 /* Find the implicit sets of a function. An "implicit set" is a constraint
3373 on the value of a variable, implied by a conditional jump. For example,
3374 following "if (x == 2)", the then branch may be optimized as though the
3375 conditional performed an "explicit set", in this example, "x = 2". This
3376 function records the set patterns that are implicit at the start of each
3380 find_implicit_sets (void)
3382 basic_block bb, dest;
3388 /* Check for more than one successor. */
3389 if (EDGE_COUNT (bb->succs) > 1)
3391 cond = fis_get_condition (BB_END (bb));
3394 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3395 && REG_P (XEXP (cond, 0))
3396 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3397 && implicit_set_cond_p (cond))
3399 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3400 : FALLTHRU_EDGE (bb)->dest;
3402 if (dest && single_pred_p (dest)
3403 && dest != EXIT_BLOCK_PTR)
3405 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3407 implicit_sets[dest->index] = new;
3410 fprintf(dump_file, "Implicit set of reg %d in ",
3411 REGNO (XEXP (cond, 0)));
3412 fprintf(dump_file, "basic block %d\n", dest->index);
3420 fprintf (dump_file, "Found %d implicit sets\n", count);
3423 /* Perform one copy/constant propagation pass.
3424 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3425 propagation into conditional jumps. If BYPASS_JUMPS is true,
3426 perform conditional jump bypassing optimizations. */
3429 one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
3433 global_const_prop_count = local_const_prop_count = 0;
3434 global_copy_prop_count = local_copy_prop_count = 0;
3437 local_cprop_pass (cprop_jumps);
3439 /* Determine implicit sets. */
3440 implicit_sets = XCNEWVEC (rtx, last_basic_block);
3441 find_implicit_sets ();
3443 alloc_hash_table (max_cuid, &set_hash_table, 1);
3444 compute_hash_table (&set_hash_table);
3446 /* Free implicit_sets before peak usage. */
3447 free (implicit_sets);
3448 implicit_sets = NULL;
3451 dump_hash_table (dump_file, "SET", &set_hash_table);
3452 if (set_hash_table.n_elems > 0)
3454 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3455 compute_cprop_data ();
3456 changed = cprop (cprop_jumps);
3458 changed |= bypass_conditional_jumps ();
3462 free_hash_table (&set_hash_table);
3466 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
3467 current_function_name (), pass, bytes_used);
3468 fprintf (dump_file, "%d local const props, %d local copy props, ",
3469 local_const_prop_count, local_copy_prop_count);
3470 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
3471 global_const_prop_count, global_copy_prop_count);
3473 /* Global analysis may get into infinite loops for unreachable blocks. */
3474 if (changed && cprop_jumps)
3475 delete_unreachable_blocks ();
3480 /* Bypass conditional jumps. */
3482 /* The value of last_basic_block at the beginning of the jump_bypass
3483 pass. The use of redirect_edge_and_branch_force may introduce new
3484 basic blocks, but the data flow analysis is only valid for basic
3485 block indices less than bypass_last_basic_block. */
3487 static int bypass_last_basic_block;
3489 /* Find a set of REGNO to a constant that is available at the end of basic
3490 block BB. Returns NULL if no such set is found. Based heavily upon
3493 static struct expr *
3494 find_bypass_set (int regno, int bb)
3496 struct expr *result = 0;
3501 struct expr *set = lookup_set (regno, &set_hash_table);
3505 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3507 set = next_set (regno, set);
3513 gcc_assert (GET_CODE (set->expr) == SET);
3515 src = SET_SRC (set->expr);
3516 if (gcse_constant_p (src))
3522 regno = REGNO (src);
3528 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3529 any of the instructions inserted on an edge. Jump bypassing places
3530 condition code setters on CFG edges using insert_insn_on_edge. This
3531 function is required to check that our data flow analysis is still
3532 valid prior to commit_edge_insertions. */
3535 reg_killed_on_edge (const_rtx reg, const_edge e)
3539 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3540 if (INSN_P (insn) && reg_set_p (reg, insn))
3546 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3547 basic block BB which has more than one predecessor. If not NULL, SETCC
3548 is the first instruction of BB, which is immediately followed by JUMP_INSN
3549 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3550 Returns nonzero if a change was made.
3552 During the jump bypassing pass, we may place copies of SETCC instructions
3553 on CFG edges. The following routine must be careful to pay attention to
3554 these inserted insns when performing its transformations. */
3557 bypass_block (basic_block bb, rtx setcc, rtx jump)
3562 int may_be_loop_header;
3566 insn = (setcc != NULL) ? setcc : jump;
3568 /* Determine set of register uses in INSN. */
3570 note_uses (&PATTERN (insn), find_used_regs, NULL);
3571 note = find_reg_equal_equiv_note (insn);
3573 find_used_regs (&XEXP (note, 0), NULL);
3575 may_be_loop_header = false;
3576 FOR_EACH_EDGE (e, ei, bb->preds)
3577 if (e->flags & EDGE_DFS_BACK)
3579 may_be_loop_header = true;
3584 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3588 if (e->flags & EDGE_COMPLEX)
3594 /* We can't redirect edges from new basic blocks. */
3595 if (e->src->index >= bypass_last_basic_block)
3601 /* The irreducible loops created by redirecting of edges entering the
3602 loop from outside would decrease effectiveness of some of the following
3603 optimizations, so prevent this. */
3604 if (may_be_loop_header
3605 && !(e->flags & EDGE_DFS_BACK))
3611 for (i = 0; i < reg_use_count; i++)
3613 struct reg_use *reg_used = ®_use_table[i];
3614 unsigned int regno = REGNO (reg_used->reg_rtx);
3615 basic_block dest, old_dest;
3619 if (regno >= max_gcse_regno)
3622 set = find_bypass_set (regno, e->src->index);
3627 /* Check the data flow is valid after edge insertions. */
3628 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3631 src = SET_SRC (pc_set (jump));
3634 src = simplify_replace_rtx (src,
3635 SET_DEST (PATTERN (setcc)),
3636 SET_SRC (PATTERN (setcc)));
3638 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3639 SET_SRC (set->expr));
3641 /* Jump bypassing may have already placed instructions on
3642 edges of the CFG. We can't bypass an outgoing edge that
3643 has instructions associated with it, as these insns won't
3644 get executed if the incoming edge is redirected. */
3648 edest = FALLTHRU_EDGE (bb);
3649 dest = edest->insns.r ? NULL : edest->dest;
3651 else if (GET_CODE (new) == LABEL_REF)
3653 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3654 /* Don't bypass edges containing instructions. */
3655 edest = find_edge (bb, dest);
3656 if (edest && edest->insns.r)
3662 /* Avoid unification of the edge with other edges from original
3663 branch. We would end up emitting the instruction on "both"
3666 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3667 && find_edge (e->src, dest))
3673 && dest != EXIT_BLOCK_PTR)
3675 redirect_edge_and_branch_force (e, dest);
3677 /* Copy the register setter to the redirected edge.
3678 Don't copy CC0 setters, as CC0 is dead after jump. */
3681 rtx pat = PATTERN (setcc);
3682 if (!CC0_P (SET_DEST (pat)))
3683 insert_insn_on_edge (copy_insn (pat), e);
3686 if (dump_file != NULL)
3688 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
3689 "in jump_insn %d equals constant ",
3690 regno, INSN_UID (jump));
3691 print_rtl (dump_file, SET_SRC (set->expr));
3692 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
3693 e->src->index, old_dest->index, dest->index);
3706 /* Find basic blocks with more than one predecessor that only contain a
3707 single conditional jump. If the result of the comparison is known at
3708 compile-time from any incoming edge, redirect that edge to the
3709 appropriate target. Returns nonzero if a change was made.
3711 This function is now mis-named, because we also handle indirect jumps. */
3714 bypass_conditional_jumps (void)
3722 /* Note we start at block 1. */
3723 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3726 bypass_last_basic_block = last_basic_block;
3727 mark_dfs_back_edges ();
3730 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3731 EXIT_BLOCK_PTR, next_bb)
3733 /* Check for more than one predecessor. */
3734 if (!single_pred_p (bb))
3737 FOR_BB_INSNS (bb, insn)
3738 if (NONJUMP_INSN_P (insn))
3742 if (GET_CODE (PATTERN (insn)) != SET)
3745 dest = SET_DEST (PATTERN (insn));
3746 if (REG_P (dest) || CC0_P (dest))
3751 else if (JUMP_P (insn))
3753 if ((any_condjump_p (insn) || computed_jump_p (insn))
3754 && onlyjump_p (insn))
3755 changed |= bypass_block (bb, setcc, insn);
3758 else if (INSN_P (insn))
3763 /* If we bypassed any register setting insns, we inserted a
3764 copy on the redirected edge. These need to be committed. */
3766 commit_edge_insertions ();
3771 /* Compute PRE+LCM working variables. */
3773 /* Local properties of expressions. */
3774 /* Nonzero for expressions that are transparent in the block. */
3775 static sbitmap *transp;
3777 /* Nonzero for expressions that are transparent at the end of the block.
3778 This is only zero for expressions killed by abnormal critical edge
3779 created by a calls. */
3780 static sbitmap *transpout;
3782 /* Nonzero for expressions that are computed (available) in the block. */
3783 static sbitmap *comp;
3785 /* Nonzero for expressions that are locally anticipatable in the block. */
3786 static sbitmap *antloc;
3788 /* Nonzero for expressions where this block is an optimal computation
3790 static sbitmap *pre_optimal;
3792 /* Nonzero for expressions which are redundant in a particular block. */
3793 static sbitmap *pre_redundant;
3795 /* Nonzero for expressions which should be inserted on a specific edge. */
3796 static sbitmap *pre_insert_map;
3798 /* Nonzero for expressions which should be deleted in a specific block. */
3799 static sbitmap *pre_delete_map;
3801 /* Contains the edge_list returned by pre_edge_lcm. */
3802 static struct edge_list *edge_list;
3804 /* Redundant insns. */
3805 static sbitmap pre_redundant_insns;
3807 /* Allocate vars used for PRE analysis. */
3810 alloc_pre_mem (int n_blocks, int n_exprs)
3812 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3813 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3814 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3817 pre_redundant = NULL;
3818 pre_insert_map = NULL;
3819 pre_delete_map = NULL;
3820 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3822 /* pre_insert and pre_delete are allocated later. */
3825 /* Free vars used for PRE analysis. */
3830 sbitmap_vector_free (transp);
3831 sbitmap_vector_free (comp);
3833 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3836 sbitmap_vector_free (pre_optimal);
3838 sbitmap_vector_free (pre_redundant);
3840 sbitmap_vector_free (pre_insert_map);
3842 sbitmap_vector_free (pre_delete_map);
3844 transp = comp = NULL;
3845 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3848 /* Top level routine to do the dataflow analysis needed by PRE. */
3851 compute_pre_data (void)
3853 sbitmap trapping_expr;
3857 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3858 sbitmap_vector_zero (ae_kill, last_basic_block);
3860 /* Collect expressions which might trap. */
3861 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3862 sbitmap_zero (trapping_expr);
3863 for (ui = 0; ui < expr_hash_table.size; ui++)
3866 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3867 if (may_trap_p (e->expr))
3868 SET_BIT (trapping_expr, e->bitmap_index);
3871 /* Compute ae_kill for each basic block using:
3881 /* If the current block is the destination of an abnormal edge, we
3882 kill all trapping expressions because we won't be able to properly
3883 place the instruction on the edge. So make them neither
3884 anticipatable nor transparent. This is fairly conservative. */
3885 FOR_EACH_EDGE (e, ei, bb->preds)
3886 if (e->flags & EDGE_ABNORMAL)
3888 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3889 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3893 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3894 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3897 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
3898 ae_kill, &pre_insert_map, &pre_delete_map);
3899 sbitmap_vector_free (antloc);
3901 sbitmap_vector_free (ae_kill);
3903 sbitmap_free (trapping_expr);
3908 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
3911 VISITED is a pointer to a working buffer for tracking which BB's have
3912 been visited. It is NULL for the top-level call.
3914 We treat reaching expressions that go through blocks containing the same
3915 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3916 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3917 2 as not reaching. The intent is to improve the probability of finding
3918 only one reaching expression and to reduce register lifetimes by picking
3919 the closest such expression. */
3922 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3927 FOR_EACH_EDGE (pred, ei, bb->preds)
3929 basic_block pred_bb = pred->src;
3931 if (pred->src == ENTRY_BLOCK_PTR
3932 /* Has predecessor has already been visited? */
3933 || visited[pred_bb->index])
3934 ;/* Nothing to do. */
3936 /* Does this predecessor generate this expression? */
3937 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3939 /* Is this the occurrence we're looking for?
3940 Note that there's only one generating occurrence per block
3941 so we just need to check the block number. */
3942 if (occr_bb == pred_bb)
3945 visited[pred_bb->index] = 1;
3947 /* Ignore this predecessor if it kills the expression. */
3948 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3949 visited[pred_bb->index] = 1;
3951 /* Neither gen nor kill. */
3954 visited[pred_bb->index] = 1;
3955 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3960 /* All paths have been checked. */
3964 /* The wrapper for pre_expr_reaches_here_work that ensures that any
3965 memory allocated for that function is returned. */
3968 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3971 char *visited = XCNEWVEC (char, last_basic_block);
3973 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3980 /* Given an expr, generate RTL which we can insert at the end of a BB,
3981 or on an edge. Set the block number of any insns generated to
3985 process_insert_insn (struct expr *expr)
3987 rtx reg = expr->reaching_reg;
3988 rtx exp = copy_rtx (expr->expr);
3993 /* If the expression is something that's an operand, like a constant,
3994 just copy it to a register. */
3995 if (general_operand (exp, GET_MODE (reg)))
3996 emit_move_insn (reg, exp);
3998 /* Otherwise, make a new insn to compute this expression and make sure the
3999 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4000 expression to make sure we don't have any sharing issues. */
4003 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
4005 if (insn_invalid_p (insn))
4016 /* Add EXPR to the end of basic block BB.
4018 This is used by both the PRE and code hoisting.
4020 For PRE, we want to verify that the expr is either transparent
4021 or locally anticipatable in the target block. This check makes
4022 no sense for code hoisting. */
4025 insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre)
4027 rtx insn = BB_END (bb);
4029 rtx reg = expr->reaching_reg;
4030 int regno = REGNO (reg);
4033 pat = process_insert_insn (expr);
4034 gcc_assert (pat && INSN_P (pat));
4037 while (NEXT_INSN (pat_end) != NULL_RTX)
4038 pat_end = NEXT_INSN (pat_end);
4040 /* If the last insn is a jump, insert EXPR in front [taking care to
4041 handle cc0, etc. properly]. Similarly we need to care trapping
4042 instructions in presence of non-call exceptions. */
4045 || (NONJUMP_INSN_P (insn)
4046 && (!single_succ_p (bb)
4047 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
4052 /* It should always be the case that we can put these instructions
4053 anywhere in the basic block with performing PRE optimizations.
4055 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4056 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4057 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4059 /* If this is a jump table, then we can't insert stuff here. Since
4060 we know the previous real insn must be the tablejump, we insert
4061 the new instruction just before the tablejump. */
4062 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4063 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4064 insn = prev_real_insn (insn);
4067 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4068 if cc0 isn't set. */
4069 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4071 insn = XEXP (note, 0);
4074 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4075 if (maybe_cc0_setter
4076 && INSN_P (maybe_cc0_setter)
4077 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4078 insn = maybe_cc0_setter;
4081 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4082 new_insn = emit_insn_before_noloc (pat, insn, bb);
4085 /* Likewise if the last insn is a call, as will happen in the presence
4086 of exception handling. */
4087 else if (CALL_P (insn)
4088 && (!single_succ_p (bb)
4089 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4091 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4092 we search backward and place the instructions before the first
4093 parameter is loaded. Do this for everyone for consistency and a
4094 presumption that we'll get better code elsewhere as well.
4096 It should always be the case that we can put these instructions
4097 anywhere in the basic block with performing PRE optimizations.
4101 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4102 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4104 /* Since different machines initialize their parameter registers
4105 in different orders, assume nothing. Collect the set of all
4106 parameter registers. */
4107 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4109 /* If we found all the parameter loads, then we want to insert
4110 before the first parameter load.
4112 If we did not find all the parameter loads, then we might have
4113 stopped on the head of the block, which could be a CODE_LABEL.
4114 If we inserted before the CODE_LABEL, then we would be putting
4115 the insn in the wrong basic block. In that case, put the insn
4116 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4117 while (LABEL_P (insn)
4118 || NOTE_INSN_BASIC_BLOCK_P (insn))
4119 insn = NEXT_INSN (insn);
4121 new_insn = emit_insn_before_noloc (pat, insn, bb);
4124 new_insn = emit_insn_after_noloc (pat, insn, bb);
4130 add_label_notes (PATTERN (pat), new_insn);
4131 note_stores (PATTERN (pat), record_set_info, pat);
4135 pat = NEXT_INSN (pat);
4138 gcse_create_count++;
4142 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
4143 bb->index, INSN_UID (new_insn));
4144 fprintf (dump_file, "copying expression %d to reg %d\n",
4145 expr->bitmap_index, regno);
4149 /* Insert partially redundant expressions on edges in the CFG to make
4150 the expressions fully redundant. */
4153 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4155 int e, i, j, num_edges, set_size, did_insert = 0;
4158 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4159 if it reaches any of the deleted expressions. */
4161 set_size = pre_insert_map[0]->size;
4162 num_edges = NUM_EDGES (edge_list);
4163 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4164 sbitmap_vector_zero (inserted, num_edges);
4166 for (e = 0; e < num_edges; e++)
4169 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4171 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4173 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4175 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4176 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4178 struct expr *expr = index_map[j];
4181 /* Now look at each deleted occurrence of this expression. */
4182 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4184 if (! occr->deleted_p)
4187 /* Insert this expression on this edge if it would
4188 reach the deleted occurrence in BB. */
4189 if (!TEST_BIT (inserted[e], j))
4192 edge eg = INDEX_EDGE (edge_list, e);
4194 /* We can't insert anything on an abnormal and
4195 critical edge, so we insert the insn at the end of
4196 the previous block. There are several alternatives
4197 detailed in Morgans book P277 (sec 10.5) for
4198 handling this situation. This one is easiest for
4201 if (eg->flags & EDGE_ABNORMAL)
4202 insert_insn_end_basic_block (index_map[j], bb, 0);
4205 insn = process_insert_insn (index_map[j]);
4206 insert_insn_on_edge (insn, eg);
4211 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
4213 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4214 fprintf (dump_file, "copy expression %d\n",
4215 expr->bitmap_index);
4218 update_ld_motion_stores (expr);
4219 SET_BIT (inserted[e], j);
4221 gcse_create_count++;
4228 sbitmap_vector_free (inserted);
4232 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4233 Given "old_reg <- expr" (INSN), instead of adding after it
4234 reaching_reg <- old_reg
4235 it's better to do the following:
4236 reaching_reg <- expr
4237 old_reg <- reaching_reg
4238 because this way copy propagation can discover additional PRE
4239 opportunities. But if this fails, we try the old way.
4240 When "expr" is a store, i.e.
4241 given "MEM <- old_reg", instead of adding after it
4242 reaching_reg <- old_reg
4243 it's better to add it before as follows:
4244 reaching_reg <- old_reg
4245 MEM <- reaching_reg. */
4248 pre_insert_copy_insn (struct expr *expr, rtx insn)
4250 rtx reg = expr->reaching_reg;
4251 int regno = REGNO (reg);
4252 int indx = expr->bitmap_index;
4253 rtx pat = PATTERN (insn);
4254 rtx set, first_set, new_insn;
4258 /* This block matches the logic in hash_scan_insn. */
4259 switch (GET_CODE (pat))
4266 /* Search through the parallel looking for the set whose
4267 source was the expression that we're interested in. */
4268 first_set = NULL_RTX;
4270 for (i = 0; i < XVECLEN (pat, 0); i++)
4272 rtx x = XVECEXP (pat, 0, i);
4273 if (GET_CODE (x) == SET)
4275 /* If the source was a REG_EQUAL or REG_EQUIV note, we
4276 may not find an equivalent expression, but in this
4277 case the PARALLEL will have a single set. */
4278 if (first_set == NULL_RTX)
4280 if (expr_equiv_p (SET_SRC (x), expr->expr))
4288 gcc_assert (first_set);
4289 if (set == NULL_RTX)
4297 if (REG_P (SET_DEST (set)))
4299 old_reg = SET_DEST (set);
4300 /* Check if we can modify the set destination in the original insn. */
4301 if (validate_change (insn, &SET_DEST (set), reg, 0))
4303 new_insn = gen_move_insn (old_reg, reg);
4304 new_insn = emit_insn_after (new_insn, insn);
4306 /* Keep register set table up to date. */
4307 record_one_set (regno, insn);
4311 new_insn = gen_move_insn (reg, old_reg);
4312 new_insn = emit_insn_after (new_insn, insn);
4314 /* Keep register set table up to date. */
4315 record_one_set (regno, new_insn);
4318 else /* This is possible only in case of a store to memory. */
4320 old_reg = SET_SRC (set);
4321 new_insn = gen_move_insn (reg, old_reg);
4323 /* Check if we can modify the set source in the original insn. */
4324 if (validate_change (insn, &SET_SRC (set), reg, 0))
4325 new_insn = emit_insn_before (new_insn, insn);
4327 new_insn = emit_insn_after (new_insn, insn);
4329 /* Keep register set table up to date. */
4330 record_one_set (regno, new_insn);
4333 gcse_create_count++;
4337 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4338 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4339 INSN_UID (insn), regno);
4342 /* Copy available expressions that reach the redundant expression
4343 to `reaching_reg'. */
4346 pre_insert_copies (void)
4348 unsigned int i, added_copy;
4353 /* For each available expression in the table, copy the result to
4354 `reaching_reg' if the expression reaches a deleted one.
4356 ??? The current algorithm is rather brute force.
4357 Need to do some profiling. */
4359 for (i = 0; i < expr_hash_table.size; i++)
4360 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4362 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4363 we don't want to insert a copy here because the expression may not
4364 really be redundant. So only insert an insn if the expression was
4365 deleted. This test also avoids further processing if the
4366 expression wasn't deleted anywhere. */
4367 if (expr->reaching_reg == NULL)
4370 /* Set when we add a copy for that expression. */
4373 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4375 if (! occr->deleted_p)
4378 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4380 rtx insn = avail->insn;
4382 /* No need to handle this one if handled already. */
4383 if (avail->copied_p)
4386 /* Don't handle this one if it's a redundant one. */
4387 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4390 /* Or if the expression doesn't reach the deleted one. */
4391 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4393 BLOCK_FOR_INSN (occr->insn)))
4398 /* Copy the result of avail to reaching_reg. */
4399 pre_insert_copy_insn (expr, insn);
4400 avail->copied_p = 1;
4405 update_ld_motion_stores (expr);
4409 /* Emit move from SRC to DEST noting the equivalence with expression computed
4412 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4415 rtx set = single_set (insn), set2;
4419 /* This should never fail since we're creating a reg->reg copy
4420 we've verified to be valid. */
4422 new = emit_insn_after (gen_move_insn (dest, src), insn);
4424 /* Note the equivalence for local CSE pass. */
4425 set2 = single_set (new);
4426 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4428 if ((note = find_reg_equal_equiv_note (insn)))
4429 eqv = XEXP (note, 0);
4431 eqv = SET_SRC (set);
4433 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
4438 /* Delete redundant computations.
4439 Deletion is done by changing the insn to copy the `reaching_reg' of
4440 the expression into the result of the SET. It is left to later passes
4441 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4443 Returns nonzero if a change is made. */
4454 for (i = 0; i < expr_hash_table.size; i++)
4455 for (expr = expr_hash_table.table[i];
4457 expr = expr->next_same_hash)
4459 int indx = expr->bitmap_index;
4461 /* We only need to search antic_occr since we require
4464 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4466 rtx insn = occr->insn;
4468 basic_block bb = BLOCK_FOR_INSN (insn);
4470 /* We only delete insns that have a single_set. */
4471 if (TEST_BIT (pre_delete_map[bb->index], indx)
4472 && (set = single_set (insn)) != 0
4473 && dbg_cnt (pre_insn))
4475 /* Create a pseudo-reg to store the result of reaching
4476 expressions into. Get the mode for the new pseudo from
4477 the mode of the original destination pseudo. */
4478 if (expr->reaching_reg == NULL)
4479 expr->reaching_reg = gen_reg_rtx_and_attrs (SET_DEST (set));
4481 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4483 occr->deleted_p = 1;
4484 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4491 "PRE: redundant insn %d (expression %d) in ",
4492 INSN_UID (insn), indx);
4493 fprintf (dump_file, "bb %d, reaching reg is %d\n",
4494 bb->index, REGNO (expr->reaching_reg));
4503 /* Perform GCSE optimizations using PRE.
4504 This is called by one_pre_gcse_pass after all the dataflow analysis
4507 This is based on the original Morel-Renvoise paper Fred Chow's thesis, and
4508 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4509 Compiler Design and Implementation.
4511 ??? A new pseudo reg is created to hold the reaching expression. The nice
4512 thing about the classical approach is that it would try to use an existing
4513 reg. If the register can't be adequately optimized [i.e. we introduce
4514 reload problems], one could add a pass here to propagate the new register
4517 ??? We don't handle single sets in PARALLELs because we're [currently] not
4518 able to copy the rest of the parallel when we insert copies to create full
4519 redundancies from partial redundancies. However, there's no reason why we
4520 can't handle PARALLELs in the cases where there are no partial
4527 int did_insert, changed;
4528 struct expr **index_map;
4531 /* Compute a mapping from expression number (`bitmap_index') to
4532 hash table entry. */
4534 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4535 for (i = 0; i < expr_hash_table.size; i++)
4536 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4537 index_map[expr->bitmap_index] = expr;
4539 /* Reset bitmap used to track which insns are redundant. */
4540 pre_redundant_insns = sbitmap_alloc (max_cuid);
4541 sbitmap_zero (pre_redundant_insns);
4543 /* Delete the redundant insns first so that
4544 - we know what register to use for the new insns and for the other
4545 ones with reaching expressions
4546 - we know which insns are redundant when we go to create copies */
4548 changed = pre_delete ();
4549 did_insert = pre_edge_insert (edge_list, index_map);
4551 /* In other places with reaching expressions, copy the expression to the
4552 specially allocated pseudo-reg that reaches the redundant expr. */
4553 pre_insert_copies ();
4556 commit_edge_insertions ();
4561 sbitmap_free (pre_redundant_insns);
4565 /* Top level routine to perform one PRE GCSE pass.
4567 Return nonzero if a change was made. */
4570 one_pre_gcse_pass (int pass)
4574 gcse_subst_count = 0;
4575 gcse_create_count = 0;
4577 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4578 add_noreturn_fake_exit_edges ();
4580 compute_ld_motion_mems ();
4582 compute_hash_table (&expr_hash_table);
4583 trim_ld_motion_mems ();
4585 dump_hash_table (dump_file, "Expression", &expr_hash_table);
4587 if (expr_hash_table.n_elems > 0)
4589 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4590 compute_pre_data ();
4591 changed |= pre_gcse ();
4592 free_edge_list (edge_list);
4597 remove_fake_exit_edges ();
4598 free_hash_table (&expr_hash_table);
4602 fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4603 current_function_name (), pass, bytes_used);
4604 fprintf (dump_file, "%d substs, %d insns created\n",
4605 gcse_subst_count, gcse_create_count);
4611 /* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them
4612 to INSN. If such notes are added to an insn which references a
4613 CODE_LABEL, the LABEL_NUSES count is incremented. We have to add
4614 that note, because the following loop optimization pass requires
4617 /* ??? If there was a jump optimization pass after gcse and before loop,
4618 then we would not need to do this here, because jump would add the
4619 necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes. */
4622 add_label_notes (rtx x, rtx insn)
4624 enum rtx_code code = GET_CODE (x);
4628 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4630 /* This code used to ignore labels that referred to dispatch tables to
4631 avoid flow generating (slightly) worse code.
4633 We no longer ignore such label references (see LABEL_REF handling in
4634 mark_jump_label for additional information). */
4636 /* There's no reason for current users to emit jump-insns with
4637 such a LABEL_REF, so we don't have to handle REG_LABEL_TARGET
4639 gcc_assert (!JUMP_P (insn));
4641 = gen_rtx_INSN_LIST (REG_LABEL_OPERAND, XEXP (x, 0),
4643 if (LABEL_P (XEXP (x, 0)))
4644 LABEL_NUSES (XEXP (x, 0))++;
4649 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4652 add_label_notes (XEXP (x, i), insn);
4653 else if (fmt[i] == 'E')
4654 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4655 add_label_notes (XVECEXP (x, i, j), insn);
4659 /* Compute transparent outgoing information for each block.
4661 An expression is transparent to an edge unless it is killed by
4662 the edge itself. This can only happen with abnormal control flow,
4663 when the edge is traversed through a call. This happens with
4664 non-local labels and exceptions.
4666 This would not be necessary if we split the edge. While this is
4667 normally impossible for abnormal critical edges, with some effort
4668 it should be possible with exception handling, since we still have
4669 control over which handler should be invoked. But due to increased
4670 EH table sizes, this may not be worthwhile. */
4673 compute_transpout (void)
4679 sbitmap_vector_ones (transpout, last_basic_block);
4683 /* Note that flow inserted a nop at the end of basic blocks that
4684 end in call instructions for reasons other than abnormal
4686 if (! CALL_P (BB_END (bb)))
4689 for (i = 0; i < expr_hash_table.size; i++)
4690 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4691 if (MEM_P (expr->expr))
4693 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4694 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4697 /* ??? Optimally, we would use interprocedural alias
4698 analysis to determine if this mem is actually killed
4700 RESET_BIT (transpout[bb->index], expr->bitmap_index);
4705 /* Code Hoisting variables and subroutines. */
4707 /* Very busy expressions. */
4708 static sbitmap *hoist_vbein;
4709 static sbitmap *hoist_vbeout;
4711 /* Hoistable expressions. */
4712 static sbitmap *hoist_exprs;
4714 /* ??? We could compute post dominators and run this algorithm in
4715 reverse to perform tail merging, doing so would probably be
4716 more effective than the tail merging code in jump.c.
4718 It's unclear if tail merging could be run in parallel with
4719 code hoisting. It would be nice. */
4721 /* Allocate vars used for code hoisting analysis. */
4724 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4726 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4727 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4728 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4730 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4731 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4732 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4733 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4736 /* Free vars used for code hoisting analysis. */
4739 free_code_hoist_mem (void)
4741 sbitmap_vector_free (antloc);
4742 sbitmap_vector_free (transp);
4743 sbitmap_vector_free (comp);
4745 sbitmap_vector_free (hoist_vbein);
4746 sbitmap_vector_free (hoist_vbeout);
4747 sbitmap_vector_free (hoist_exprs);
4748 sbitmap_vector_free (transpout);
4750 free_dominance_info (CDI_DOMINATORS);
4753 /* Compute the very busy expressions at entry/exit from each block.
4755 An expression is very busy if all paths from a given point
4756 compute the expression. */
4759 compute_code_hoist_vbeinout (void)
4761 int changed, passes;
4764 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4765 sbitmap_vector_zero (hoist_vbein, last_basic_block);
4774 /* We scan the blocks in the reverse order to speed up
4776 FOR_EACH_BB_REVERSE (bb)
4778 if (bb->next_bb != EXIT_BLOCK_PTR)
4779 sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
4780 hoist_vbein, bb->index);
4782 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index],
4784 hoist_vbeout[bb->index],
4792 fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
4795 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4798 compute_code_hoist_data (void)
4800 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4801 compute_transpout ();
4802 compute_code_hoist_vbeinout ();
4803 calculate_dominance_info (CDI_DOMINATORS);
4805 fprintf (dump_file, "\n");
4808 /* Determine if the expression identified by EXPR_INDEX would
4809 reach BB unimpared if it was placed at the end of EXPR_BB.
4811 It's unclear exactly what Muchnick meant by "unimpared". It seems
4812 to me that the expression must either be computed or transparent in
4813 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4814 would allow the expression to be hoisted out of loops, even if
4815 the expression wasn't a loop invariant.
4817 Contrast this to reachability for PRE where an expression is
4818 considered reachable if *any* path reaches instead of *all*
4822 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4826 int visited_allocated_locally = 0;
4829 if (visited == NULL)
4831 visited_allocated_locally = 1;
4832 visited = XCNEWVEC (char, last_basic_block);
4835 FOR_EACH_EDGE (pred, ei, bb->preds)
4837 basic_block pred_bb = pred->src;
4839 if (pred->src == ENTRY_BLOCK_PTR)
4841 else if (pred_bb == expr_bb)
4843 else if (visited[pred_bb->index])
4846 /* Does this predecessor generate this expression? */
4847 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4849 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4855 visited[pred_bb->index] = 1;
4856 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4861 if (visited_allocated_locally)
4864 return (pred == NULL);
4867 /* Actually perform code hoisting. */
4872 basic_block bb, dominated;
4873 VEC (basic_block, heap) *domby;
4875 struct expr **index_map;
4878 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4880 /* Compute a mapping from expression number (`bitmap_index') to
4881 hash table entry. */
4883 index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
4884 for (i = 0; i < expr_hash_table.size; i++)
4885 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4886 index_map[expr->bitmap_index] = expr;
4888 /* Walk over each basic block looking for potentially hoistable
4889 expressions, nothing gets hoisted from the entry block. */
4893 int insn_inserted_p;
4895 domby = get_dominated_by (CDI_DOMINATORS, bb);
4896 /* Examine each expression that is very busy at the exit of this
4897 block. These are the potentially hoistable expressions. */
4898 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4902 if (TEST_BIT (hoist_vbeout[bb->index], i)
4903 && TEST_BIT (transpout[bb->index], i))
4905 /* We've found a potentially hoistable expression, now
4906 we look at every block BB dominates to see if it
4907 computes the expression. */
4908 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
4910 /* Ignore self dominance. */
4911 if (bb == dominated)
4913 /* We've found a dominated block, now see if it computes
4914 the busy expression and whether or not moving that
4915 expression to the "beginning" of that block is safe. */
4916 if (!TEST_BIT (antloc[dominated->index], i))
4919 /* Note if the expression would reach the dominated block
4920 unimpared if it was placed at the end of BB.
4922 Keep track of how many times this expression is hoistable
4923 from a dominated block into BB. */
4924 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4928 /* If we found more than one hoistable occurrence of this
4929 expression, then note it in the bitmap of expressions to
4930 hoist. It makes no sense to hoist things which are computed
4931 in only one BB, and doing so tends to pessimize register
4932 allocation. One could increase this value to try harder
4933 to avoid any possible code expansion due to register
4934 allocation issues; however experiments have shown that
4935 the vast majority of hoistable expressions are only movable
4936 from two successors, so raising this threshold is likely
4937 to nullify any benefit we get from code hoisting. */
4940 SET_BIT (hoist_exprs[bb->index], i);
4945 /* If we found nothing to hoist, then quit now. */
4948 VEC_free (basic_block, heap, domby);
4952 /* Loop over all the hoistable expressions. */
4953 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4955 /* We want to insert the expression into BB only once, so
4956 note when we've inserted it. */
4957 insn_inserted_p = 0;
4959 /* These tests should be the same as the tests above. */
4960 if (TEST_BIT (hoist_exprs[bb->index], i))
4962 /* We've found a potentially hoistable expression, now
4963 we look at every block BB dominates to see if it
4964 computes the expression. */
4965 for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
4967 /* Ignore self dominance. */
4968 if (bb == dominated)
4971 /* We've found a dominated block, now see if it computes
4972 the busy expression and whether or not moving that
4973 expression to the "beginning" of that block is safe. */
4974 if (!TEST_BIT (antloc[dominated->index], i))
4977 /* The expression is computed in the dominated block and
4978 it would be safe to compute it at the start of the
4979 dominated block. Now we have to determine if the
4980 expression would reach the dominated block if it was
4981 placed at the end of BB. */
4982 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4984 struct expr *expr = index_map[i];
4985 struct occr *occr = expr->antic_occr;
4989 /* Find the right occurrence of this expression. */
4990 while (BLOCK_FOR_INSN (occr->insn) != dominated && occr)
4995 set = single_set (insn);
4998 /* Create a pseudo-reg to store the result of reaching
4999 expressions into. Get the mode for the new pseudo
5000 from the mode of the original destination pseudo. */
5001 if (expr->reaching_reg == NULL)
5003 = gen_reg_rtx_and_attrs (SET_DEST (set));
5005 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5007 occr->deleted_p = 1;
5008 if (!insn_inserted_p)
5010 insert_insn_end_basic_block (index_map[i], bb, 0);
5011 insn_inserted_p = 1;
5017 VEC_free (basic_block, heap, domby);
5023 /* Top level routine to perform one code hoisting (aka unification) pass
5025 Return nonzero if a change was made. */
5028 one_code_hoisting_pass (void)
5032 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5033 compute_hash_table (&expr_hash_table);
5035 dump_hash_table (dump_file, "Code Hosting Expressions", &expr_hash_table);
5037 if (expr_hash_table.n_elems > 0)
5039 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
5040 compute_code_hoist_data ();
5042 free_code_hoist_mem ();
5045 free_hash_table (&expr_hash_table);
5050 /* Here we provide the things required to do store motion towards
5051 the exit. In order for this to be effective, gcse also needed to
5052 be taught how to move a load when it is kill only by a store to itself.
5057 void foo(float scale)
5059 for (i=0; i<10; i++)
5063 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5064 the load out since its live around the loop, and stored at the bottom
5067 The 'Load Motion' referred to and implemented in this file is
5068 an enhancement to gcse which when using edge based lcm, recognizes
5069 this situation and allows gcse to move the load out of the loop.
5071 Once gcse has hoisted the load, store motion can then push this
5072 load towards the exit, and we end up with no loads or stores of 'i'
5076 pre_ldst_expr_hash (const void *p)
5078 int do_not_record_p = 0;
5079 const struct ls_expr *x = p;
5080 return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
5084 pre_ldst_expr_eq (const void *p1, const void *p2)
5086 const struct ls_expr *ptr1 = p1, *ptr2 = p2;
5087 return expr_equiv_p (ptr1->pattern, ptr2->pattern);
5090 /* This will search the ldst list for a matching expression. If it
5091 doesn't find one, we create one and initialize it. */
5093 static struct ls_expr *
5096 int do_not_record_p = 0;
5097 struct ls_expr * ptr;
5102 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5103 NULL, /*have_reg_qty=*/false);
5106 slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
5108 return (struct ls_expr *)*slot;
5110 ptr = XNEW (struct ls_expr);
5112 ptr->next = pre_ldst_mems;
5115 ptr->pattern_regs = NULL_RTX;
5116 ptr->loads = NULL_RTX;
5117 ptr->stores = NULL_RTX;
5118 ptr->reaching_reg = NULL_RTX;
5121 ptr->hash_index = hash;
5122 pre_ldst_mems = ptr;
5128 /* Free up an individual ldst entry. */
5131 free_ldst_entry (struct ls_expr * ptr)
5133 free_INSN_LIST_list (& ptr->loads);
5134 free_INSN_LIST_list (& ptr->stores);
5139 /* Free up all memory associated with the ldst list. */
5142 free_ldst_mems (void)
5145 htab_delete (pre_ldst_table);
5146 pre_ldst_table = NULL;
5148 while (pre_ldst_mems)
5150 struct ls_expr * tmp = pre_ldst_mems;
5152 pre_ldst_mems = pre_ldst_mems->next;
5154 free_ldst_entry (tmp);
5157 pre_ldst_mems = NULL;
5160 /* Dump debugging info about the ldst list. */
5163 print_ldst_list (FILE * file)
5165 struct ls_expr * ptr;
5167 fprintf (file, "LDST list: \n");
5169 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5171 fprintf (file, " Pattern (%3d): ", ptr->index);
5173 print_rtl (file, ptr->pattern);
5175 fprintf (file, "\n Loads : ");
5178 print_rtl (file, ptr->loads);
5180 fprintf (file, "(nil)");
5182 fprintf (file, "\n Stores : ");
5185 print_rtl (file, ptr->stores);
5187 fprintf (file, "(nil)");
5189 fprintf (file, "\n\n");
5192 fprintf (file, "\n");
5195 /* Returns 1 if X is in the list of ldst only expressions. */
5197 static struct ls_expr *
5198 find_rtx_in_ldst (rtx x)
5202 if (!pre_ldst_table)
5205 slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
5206 if (!slot || ((struct ls_expr *)*slot)->invalid)
5211 /* Assign each element of the list of mems a monotonically increasing value. */
5214 enumerate_ldsts (void)
5216 struct ls_expr * ptr;
5219 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5225 /* Return first item in the list. */
5227 static inline struct ls_expr *
5228 first_ls_expr (void)
5230 return pre_ldst_mems;
5233 /* Return the next item in the list after the specified one. */
5235 static inline struct ls_expr *
5236 next_ls_expr (struct ls_expr * ptr)
5241 /* Load Motion for loads which only kill themselves. */
5243 /* Return true if x is a simple MEM operation, with no registers or
5244 side effects. These are the types of loads we consider for the
5245 ld_motion list, otherwise we let the usual aliasing take care of it. */
5248 simple_mem (const_rtx x)
5253 if (MEM_VOLATILE_P (x))
5256 if (GET_MODE (x) == BLKmode)
5259 /* If we are handling exceptions, we must be careful with memory references
5260 that may trap. If we are not, the behavior is undefined, so we may just
5262 if (flag_non_call_exceptions && may_trap_p (x))
5265 if (side_effects_p (x))
5268 /* Do not consider function arguments passed on stack. */
5269 if (reg_mentioned_p (stack_pointer_rtx, x))
5272 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5278 /* Make sure there isn't a buried reference in this pattern anywhere.
5279 If there is, invalidate the entry for it since we're not capable
5280 of fixing it up just yet.. We have to be sure we know about ALL
5281 loads since the aliasing code will allow all entries in the
5282 ld_motion list to not-alias itself. If we miss a load, we will get
5283 the wrong value since gcse might common it and we won't know to
5287 invalidate_any_buried_refs (rtx x)
5291 struct ls_expr * ptr;
5293 /* Invalidate it in the list. */
5294 if (MEM_P (x) && simple_mem (x))
5296 ptr = ldst_entry (x);
5300 /* Recursively process the insn. */
5301 fmt = GET_RTX_FORMAT (GET_CODE (x));
5303 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5306 invalidate_any_buried_refs (XEXP (x, i));
5307 else if (fmt[i] == 'E')
5308 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5309 invalidate_any_buried_refs (XVECEXP (x, i, j));
5313 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5314 being defined as MEM loads and stores to symbols, with no side effects
5315 and no registers in the expression. For a MEM destination, we also
5316 check that the insn is still valid if we replace the destination with a
5317 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5318 which don't match this criteria, they are invalidated and trimmed out
5322 compute_ld_motion_mems (void)
5324 struct ls_expr * ptr;
5328 pre_ldst_mems = NULL;
5329 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5330 pre_ldst_expr_eq, NULL);
5334 FOR_BB_INSNS (bb, insn)
5338 if (GET_CODE (PATTERN (insn)) == SET)
5340 rtx src = SET_SRC (PATTERN (insn));
5341 rtx dest = SET_DEST (PATTERN (insn));
5343 /* Check for a simple LOAD... */
5344 if (MEM_P (src) && simple_mem (src))
5346 ptr = ldst_entry (src);
5348 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5354 /* Make sure there isn't a buried load somewhere. */
5355 invalidate_any_buried_refs (src);
5358 /* Check for stores. Don't worry about aliased ones, they
5359 will block any movement we might do later. We only care
5360 about this exact pattern since those are the only
5361 circumstance that we will ignore the aliasing info. */
5362 if (MEM_P (dest) && simple_mem (dest))
5364 ptr = ldst_entry (dest);
5367 && GET_CODE (src) != ASM_OPERANDS
5368 /* Check for REG manually since want_to_gcse_p
5369 returns 0 for all REGs. */
5370 && can_assign_to_reg_p (src))
5371 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5377 invalidate_any_buried_refs (PATTERN (insn));
5383 /* Remove any references that have been either invalidated or are not in the
5384 expression list for pre gcse. */
5387 trim_ld_motion_mems (void)
5389 struct ls_expr * * last = & pre_ldst_mems;
5390 struct ls_expr * ptr = pre_ldst_mems;
5396 /* Delete if entry has been made invalid. */
5399 /* Delete if we cannot find this mem in the expression list. */
5400 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5402 for (expr = expr_hash_table.table[hash];
5404 expr = expr->next_same_hash)
5405 if (expr_equiv_p (expr->expr, ptr->pattern))
5409 expr = (struct expr *) 0;
5413 /* Set the expression field if we are keeping it. */
5421 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5422 free_ldst_entry (ptr);
5427 /* Show the world what we've found. */
5428 if (dump_file && pre_ldst_mems != NULL)
5429 print_ldst_list (dump_file);
5432 /* This routine will take an expression which we are replacing with
5433 a reaching register, and update any stores that are needed if
5434 that expression is in the ld_motion list. Stores are updated by
5435 copying their SRC to the reaching register, and then storing
5436 the reaching register into the store location. These keeps the
5437 correct value in the reaching register for the loads. */
5440 update_ld_motion_stores (struct expr * expr)
5442 struct ls_expr * mem_ptr;
5444 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5446 /* We can try to find just the REACHED stores, but is shouldn't
5447 matter to set the reaching reg everywhere... some might be
5448 dead and should be eliminated later. */
5450 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5451 where reg is the reaching reg used in the load. We checked in
5452 compute_ld_motion_mems that we can replace (set mem expr) with
5453 (set reg expr) in that insn. */
5454 rtx list = mem_ptr->stores;
5456 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5458 rtx insn = XEXP (list, 0);
5459 rtx pat = PATTERN (insn);
5460 rtx src = SET_SRC (pat);
5461 rtx reg = expr->reaching_reg;
5464 /* If we've already copied it, continue. */
5465 if (expr->reaching_reg == src)
5470 fprintf (dump_file, "PRE: store updated with reaching reg ");
5471 print_rtl (dump_file, expr->reaching_reg);
5472 fprintf (dump_file, ":\n ");
5473 print_inline_rtx (dump_file, insn, 8);
5474 fprintf (dump_file, "\n");
5477 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
5478 new = emit_insn_before (copy, insn);
5479 record_one_set (REGNO (reg), new);
5480 SET_SRC (pat) = reg;
5481 df_insn_rescan (insn);
5483 /* un-recognize this pattern since it's probably different now. */
5484 INSN_CODE (insn) = -1;
5485 gcse_create_count++;
5490 /* Store motion code. */
5492 #define ANTIC_STORE_LIST(x) ((x)->loads)
5493 #define AVAIL_STORE_LIST(x) ((x)->stores)
5494 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5496 /* This is used to communicate the target bitvector we want to use in the
5497 reg_set_info routine when called via the note_stores mechanism. */
5498 static int * regvec;
5500 /* And current insn, for the same routine. */
5501 static rtx compute_store_table_current_insn;
5503 /* Used in computing the reverse edge graph bit vectors. */
5504 static sbitmap * st_antloc;
5506 /* Global holding the number of store expressions we are dealing with. */
5507 static int num_stores;
5509 /* Checks to set if we need to mark a register set. Called from
5513 reg_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
5516 sbitmap bb_reg = data;
5518 if (GET_CODE (dest) == SUBREG)
5519 dest = SUBREG_REG (dest);
5523 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5525 SET_BIT (bb_reg, REGNO (dest));
5529 /* Clear any mark that says that this insn sets dest. Called from
5533 reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
5536 int *dead_vec = data;
5538 if (GET_CODE (dest) == SUBREG)
5539 dest = SUBREG_REG (dest);
5542 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5543 dead_vec[REGNO (dest)] = 0;
5546 /* Return zero if some of the registers in list X are killed
5547 due to set of registers in bitmap REGS_SET. */
5550 store_ops_ok (const_rtx x, int *regs_set)
5554 for (; x; x = XEXP (x, 1))
5557 if (regs_set[REGNO(reg)])
5564 /* Returns a list of registers mentioned in X. */
5566 extract_mentioned_regs (rtx x)
5568 return extract_mentioned_regs_helper (x, NULL_RTX);
5571 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5574 extract_mentioned_regs_helper (rtx x, rtx accum)
5580 /* Repeat is used to turn tail-recursion into iteration. */
5586 code = GET_CODE (x);
5590 return alloc_EXPR_LIST (0, x, accum);
5602 /* We do not run this function with arguments having side effects. */
5622 i = GET_RTX_LENGTH (code) - 1;
5623 fmt = GET_RTX_FORMAT (code);
5629 rtx tem = XEXP (x, i);
5631 /* If we are about to do the last recursive call
5632 needed at this level, change it into iteration. */
5639 accum = extract_mentioned_regs_helper (tem, accum);
5641 else if (fmt[i] == 'E')
5645 for (j = 0; j < XVECLEN (x, i); j++)
5646 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
5653 /* Determine whether INSN is MEM store pattern that we will consider moving.
5654 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5655 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5656 including) the insn in this basic block. We must be passing through BB from
5657 head to end, as we are using this fact to speed things up.
5659 The results are stored this way:
5661 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5662 -- if the processed expression is not anticipatable, NULL_RTX is added
5663 there instead, so that we can use it as indicator that no further
5664 expression of this type may be anticipatable
5665 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5666 consequently, all of them but this head are dead and may be deleted.
5667 -- if the expression is not available, the insn due to that it fails to be
5668 available is stored in reaching_reg.
5670 The things are complicated a bit by fact that there already may be stores
5671 to the same MEM from other blocks; also caller must take care of the
5672 necessary cleanup of the temporary markers after end of the basic block.
5676 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5678 struct ls_expr * ptr;
5680 int check_anticipatable, check_available;
5681 basic_block bb = BLOCK_FOR_INSN (insn);
5683 set = single_set (insn);
5687 dest = SET_DEST (set);
5689 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5690 || GET_MODE (dest) == BLKmode)
5693 if (side_effects_p (dest))
5696 /* If we are handling exceptions, we must be careful with memory references
5697 that may trap. If we are not, the behavior is undefined, so we may just
5699 if (flag_non_call_exceptions && may_trap_p (dest))
5702 /* Even if the destination cannot trap, the source may. In this case we'd
5703 need to handle updating the REG_EH_REGION note. */
5704 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5707 /* Make sure that the SET_SRC of this store insns can be assigned to
5708 a register, or we will fail later on in replace_store_insn, which
5709 assumes that we can do this. But sometimes the target machine has
5710 oddities like MEM read-modify-write instruction. See for example
5712 if (!can_assign_to_reg_p (SET_SRC (set)))
5715 ptr = ldst_entry (dest);
5716 if (!ptr->pattern_regs)
5717 ptr->pattern_regs = extract_mentioned_regs (dest);
5719 /* Do not check for anticipatability if we either found one anticipatable
5720 store already, or tested for one and found out that it was killed. */
5721 check_anticipatable = 0;
5722 if (!ANTIC_STORE_LIST (ptr))
5723 check_anticipatable = 1;
5726 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5728 && BLOCK_FOR_INSN (tmp) != bb)
5729 check_anticipatable = 1;
5731 if (check_anticipatable)
5733 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5737 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5738 ANTIC_STORE_LIST (ptr));
5741 /* It is not necessary to check whether store is available if we did
5742 it successfully before; if we failed before, do not bother to check
5743 until we reach the insn that caused us to fail. */
5744 check_available = 0;
5745 if (!AVAIL_STORE_LIST (ptr))
5746 check_available = 1;
5749 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5750 if (BLOCK_FOR_INSN (tmp) != bb)
5751 check_available = 1;
5753 if (check_available)
5755 /* Check that we have already reached the insn at that the check
5756 failed last time. */
5757 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5759 for (tmp = BB_END (bb);
5760 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5761 tmp = PREV_INSN (tmp))
5764 check_available = 0;
5767 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5769 &LAST_AVAIL_CHECK_FAILURE (ptr));
5771 if (!check_available)
5772 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5775 /* Find available and anticipatable stores. */
5778 compute_store_table (void)
5784 int *last_set_in, *already_set;
5785 struct ls_expr * ptr, **prev_next_ptr_ptr;
5787 max_gcse_regno = max_reg_num ();
5789 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
5791 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
5793 pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
5794 pre_ldst_expr_eq, NULL);
5795 last_set_in = XCNEWVEC (int, max_gcse_regno);
5796 already_set = XNEWVEC (int, max_gcse_regno);
5798 /* Find all the stores we care about. */
5801 /* First compute the registers set in this block. */
5802 regvec = last_set_in;
5804 FOR_BB_INSNS (bb, insn)
5806 if (! INSN_P (insn))
5811 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5812 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5814 last_set_in[regno] = INSN_UID (insn);
5815 SET_BIT (reg_set_in_block[bb->index], regno);
5819 pat = PATTERN (insn);
5820 compute_store_table_current_insn = insn;
5821 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
5824 /* Now find the stores. */
5825 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5826 regvec = already_set;
5827 FOR_BB_INSNS (bb, insn)
5829 if (! INSN_P (insn))
5834 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5835 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5836 already_set[regno] = 1;
5839 pat = PATTERN (insn);
5840 note_stores (pat, reg_set_info, NULL);
5842 /* Now that we've marked regs, look for stores. */
5843 find_moveable_store (insn, already_set, last_set_in);
5845 /* Unmark regs that are no longer set. */
5846 compute_store_table_current_insn = insn;
5847 note_stores (pat, reg_clear_last_set, last_set_in);
5850 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5851 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5852 && last_set_in[regno] == INSN_UID (insn))
5853 last_set_in[regno] = 0;
5857 #ifdef ENABLE_CHECKING
5858 /* last_set_in should now be all-zero. */
5859 for (regno = 0; regno < max_gcse_regno; regno++)
5860 gcc_assert (!last_set_in[regno]);
5863 /* Clear temporary marks. */
5864 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5866 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5867 if (ANTIC_STORE_LIST (ptr)
5868 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5869 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5873 /* Remove the stores that are not available anywhere, as there will
5874 be no opportunity to optimize them. */
5875 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5877 ptr = *prev_next_ptr_ptr)
5879 if (!AVAIL_STORE_LIST (ptr))
5881 *prev_next_ptr_ptr = ptr->next;
5882 htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
5883 free_ldst_entry (ptr);
5886 prev_next_ptr_ptr = &ptr->next;
5889 ret = enumerate_ldsts ();
5893 fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
5894 print_ldst_list (dump_file);
5902 /* Check to see if the load X is aliased with STORE_PATTERN.
5903 AFTER is true if we are checking the case when STORE_PATTERN occurs
5907 load_kills_store (const_rtx x, const_rtx store_pattern, int after)
5910 return anti_dependence (x, store_pattern);
5912 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5916 /* Go through the entire insn X, looking for any loads which might alias
5917 STORE_PATTERN. Return true if found.
5918 AFTER is true if we are checking the case when STORE_PATTERN occurs
5919 after the insn X. */
5922 find_loads (const_rtx x, const_rtx store_pattern, int after)
5931 if (GET_CODE (x) == SET)
5936 if (load_kills_store (x, store_pattern, after))
5940 /* Recursively process the insn. */
5941 fmt = GET_RTX_FORMAT (GET_CODE (x));
5943 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5946 ret |= find_loads (XEXP (x, i), store_pattern, after);
5947 else if (fmt[i] == 'E')
5948 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5949 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5955 store_killed_in_pat (const_rtx x, const_rtx pat, int after)
5957 if (GET_CODE (pat) == SET)
5959 rtx dest = SET_DEST (pat);
5961 if (GET_CODE (dest) == ZERO_EXTRACT)
5962 dest = XEXP (dest, 0);
5964 /* Check for memory stores to aliased objects. */
5966 && !expr_equiv_p (dest, x))
5970 if (output_dependence (dest, x))
5975 if (output_dependence (x, dest))
5981 if (find_loads (pat, x, after))
5987 /* Check if INSN kills the store pattern X (is aliased with it).
5988 AFTER is true if we are checking the case when store X occurs
5989 after the insn. Return true if it does. */
5992 store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
5994 const_rtx reg, base, note, pat;
6001 /* A normal or pure call might read from pattern,
6002 but a const call will not. */
6003 if (!RTL_CONST_CALL_P (insn))
6006 /* But even a const call reads its parameters. Check whether the
6007 base of some of registers used in mem is stack pointer. */
6008 for (reg = x_regs; reg; reg = XEXP (reg, 1))
6010 base = find_base_term (XEXP (reg, 0));
6012 || (GET_CODE (base) == ADDRESS
6013 && GET_MODE (base) == Pmode
6014 && XEXP (base, 0) == stack_pointer_rtx))
6021 pat = PATTERN (insn);
6022 if (GET_CODE (pat) == SET)
6024 if (store_killed_in_pat (x, pat, after))
6027 else if (GET_CODE (pat) == PARALLEL)
6031 for (i = 0; i < XVECLEN (pat, 0); i++)
6032 if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
6035 else if (find_loads (PATTERN (insn), x, after))
6038 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
6039 location aliased with X, then this insn kills X. */
6040 note = find_reg_equal_equiv_note (insn);
6043 note = XEXP (note, 0);
6045 /* However, if the note represents a must alias rather than a may
6046 alias relationship, then it does not kill X. */
6047 if (expr_equiv_p (note, x))
6050 /* See if there are any aliased loads in the note. */
6051 return find_loads (note, x, after);
6054 /* Returns true if the expression X is loaded or clobbered on or after INSN
6055 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
6056 or after the insn. X_REGS is list of registers mentioned in X. If the store
6057 is killed, return the last insn in that it occurs in FAIL_INSN. */
6060 store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
6061 int *regs_set_after, rtx *fail_insn)
6063 rtx last = BB_END (bb), act;
6065 if (!store_ops_ok (x_regs, regs_set_after))
6067 /* We do not know where it will happen. */
6069 *fail_insn = NULL_RTX;
6073 /* Scan from the end, so that fail_insn is determined correctly. */
6074 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
6075 if (store_killed_in_insn (x, x_regs, act, false))
6085 /* Returns true if the expression X is loaded or clobbered on or before INSN
6086 within basic block BB. X_REGS is list of registers mentioned in X.
6087 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
6089 store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
6090 int *regs_set_before)
6092 rtx first = BB_HEAD (bb);
6094 if (!store_ops_ok (x_regs, regs_set_before))
6097 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
6098 if (store_killed_in_insn (x, x_regs, insn, true))
6104 /* Fill in available, anticipatable, transparent and kill vectors in
6105 STORE_DATA, based on lists of available and anticipatable stores. */
6107 build_store_vectors (void)
6110 int *regs_set_in_block;
6112 struct ls_expr * ptr;
6115 /* Build the gen_vector. This is any store in the table which is not killed
6116 by aliasing later in its block. */
6117 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
6118 sbitmap_vector_zero (ae_gen, last_basic_block);
6120 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
6121 sbitmap_vector_zero (st_antloc, last_basic_block);
6123 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6125 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6127 insn = XEXP (st, 0);
6128 bb = BLOCK_FOR_INSN (insn);
6130 /* If we've already seen an available expression in this block,
6131 we can delete this one (It occurs earlier in the block). We'll
6132 copy the SRC expression to an unused register in case there
6133 are any side effects. */
6134 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6136 rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
6138 fprintf (dump_file, "Removing redundant store:\n");
6139 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6142 SET_BIT (ae_gen[bb->index], ptr->index);
6145 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6147 insn = XEXP (st, 0);
6148 bb = BLOCK_FOR_INSN (insn);
6149 SET_BIT (st_antloc[bb->index], ptr->index);
6153 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6154 sbitmap_vector_zero (ae_kill, last_basic_block);
6156 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6157 sbitmap_vector_zero (transp, last_basic_block);
6158 regs_set_in_block = XNEWVEC (int, max_gcse_regno);
6162 for (regno = 0; regno < max_gcse_regno; regno++)
6163 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6165 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6167 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6168 bb, regs_set_in_block, NULL))
6170 /* It should not be necessary to consider the expression
6171 killed if it is both anticipatable and available. */
6172 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6173 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6174 SET_BIT (ae_kill[bb->index], ptr->index);
6177 SET_BIT (transp[bb->index], ptr->index);
6181 free (regs_set_in_block);
6185 dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
6186 dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
6187 dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
6188 dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
6192 /* Insert an instruction at the beginning of a basic block, and update
6193 the BB_HEAD if needed. */
6196 insert_insn_start_basic_block (rtx insn, basic_block bb)
6198 /* Insert at start of successor block. */
6199 rtx prev = PREV_INSN (BB_HEAD (bb));
6200 rtx before = BB_HEAD (bb);
6203 if (! LABEL_P (before)
6204 && !NOTE_INSN_BASIC_BLOCK_P (before))
6207 if (prev == BB_END (bb))
6209 before = NEXT_INSN (before);
6212 insn = emit_insn_after_noloc (insn, prev, bb);
6216 fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
6218 print_inline_rtx (dump_file, insn, 6);
6219 fprintf (dump_file, "\n");
6223 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6224 the memory reference, and E is the edge to insert it on. Returns nonzero
6225 if an edge insertion was performed. */
6228 insert_store (struct ls_expr * expr, edge e)
6235 /* We did all the deleted before this insert, so if we didn't delete a
6236 store, then we haven't set the reaching reg yet either. */
6237 if (expr->reaching_reg == NULL_RTX)
6240 if (e->flags & EDGE_FAKE)
6243 reg = expr->reaching_reg;
6244 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6246 /* If we are inserting this expression on ALL predecessor edges of a BB,
6247 insert it at the start of the BB, and reset the insert bits on the other
6248 edges so we don't try to insert it on the other edges. */
6250 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6251 if (!(tmp->flags & EDGE_FAKE))
6253 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6255 gcc_assert (index != EDGE_INDEX_NO_EDGE);
6256 if (! TEST_BIT (pre_insert_map[index], expr->index))
6260 /* If tmp is NULL, we found an insertion on every edge, blank the
6261 insertion vector for these edges, and insert at the start of the BB. */
6262 if (!tmp && bb != EXIT_BLOCK_PTR)
6264 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6266 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6267 RESET_BIT (pre_insert_map[index], expr->index);
6269 insert_insn_start_basic_block (insn, bb);
6273 /* We can't put stores in the front of blocks pointed to by abnormal
6274 edges since that may put a store where one didn't used to be. */
6275 gcc_assert (!(e->flags & EDGE_ABNORMAL));
6277 insert_insn_on_edge (insn, e);
6281 fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6282 e->src->index, e->dest->index);
6283 print_inline_rtx (dump_file, insn, 6);
6284 fprintf (dump_file, "\n");
6290 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6291 memory location in SMEXPR set in basic block BB.
6293 This could be rather expensive. */
6296 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6298 edge_iterator *stack, ei;
6301 sbitmap visited = sbitmap_alloc (last_basic_block);
6302 rtx last, insn, note;
6303 rtx mem = smexpr->pattern;
6305 stack = XNEWVEC (edge_iterator, n_basic_blocks);
6307 ei = ei_start (bb->succs);
6309 sbitmap_zero (visited);
6311 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6319 sbitmap_free (visited);
6322 act = ei_edge (stack[--sp]);
6326 if (bb == EXIT_BLOCK_PTR
6327 || TEST_BIT (visited, bb->index))
6331 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6334 SET_BIT (visited, bb->index);
6336 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6338 for (last = ANTIC_STORE_LIST (smexpr);
6339 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6340 last = XEXP (last, 1))
6342 last = XEXP (last, 0);
6345 last = NEXT_INSN (BB_END (bb));
6347 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6350 note = find_reg_equal_equiv_note (insn);
6351 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6355 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6357 remove_note (insn, note);
6362 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6364 if (EDGE_COUNT (bb->succs) > 0)
6368 ei = ei_start (bb->succs);
6369 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6374 /* This routine will replace a store with a SET to a specified register. */
6377 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6379 rtx insn, mem, note, set, ptr, pair;
6381 mem = smexpr->pattern;
6382 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6384 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6385 if (XEXP (ptr, 0) == del)
6387 XEXP (ptr, 0) = insn;
6391 /* Move the notes from the deleted insn to its replacement, and patch
6392 up the LIBCALL notes. */
6393 REG_NOTES (insn) = REG_NOTES (del);
6395 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6398 pair = XEXP (note, 0);
6399 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6400 XEXP (note, 0) = insn;
6402 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6405 pair = XEXP (note, 0);
6406 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6407 XEXP (note, 0) = insn;
6410 /* Emit the insn AFTER all the notes are transferred.
6411 This is cheaper since we avoid df rescanning for the note change. */
6412 insn = emit_insn_after (insn, del);
6417 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6418 print_inline_rtx (dump_file, del, 6);
6419 fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n ");
6420 print_inline_rtx (dump_file, insn, 6);
6421 fprintf (dump_file, "\n");
6426 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
6427 they are no longer accurate provided that they are reached by this
6428 definition, so drop them. */
6429 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6432 set = single_set (insn);
6435 if (expr_equiv_p (SET_DEST (set), mem))
6437 note = find_reg_equal_equiv_note (insn);
6438 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6442 fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6444 remove_note (insn, note);
6446 remove_reachable_equiv_notes (bb, smexpr);
6450 /* Delete a store, but copy the value that would have been stored into
6451 the reaching_reg for later storing. */
6454 delete_store (struct ls_expr * expr, basic_block bb)
6458 if (expr->reaching_reg == NULL_RTX)
6459 expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);
6461 reg = expr->reaching_reg;
6463 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6466 if (BLOCK_FOR_INSN (del) == bb)
6468 /* We know there is only one since we deleted redundant
6469 ones during the available computation. */
6470 replace_store_insn (reg, del, bb, expr);
6476 /* Free memory used by store motion. */
6479 free_store_memory (void)
6484 sbitmap_vector_free (ae_gen);
6486 sbitmap_vector_free (ae_kill);
6488 sbitmap_vector_free (transp);
6490 sbitmap_vector_free (st_antloc);
6492 sbitmap_vector_free (pre_insert_map);
6494 sbitmap_vector_free (pre_delete_map);
6495 if (reg_set_in_block)
6496 sbitmap_vector_free (reg_set_in_block);
6498 ae_gen = ae_kill = transp = st_antloc = NULL;
6499 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6502 /* Perform store motion. Much like gcse, except we move expressions the
6503 other way by looking at the flowgraph in reverse. */
6510 struct ls_expr * ptr;
6511 int update_flow = 0;
6515 fprintf (dump_file, "before store motion\n");
6516 print_rtl (dump_file, get_insns ());
6519 init_alias_analysis ();
6521 /* Find all the available and anticipatable stores. */
6522 num_stores = compute_store_table ();
6523 if (num_stores == 0)
6525 htab_delete (pre_ldst_table);
6526 pre_ldst_table = NULL;
6527 sbitmap_vector_free (reg_set_in_block);
6528 end_alias_analysis ();
6532 /* Now compute kill & transp vectors. */
6533 build_store_vectors ();
6534 add_noreturn_fake_exit_edges ();
6535 connect_infinite_loops_to_exit ();
6537 edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
6538 st_antloc, ae_kill, &pre_insert_map,
6541 /* Now we want to insert the new stores which are going to be needed. */
6542 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6544 /* If any of the edges we have above are abnormal, we can't move this
6546 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6547 if (TEST_BIT (pre_insert_map[x], ptr->index)
6548 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6553 if (dump_file != NULL)
6555 "Can't replace store %d: abnormal edge from %d to %d\n",
6556 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6557 INDEX_EDGE (edge_list, x)->dest->index);
6561 /* Now we want to insert the new stores which are going to be needed. */
6564 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6565 delete_store (ptr, bb);
6567 for (x = 0; x < NUM_EDGES (edge_list); x++)
6568 if (TEST_BIT (pre_insert_map[x], ptr->index))
6569 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6573 commit_edge_insertions ();
6575 free_store_memory ();
6576 free_edge_list (edge_list);
6577 remove_fake_exit_edges ();
6578 end_alias_analysis ();
6582 /* Entry point for jump bypassing optimization pass. */
6589 /* We do not construct an accurate cfg in functions which call
6590 setjmp, so just punt to be safe. */
6591 if (cfun->calls_setjmp)
6594 /* Identify the basic block information for this function, including
6595 successors and predecessors. */
6596 max_gcse_regno = max_reg_num ();
6599 dump_flow_info (dump_file, dump_flags);
6601 /* Return if there's nothing to do, or it is too expensive. */
6602 if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
6603 || is_too_expensive (_ ("jump bypassing disabled")))
6606 gcc_obstack_init (&gcse_obstack);
6609 /* We need alias. */
6610 init_alias_analysis ();
6612 /* Record where pseudo-registers are set. This data is kept accurate
6613 during each pass. ??? We could also record hard-reg information here
6614 [since it's unchanging], however it is currently done during hash table
6617 It may be tempting to compute MEM set information here too, but MEM sets
6618 will be subject to code motion one day and thus we need to compute
6619 information about memory sets when we build the hash tables. */
6621 alloc_reg_set_mem (max_gcse_regno);
6624 max_gcse_regno = max_reg_num ();
6626 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
6631 fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
6632 current_function_name (), n_basic_blocks);
6633 fprintf (dump_file, "%d bytes\n\n", bytes_used);
6636 obstack_free (&gcse_obstack, NULL);
6637 free_reg_set_mem ();
6639 /* We are finished with alias. */
6640 end_alias_analysis ();
6645 /* Return true if the graph is too expensive to optimize. PASS is the
6646 optimization about to be performed. */
6649 is_too_expensive (const char *pass)
6651 /* Trying to perform global optimizations on flow graphs which have
6652 a high connectivity will take a long time and is unlikely to be
6653 particularly useful.
6655 In normal circumstances a cfg should have about twice as many
6656 edges as blocks. But we do not want to punish small functions
6657 which have a couple switch statements. Rather than simply
6658 threshold the number of blocks, uses something with a more
6659 graceful degradation. */
6660 if (n_edges > 20000 + n_basic_blocks * 4)
6662 warning (OPT_Wdisabled_optimization,
6663 "%s: %d basic blocks and %d edges/basic block",
6664 pass, n_basic_blocks, n_edges / n_basic_blocks);
6669 /* If allocating memory for the cprop bitmap would take up too much
6670 storage it's better just to disable the optimization. */
6672 * SBITMAP_SET_SIZE (max_reg_num ())
6673 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6675 warning (OPT_Wdisabled_optimization,
6676 "%s: %d basic blocks and %d registers",
6677 pass, n_basic_blocks, max_reg_num ());
6686 gate_handle_jump_bypass (void)
6688 return optimize > 0 && flag_gcse
6689 && dbg_cnt (jump_bypass);
6692 /* Perform jump bypassing and control flow optimizations. */
6694 rest_of_handle_jump_bypass (void)
6696 delete_unreachable_blocks ();
6697 if (bypass_jumps ())
6699 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6700 rebuild_jump_labels (get_insns ());
6706 struct rtl_opt_pass pass_jump_bypass =
6710 "bypass", /* name */
6711 gate_handle_jump_bypass, /* gate */
6712 rest_of_handle_jump_bypass, /* execute */
6715 0, /* static_pass_number */
6716 TV_BYPASS, /* tv_id */
6717 0, /* properties_required */
6718 0, /* properties_provided */
6719 0, /* properties_destroyed */
6720 0, /* todo_flags_start */
6722 TODO_ggc_collect | TODO_verify_flow /* todo_flags_finish */
6728 gate_handle_gcse (void)
6730 return optimize > 0 && flag_gcse
6736 rest_of_handle_gcse (void)
6738 int save_csb, save_cfj;
6740 tem = gcse_main (get_insns ());
6741 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6742 rebuild_jump_labels (get_insns ());
6743 save_csb = flag_cse_skip_blocks;
6744 save_cfj = flag_cse_follow_jumps;
6745 flag_cse_skip_blocks = flag_cse_follow_jumps = 0;
6747 /* If -fexpensive-optimizations, re-run CSE to clean up things done
6749 if (flag_expensive_optimizations)
6751 timevar_push (TV_CSE);
6752 tem2 = cse_main (get_insns (), max_reg_num ());
6753 df_finish_pass (false);
6754 purge_all_dead_edges ();
6755 delete_trivially_dead_insns (get_insns (), max_reg_num ());
6756 timevar_pop (TV_CSE);
6757 cse_not_expected = !flag_rerun_cse_after_loop;
6760 /* If gcse or cse altered any jumps, rerun jump optimizations to clean
6762 if (tem || tem2 == 2)
6764 timevar_push (TV_JUMP);
6765 rebuild_jump_labels (get_insns ());
6767 timevar_pop (TV_JUMP);
6772 flag_cse_skip_blocks = save_csb;
6773 flag_cse_follow_jumps = save_cfj;
6777 struct rtl_opt_pass pass_gcse =
6782 gate_handle_gcse, /* gate */
6783 rest_of_handle_gcse, /* execute */
6786 0, /* static_pass_number */
6787 TV_GCSE, /* tv_id */
6788 0, /* properties_required */
6789 0, /* properties_provided */
6790 0, /* properties_destroyed */
6791 0, /* todo_flags_start */
6792 TODO_df_finish | TODO_verify_rtl_sharing |
6794 TODO_verify_flow | TODO_ggc_collect /* todo_flags_finish */
6799 #include "gt-gcse.h"