/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"
#include "tree-pass.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
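   For illustration (a source-level sketch, not taken from this file): given

       x = a + b;
       c = a;
       y = c + b;

   the first copy propagation pass rewrites `c + b' as `a + b', turning
   the two additions into a common subexpression GCSE can eliminate; the
   second pass then propagates away the copy that GCSE introduces when it
   reuses the first computation.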
   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L.T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use; a small sketch of steps 3-5 follows.
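   For illustration, the effect of steps 3-5 on a tiny source-level
   fragment (not taken from this file):

       before PRE:                    after PRE:

         if (p)                         if (p)
           x = a + b;                     { t = a + b; x = t; }
         ...                            else
         z = a + b;                       t = a + b;
                                        ...
                                        z = t;

   The computation of `a + b' after the join is partially redundant; the
   copy inserted on the other path (step 4) makes it fully redundant, so
   it can be deleted and replaced by a copy from the new pseudo `t'
   (steps 3 and 5).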
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
/* GCSE global vars.  */

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* Index in the available expression bitmaps.  */
  unsigned int bitmap_index;

  /* Next entry with the same hash.  */
  struct expr *next_same_hash;

  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;

  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;

  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;

  /* The insn that computes the expression.  */
  rtx insn;

  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;

  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression table or the copy propagation
     table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
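/* For illustration, a minimal sketch (excluded from compilation; the real
   helpers are lookup_set and next_set, declared below) of how a bucket
   array with `next_same_hash' chaining is probed.  */
#if 0
static struct expr *
example_lookup_expr (struct hash_table *table, unsigned int bucket, rtx x)
{
  struct expr *e;

  /* All entries whose hash collided sit on one chain; walk it and
     compare for structural equivalence.  */
  for (e = table->table[bucket]; e != NULL; e = e->next_same_hash)
    if (expr_equiv_p (e->expr, x))
      return e;
  return NULL;
}
#endif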
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;

  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
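/* For illustration (hypothetical helper, excluded from compilation): the
   per-register lists above mean a consumer touches only the blocks where
   a pseudo is actually set, instead of testing a bit for every block.  */
#if 0
static void
example_mark_blocks_setting_reg (int regno, sbitmap blocks)
{
  struct reg_set *r;

  /* One list node per recorded set of REGNO; cost is proportional to
     the number of sets, not the number of basic blocks.  */
  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    SET_BIT (blocks, r->bb_index);
}
#endif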
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself.  (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t pre_ldst_table = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, const_rtx, void *);
static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (const_rtx);
static int oprs_unchanged_p (const_rtx, const_rtx, int);
static int oprs_anticipatable_p (const_rtx, const_rtx);
static int oprs_available_p (const_rtx, const_rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (const_rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (const_rtx, const_rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, const_rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (const_rtx, const_rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (const_rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, const_rtx, void *);
static int load_killed_in_block_p (const_basic_block, int, const_rtx, int);
static void canon_list_insert (rtx, const_rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (const_rtx, const_edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_basic_block (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (const_rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, const_rtx, void *);
static void reg_clear_last_set (rtx, const_rtx, void *);
static bool store_ops_ok (const_rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (const_rtx, const_rtx, int);
static bool find_loads (const_rtx, const_rtx, int);
static bool store_killed_in_insn (const_rtx, const_rtx, const_rtx, int);
static bool store_killed_after (const_rtx, const_rtx, const_rtx,
				const_basic_block, int *, rtx *);
static bool store_killed_before (const_rtx, const_rtx, const_rtx,
				 const_basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_basic_block (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

static int
gcse_main (rtx f ATTRIBUTE_UNUSED)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  df_note_add_problem ();
  df_analyze ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (dump_file)
	fprintf (dump_file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem ();

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, false, false);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
	/* Do nothing.  */ ;
      else
	{
	  timevar_push (TV_PRE);
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets ();
	  run_jump_opt_after_gcse = 1;
	  timevar_pop (TV_PRE);
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
	{
	  timevar_push (TV_HOIST);
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem ();
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	  timevar_pop (TV_HOIST);
	}

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  fflush (dump_file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();

  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, true, true);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (dump_file)
    {
      fprintf (dump_file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
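/* The lazy one-shot initialization above is a common shape; for
   illustration, the same pattern in isolation (hypothetical names,
   excluded from compilation).  */
#if 0
static int
example_cached_query (int key)
{
  static bool initialized = false;
  static int cache[256];

  if (! initialized)
    {
      example_fill_cache (cache);	/* expensive, runs once */
      initialized = true;
    }
  return cache[key];
}
#endif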
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */
static void
alloc_gcse_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.
     (Actually, there are gaps, for insns that are not inside a basic block,
     but we should never see those anyway, so this is OK.)  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  max_cuid = i;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}
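/* For illustration, the UID -> CUID compaction in isolation (made-up
   names, excluded from compilation): given insn UIDs in stream order,
   possibly with gaps, assign consecutive ordinals so ordering tests
   become simple integer comparisons.  */
#if 0
static void
example_compact_ids (const int *uids_in_stream_order, int n, int *uid_to_cuid)
{
  int i;

  /* uid_to_cuid must have room for the largest UID + 1 entries.  */
  for (i = 0; i < n; i++)
    uid_to_cuid[uids_in_stream_order[i]] = i;	/* gap-free and monotonic */
}
#endif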
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
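/* A self-contained toy (not GCC code, excluded from compilation) showing
   what the three local properties mean for one expression in one block.
   Each statement either computes the expression or clobbers one of its
   operands.  */
#if 0
#include <stdbool.h>

enum toy_stmt { COMPUTES_EXPR, CLOBBERS_OPERAND, OTHER_STMT };

static void
example_local_props (const enum toy_stmt *stmts, int n,
		     bool *antloc, bool *comp, bool *transp)
{
  bool clobbered_before_first = false, seen = false;
  int i, last = -1;

  *transp = true;	/* killed by any operand clobber in the block */
  *antloc = false;
  for (i = 0; i < n; i++)
    {
      if (stmts[i] == CLOBBERS_OPERAND)
	{
	  *transp = false;
	  if (! seen)
	    clobbered_before_first = true;
	}
      else if (stmts[i] == COMPUTES_EXPR)
	{
	  /* Anticipatable: first occurrence, operands still intact.  */
	  if (! seen && ! clobbered_before_first)
	    *antloc = true;
	  seen = true;
	  last = i;
	}
    }

  /* Available: computed, and operands unchanged after the last
     occurrence.  */
  *comp = seen;
  for (i = last + 1; i < n && *comp; i++)
    if (stmts[i] == CLOBBERS_OPERAND)
      *comp = false;
}
#endif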
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (void)
{
  basic_block bb;
  rtx insn;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
#ifdef STACK_REGS
  /* On register stack architectures, don't GCSE constants from the
     constant pool, as the benefits are often swamped by the overhead
     of shuffling the register stack between basic blocks.  */
  if (IS_STACK_MODE (GET_MODE (x)))
    x = avoid_constant_pool_reference (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}
/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (const_rtx x, const_rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
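/* The last-call-into-iteration trick used above, shown in a generic,
   self-contained form (toy list type, excluded from compilation).  */
#if 0
struct toy_node { struct toy_node *next; int value; };

static int
example_all_positive (const struct toy_node *n)
{
  /* Recursive shape:
       return n == NULL || (n->value > 0 && example_all_positive (n->next));
     The self-call is the last work at each level, so it can be replaced
     by a loop that reuses the current stack frame.  */
  while (n != NULL)
    {
      if (n->value <= 0)
	return 0;
      n = n->next;
    }
  return 1;
}
#endif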
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static const_rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x,
			int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a readonly then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (const_rtx x, const_rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (const_rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
		   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (const_rtx x, const_rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
		      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	antic_occr = NULL;

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  antic_occr->insn = insn;
	  antic_occr->next = cur_expr->antic_occr;
	  antic_occr->deleted_p = 0;
	  cur_expr->antic_occr = antic_occr;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  avail_occr->insn = insn;
	  avail_occr->next = cur_expr->avail_occr;
	  avail_occr->deleted_p = 0;
	  cur_expr->avail_occr = avail_occr;
	}
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (const_rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers a constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* See if a REG_NOTE shows this equivalent to a simpler expression.
	 This allows us to do a single GCSE pass and still eliminate
	 redundant constants, addresses or other expressions that are
	 constructed with multiple instructions.  */
      note = find_reg_equal_equiv_note (insn);
      if (note != 0
	  && (table->set_p
	      ? gcse_constant_p (XEXP (note, 0))
	      : want_to_gcse_p (XEXP (note, 0))))
	src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (dest))
	  /* GCSE commonly inserts instruction after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && (note == NULL_RTX || ! MEM_P (XEXP (note, 0))))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn or if this is not the only SET in
	     this insn.  The latter condition does not have to mean that
	     SRC itself is not anticipatable, but we just will not be
	     able to handle code motion of insns with multiple sets.  */
	  int antic_p = oprs_anticipatable_p (src, insn)
			&& !multiple_sets (insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = (oprs_available_p (src, insn)
			 && ! JUMP_P (insn));

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p,
				avail_p, table);
	}

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((REG_P (src)
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p (GET_MODE (dest))
		    && REGNO (src) != regno)
		   || gcse_constant_p (src))
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BB_END (BLOCK_FOR_INSN (insn))
		   || (tmp = next_nonnote_insn (insn)) == NULL_RTX
		   || BLOCK_FOR_INSN (tmp) != BLOCK_FOR_INSN (insn)
		   || oprs_available_p (pat, tmp)))
	insert_set_in_table (pat, insn, table);
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (src))
	  /* GCSE commonly inserts instruction after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_DEST something we want to gcse?  */
	  && want_to_gcse_p (dest)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || ! MEM_P (XEXP (note, 0))))
	{
	  /* Stores are never anticipatable.  */
	  int antic_p = 0;
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = oprs_available_p (dest, insn)
			&& ! JUMP_P (insn);

	  /* Record the memory expression (DEST) in the hash table.  */
	  insert_expr_in_table (dest, GET_MODE (dest), insn,
				antic_p, avail_p, table);
	}
    }
}
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  hash_scan_set (x, insn, table);
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn, table);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}
/* Dump the hash table TABLE to file FILE under the name NAME.  */

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d)\n  ",
		 expr->bitmap_index, hash_val[i]);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
1896 /* Record register first/last/block set information for REGNO in INSN.
1898 first_set records the first place in the block where the register
1899 is set and is used to compute "anticipatability".
1901 last_set records the last place in the block where the register
1902 is set and is used to compute "availability".
1904 last_bb records the block for which first_set and last_set are
1905 valid, as a quick test to invalidate them.
1907 reg_set_in_block records whether the register is set in the block
1908 and is used to compute "transparency". */
1911 record_last_reg_set_info (rtx insn, int regno)
1913 struct reg_avail_info *info = &reg_avail_info[regno];
1914 int cuid = INSN_CUID (insn);
1916 info->last_set = cuid;
1917 if (info->last_bb != current_bb)
1919 info->last_bb = current_bb;
1920 info->first_set = cuid;
1921 SET_BIT (reg_set_in_block[current_bb->index], regno);
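/* A minimal standalone sketch (hypothetical, not part of the pass) of the
   first/last-set bookkeeping above: the first store seen for a register in
   a new block initializes both FIRST_SET and LAST_SET, while later stores
   in the same block only advance LAST_SET. */

struct toy_avail_info { int first_set, last_set, last_bb; };

static void
toy_record_set (struct toy_avail_info *info, int cuid, int bb_index)
{
  info->last_set = cuid;
  if (info->last_bb != bb_index)
    {
      /* First set of this register seen in BB_INDEX. */
      info->last_bb = bb_index;
      info->first_set = cuid;
    }
}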
1926 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1927 Note we store a pair of elements in the list, so they have to be
1928 taken off pairwise. */
1931 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx unused1 ATTRIBUTE_UNUSED,
1934 rtx dest_addr, insn;
1937 while (GET_CODE (dest) == SUBREG
1938 || GET_CODE (dest) == ZERO_EXTRACT
1939 || GET_CODE (dest) == STRICT_LOW_PART)
1940 dest = XEXP (dest, 0);
1942 /* If DEST is not a MEM, then it will not conflict with a load. Note
1943 that function calls are assumed to clobber memory, but are handled elsewhere. */
1949 dest_addr = get_addr (XEXP (dest, 0));
1950 dest_addr = canon_rtx (dest_addr);
1951 insn = (rtx) v_insn;
1952 bb = BLOCK_NUM (insn);
1954 canon_modify_mem_list[bb] =
1955 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
1956 canon_modify_mem_list[bb] =
1957 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
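/* Hedged reader-side sketch (the helper name is hypothetical): entries
   pushed by canon_list_insert come back off the list pairwise, first the
   MEM, then its canonicalized address, exactly as compute_transp consumes
   them below. */

static void
toy_walk_canon_pairs (rtx list_entry, void (*fn) (rtx, rtx))
{
  rtx dest, dest_addr;

  while (list_entry != NULL_RTX)
    {
      dest = XEXP (list_entry, 0);
      list_entry = XEXP (list_entry, 1);
      dest_addr = XEXP (list_entry, 0);
      list_entry = XEXP (list_entry, 1);
      fn (dest, dest_addr);
    }
}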
1960 /* Record memory modification information for INSN. We do not actually care
1961 about the memory location(s) that are set, or even how they are set (consider
1962 a CALL_INSN). We merely need to record which insns modify memory. */
1965 record_last_mem_set_info (rtx insn)
1967 int bb = BLOCK_NUM (insn);
1969 /* load_killed_in_block_p will handle the case of calls clobbering everything. */
1971 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
1972 bitmap_set_bit (modify_mem_list_set, bb);
1976 /* Note that traversals of this loop (other than for free-ing)
1977 will break after encountering a CALL_INSN. So, there's no
1978 need to insert a pair of items, as canon_list_insert does. */
1979 canon_modify_mem_list[bb] =
1980 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
1981 bitmap_set_bit (blocks_with_calls, bb);
1984 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
1987 /* Called from compute_hash_table via note_stores to handle one
1988 SET or CLOBBER in an insn. DATA is really the instruction in which
1989 the SET is taking place. */
1992 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
1994 rtx last_set_insn = (rtx) data;
1996 if (GET_CODE (dest) == SUBREG)
1997 dest = SUBREG_REG (dest);
2000 record_last_reg_set_info (last_set_insn, REGNO (dest));
2001 else if (MEM_P (dest)
2002 /* Ignore pushes, they clobber nothing. */
2003 && ! push_operand (dest, GET_MODE (dest)))
2004 record_last_mem_set_info (last_set_insn);
2007 /* Top level function to create an expression or assignment hash table.
2009 Expression entries are placed in the hash table if
2010 - they are of the form (set (pseudo-reg) src),
2011 - src is something we want to perform GCSE on,
2012 - none of the operands are subsequently modified in the block
2014 Assignment entries are placed in the hash table if
2015 - they are of the form (set (pseudo-reg) src),
2016 - src is something we want to perform const/copy propagation on,
2017 - none of the operands or target are subsequently modified in the block
2019 Currently src must be a pseudo-reg or a const_int.
2021 TABLE is the table computed. */
2024 compute_hash_table_work (struct hash_table *table)
2028 /* While we compute the hash table we also compute a bit array of which
2029 registers are set in which blocks.
2030 ??? This isn't needed during const/copy propagation, but it's cheap to compute. Later. */
2032 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2034 /* Re-cache any INSN_LIST nodes we have allocated. */
2035 clear_modify_mem_tables ();
2036 /* Some working arrays used to track first and last set in each block. */
2037 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2039 for (i = 0; i < max_gcse_regno; ++i)
2040 reg_avail_info[i].last_bb = NULL;
2042 FOR_EACH_BB (current_bb)
2046 int in_libcall_block;
2048 /* First pass over the instructions records information used to
2049 determine when registers and memory are first and last set.
2050 ??? hard-reg reg_set_in_block computation
2051 could be moved to compute_sets since they currently don't change. */
2053 FOR_BB_INSNS (current_bb, insn)
2055 if (! INSN_P (insn))
2060 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2061 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2062 record_last_reg_set_info (insn, regno);
2067 note_stores (PATTERN (insn), record_last_set_info, insn);
2070 /* Insert implicit sets in the hash table. */
2072 && implicit_sets[current_bb->index] != NULL_RTX)
2073 hash_scan_set (implicit_sets[current_bb->index],
2074 BB_HEAD (current_bb), table);
2076 /* The next pass builds the hash table. */
2077 in_libcall_block = 0;
2078 FOR_BB_INSNS (current_bb, insn)
2081 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2082 in_libcall_block = 1;
2083 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2084 in_libcall_block = 0;
2085 hash_scan_insn (insn, table, in_libcall_block);
2086 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2087 in_libcall_block = 0;
2091 free (reg_avail_info);
2092 reg_avail_info = NULL;
2095 /* Allocate space for the set/expr hash TABLE.
2096 N_INSNS is the number of instructions in the function.
2097 It is used to determine the number of buckets to use.
2098 SET_P determines whether the set or the expression hash table will be created. */
2102 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2106 table->size = n_insns / 4;
2107 if (table->size < 11)
2110 /* Attempt to maintain efficient use of hash table.
2111 Making it an odd number is simplest for now.
2112 ??? Later take some measurements. */
2114 n = table->size * sizeof (struct expr *);
2115 table->table = gmalloc (n);
2116 table->set_p = set_p;
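/* A minimal model of the sizing policy above (the helper name is
   hypothetical; it assumes the elided lines clamp the bucket count to 11
   and then force it odd, per the comment above). For example, n_insns = 16
   gives 11 buckets, 100 gives 25, and 4000 gives 1001. */

static int
toy_bucket_count (int n_insns)
{
  int size = n_insns / 4;
  if (size < 11)
    size = 11;
  /* An odd bucket count keeps simple modular hashing well spread. */
  return size | 1;
}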
2119 /* Free things allocated by alloc_hash_table. */
2122 free_hash_table (struct hash_table *table)
2124 free (table->table);
2127 /* Compute the hash TABLE, used either for copy/const propagation or as
2128 the expression hash table. */
2131 compute_hash_table (struct hash_table *table)
2133 /* Initialize count of number of entries in hash table. */
2135 memset (table->table, 0, table->size * sizeof (struct expr *));
2137 compute_hash_table_work (table);
2140 /* Expression tracking support. */
2142 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2143 table entry, or NULL if not found. */
2145 static struct expr *
2146 lookup_set (unsigned int regno, struct hash_table *table)
2148 unsigned int hash = hash_set (regno, table->size);
2151 expr = table->table[hash];
2153 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2154 expr = expr->next_same_hash;
2159 /* Return the next entry for REGNO in list EXPR. */
2161 static struct expr *
2162 next_set (unsigned int regno, struct expr *expr)
2165 do expr = expr->next_same_hash;
2166 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
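/* Hedged usage sketch (hypothetical helper, not called anywhere): walking
   every set of REGNO with the two routines above, the same chain traversal
   that find_avail_set and find_bypass_set perform below. AVAIL is any
   per-block availability bitmap, e.g. cprop_avin for some block. */

static struct expr *
toy_first_available_set (unsigned int regno, sbitmap avail)
{
  struct expr *set;

  for (set = lookup_set (regno, &set_hash_table);
       set != NULL;
       set = next_set (regno, set))
    if (TEST_BIT (avail, set->bitmap_index))
      return set;

  return NULL;
}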
2171 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2172 types may be mixed. */
2175 free_insn_expr_list_list (rtx *listp)
2179 for (list = *listp; list ; list = next)
2181 next = XEXP (list, 1);
2182 if (GET_CODE (list) == EXPR_LIST)
2183 free_EXPR_LIST_node (list);
2185 free_INSN_LIST_node (list);
2191 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2193 clear_modify_mem_tables (void)
2198 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2200 free_INSN_LIST_list (modify_mem_list + i);
2201 free_insn_expr_list_list (canon_modify_mem_list + i);
2203 bitmap_clear (modify_mem_list_set);
2204 bitmap_clear (blocks_with_calls);
2207 /* Release memory used by modify_mem_list_set. */
2210 free_modify_mem_tables (void)
2212 clear_modify_mem_tables ();
2213 free (modify_mem_list);
2214 free (canon_modify_mem_list);
2215 modify_mem_list = 0;
2216 canon_modify_mem_list = 0;
2219 /* Reset tables used to keep track of what's still available [since the
2220 start of the block]. */
2223 reset_opr_set_tables (void)
2225 /* Maintain a bitmap of which regs have been set since beginning of the block. */
2227 CLEAR_REG_SET (reg_set_bitmap);
2229 /* Also keep a record of the last instruction to modify memory.
2230 For now this is very trivial, we only record whether any memory
2231 location has been modified. */
2232 clear_modify_mem_tables ();
2235 /* Return nonzero if the operands of X are not set before INSN in
2236 INSN's basic block. */
2239 oprs_not_set_p (const_rtx x, const_rtx insn)
2248 code = GET_CODE (x);
2265 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2266 INSN_CUID (insn), x, 0))
2269 return oprs_not_set_p (XEXP (x, 0), insn);
2272 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2278 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2282 /* If we are about to do the last recursive call
2283 needed at this level, change it into iteration.
2284 This function is called enough to be worth it. */
2286 return oprs_not_set_p (XEXP (x, i), insn);
2288 if (! oprs_not_set_p (XEXP (x, i), insn))
2291 else if (fmt[i] == 'E')
2292 for (j = 0; j < XVECLEN (x, i); j++)
2293 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
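/* Hedged illustration of the manual tail-call elimination used by this
   walker and the similar ones below: when only operand 0 remains, loop
   back to the top instead of recursing. TOY_WALK and its leaf test are
   hypothetical stand-ins for the real predicates. */

static int
toy_walk (rtx x)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

 repeat:
  code = GET_CODE (x);
  if (code == REG || code == CONST_INT)
    /* Leaf: stands in for the real per-node test. */
    return 1;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (i == 0)
          {
            /* Tail position: iterate rather than recurse. */
            x = XEXP (x, 0);
            goto repeat;
          }
        else if (! toy_walk (XEXP (x, i)))
          return 0;
      }
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        if (! toy_walk (XVECEXP (x, i, j)))
          return 0;

  return 1;
}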
2300 /* Mark things set by a CALL. */
2303 mark_call (rtx insn)
2305 if (! CONST_OR_PURE_CALL_P (insn))
2306 record_last_mem_set_info (insn);
2309 /* Mark things set by a SET. */
2312 mark_set (rtx pat, rtx insn)
2314 rtx dest = SET_DEST (pat);
2316 while (GET_CODE (dest) == SUBREG
2317 || GET_CODE (dest) == ZERO_EXTRACT
2318 || GET_CODE (dest) == STRICT_LOW_PART)
2319 dest = XEXP (dest, 0);
2322 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2323 else if (MEM_P (dest))
2324 record_last_mem_set_info (insn);
2326 if (GET_CODE (SET_SRC (pat)) == CALL)
2330 /* Record things set by a CLOBBER. */
2333 mark_clobber (rtx pat, rtx insn)
2335 rtx clob = XEXP (pat, 0);
2337 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2338 clob = XEXP (clob, 0);
2341 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2343 record_last_mem_set_info (insn);
2346 /* Record things set by INSN.
2347 This data is used by oprs_not_set_p. */
2350 mark_oprs_set (rtx insn)
2352 rtx pat = PATTERN (insn);
2355 if (GET_CODE (pat) == SET)
2356 mark_set (pat, insn);
2357 else if (GET_CODE (pat) == PARALLEL)
2358 for (i = 0; i < XVECLEN (pat, 0); i++)
2360 rtx x = XVECEXP (pat, 0, i);
2362 if (GET_CODE (x) == SET)
2364 else if (GET_CODE (x) == CLOBBER)
2365 mark_clobber (x, insn);
2366 else if (GET_CODE (x) == CALL)
2370 else if (GET_CODE (pat) == CLOBBER)
2371 mark_clobber (pat, insn);
2372 else if (GET_CODE (pat) == CALL)
2377 /* Compute copy/constant propagation working variables. */
2379 /* Local properties of assignments. */
2380 static sbitmap *cprop_pavloc;
2381 static sbitmap *cprop_absaltered;
2383 /* Global properties of assignments (computed from the local properties). */
2384 static sbitmap *cprop_avin;
2385 static sbitmap *cprop_avout;
2387 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2388 basic blocks. N_SETS is the number of sets. */
2391 alloc_cprop_mem (int n_blocks, int n_sets)
2393 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2394 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2396 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2397 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2400 /* Free vars used by copy/const propagation. */
2403 free_cprop_mem (void)
2405 sbitmap_vector_free (cprop_pavloc);
2406 sbitmap_vector_free (cprop_absaltered);
2407 sbitmap_vector_free (cprop_avin);
2408 sbitmap_vector_free (cprop_avout);
2411 /* For each block, compute whether X is transparent. X is either an
2412 expression or an assignment [though we don't care which, for this context
2413 an assignment is treated as an expression]. For each block where an
2414 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
2418 compute_transp (const_rtx x, int indx, sbitmap *bmap, int set_p)
2426 /* repeat is used to turn tail-recursion into iteration since GCC
2427 can't do it when there's no return value. */
2433 code = GET_CODE (x);
2439 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2442 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2443 SET_BIT (bmap[bb->index], indx);
2447 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2448 SET_BIT (bmap[r->bb_index], indx);
2453 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2456 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2457 RESET_BIT (bmap[bb->index], indx);
2461 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2462 RESET_BIT (bmap[r->bb_index], indx);
2469 if (! MEM_READONLY_P (x))
2474 /* First handle all the blocks with calls. We don't need to
2475 do any list walking for them. */
2476 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2479 SET_BIT (bmap[bb_index], indx);
2481 RESET_BIT (bmap[bb_index], indx);
2484 /* Now iterate over the blocks which have memory modifications
2485 but which do not have any calls. */
2486 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
2490 rtx list_entry = canon_modify_mem_list[bb_index];
2494 rtx dest, dest_addr;
2496 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2497 Examine each hunk of memory that is modified. */
2499 dest = XEXP (list_entry, 0);
2500 list_entry = XEXP (list_entry, 1);
2501 dest_addr = XEXP (list_entry, 0);
2503 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2504 x, rtx_addr_varies_p))
2507 SET_BIT (bmap[bb_index], indx);
2509 RESET_BIT (bmap[bb_index], indx);
2512 list_entry = XEXP (list_entry, 1);
2537 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2541 /* If we are about to do the last recursive call
2542 needed at this level, change it into iteration.
2543 This function is called enough to be worth it. */
2550 compute_transp (XEXP (x, i), indx, bmap, set_p);
2552 else if (fmt[i] == 'E')
2553 for (j = 0; j < XVECLEN (x, i); j++)
2554 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
2558 /* Top level routine to do the dataflow analysis needed by copy/const propagation. */
2562 compute_cprop_data (void)
2564 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2565 compute_available (cprop_pavloc, cprop_absaltered,
2566 cprop_avout, cprop_avin);
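/* For reference, a hedged restatement (not code taken from
   compute_available) of the forward dataflow system being solved above,
   where GEN is CPROP_PAVLOC and KILL is CPROP_ABSALTERED:

     AVOUT(b) = (AVIN(b) & ~KILL(b)) | GEN(b)
     AVIN(b)  = intersection of AVOUT(p) over all predecessors p of b,
                with AVIN of the entry block empty. */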
2569 /* Copy/constant propagation. */
2571 /* Maximum number of register uses in an insn that we handle. */
2574 /* Table of uses found in an insn.
2575 Allocated statically to avoid alloc/free complexity and overhead. */
2576 static struct reg_use reg_use_table[MAX_USES];
2578 /* Index into `reg_use_table' while building it. */
2579 static int reg_use_count;
2581 /* Set up a list of register numbers used in INSN. The found uses are stored
2582 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2583 and contains the number of uses in the table upon exit.
2585 ??? If a register appears multiple times we will record it multiple times.
2586 This doesn't hurt anything but it will slow things down. */
2589 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2596 /* repeat is used to turn tail-recursion into iteration since GCC
2597 can't do it when there's no return value. */
2602 code = GET_CODE (x);
2605 if (reg_use_count == MAX_USES)
2608 reg_use_table[reg_use_count].reg_rtx = x;
2612 /* Recursively scan the operands of this expression. */
2614 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2618 /* If we are about to do the last recursive call
2619 needed at this level, change it into iteration.
2620 This function is called enough to be worth it. */
2627 find_used_regs (&XEXP (x, i), data);
2629 else if (fmt[i] == 'E')
2630 for (j = 0; j < XVECLEN (x, i); j++)
2631 find_used_regs (&XVECEXP (x, i, j), data);
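/* Hedged usage sketch (hypothetical helper): how the callers below drive
   find_used_regs. The global cursor REG_USE_COUNT must be cleared before
   the scan; afterwards REG_USE_TABLE[0 .. REG_USE_COUNT - 1] holds the
   recorded uses. */

static int
toy_count_reg_uses (rtx insn)
{
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  return reg_use_count;
}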
2635 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2636 Returns nonzero if successful. */
2639 try_replace_reg (rtx from, rtx to, rtx insn)
2641 rtx note = find_reg_equal_equiv_note (insn);
2644 rtx set = single_set (insn);
2646 /* Usually we substitute easy stuff, so we won't copy everything.
2647 We do however need to take care not to duplicate non-trivial CONST expressions. */
2651 validate_replace_src_group (from, to, insn);
2652 if (num_changes_pending () && apply_change_group ())
2655 /* Try to simplify SET_SRC if we have substituted a constant. */
2656 if (success && set && CONSTANT_P (to))
2658 src = simplify_rtx (SET_SRC (set));
2661 validate_change (insn, &SET_SRC (set), src, 0);
2664 /* If there is already a REG_EQUAL note, update the expression in it
2665 with our replacement. */
2666 if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
2667 set_unique_reg_note (insn, REG_EQUAL,
2668 simplify_replace_rtx (XEXP (note, 0), from, to));
2669 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2671 /* If above failed and this is a single set, try to simplify the source of
2672 the set given our substitution. We could perhaps try this for multiple
2673 SETs, but it probably won't buy us anything. */
2674 src = simplify_replace_rtx (SET_SRC (set), from, to);
2676 if (!rtx_equal_p (src, SET_SRC (set))
2677 && validate_change (insn, &SET_SRC (set), src, 0))
2680 /* If we've failed to do replacement, have a single SET, don't already
2681 have a note, and have no special SET, add a REG_EQUAL note to not
2682 lose information. */
2683 if (!success && note == 0 && set != 0
2684 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
2685 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
2686 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2689 /* A REG_EQUAL note may get simplified into a plain register.
2690 We don't allow that, so remove the note. This case ought not
2691 to happen, because earlier code ought to have synthesized a
2692 reg-reg move, but be on the safe side. */
2693 if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
2694 remove_note (insn, note);
2699 /* Find a set of REGNO that is available on entry to INSN's block. Returns
2700 NULL if no such set is found. */
2702 static struct expr *
2703 find_avail_set (int regno, rtx insn)
2705 /* SET1 contains the last set found that can be returned to the caller for
2706 use in a substitution. */
2707 struct expr *set1 = 0;
2709 /* Loops are not possible here. To get a loop we would need two sets
2710 available at the start of the block containing INSN. i.e. we would
2711 need two sets like this available at the start of the block:
2713 (set (reg X) (reg Y))
2714 (set (reg Y) (reg X))
2716 This can not happen since the set of (reg Y) would have killed the
2717 set of (reg X) making it unavailable at the start of this block. */
2721 struct expr *set = lookup_set (regno, &set_hash_table);
2723 /* Find a set that is available at the start of the block
2724 which contains INSN. */
2727 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2729 set = next_set (regno, set);
2732 /* If no available set was found we've reached the end of the
2733 (possibly empty) copy chain. */
2737 gcc_assert (GET_CODE (set->expr) == SET);
2739 src = SET_SRC (set->expr);
2741 /* We know the set is available.
2742 Now check that SRC is ANTLOC (i.e. none of the source operands
2743 have changed since the start of the block).
2745 If the source operand changed, we may still use it for the next
2746 iteration of this loop, but we may not use it for substitutions. */
2748 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2751 /* If the source of the set is anything except a register, then
2752 we have reached the end of the copy chain. */
2756 /* Follow the copy chain, i.e. start another iteration of the loop
2757 and see if we have an available copy into SRC. */
2758 regno = REGNO (src);
2761 /* SET1 holds the last set that was available and anticipatable at INSN. */
2766 /* Subroutine of cprop_insn that tries to propagate constants into
2767 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2768 it is the instruction that immediately precedes JUMP, and must be a
2769 single SET of a register. FROM is what we will try to replace,
2770 SRC is the constant we will try to substitute for it. Returns nonzero
2771 if a change was made. */
2774 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2776 rtx new, set_src, note_src;
2777 rtx set = pc_set (jump);
2778 rtx note = find_reg_equal_equiv_note (jump);
2782 note_src = XEXP (note, 0);
2783 if (GET_CODE (note_src) == EXPR_LIST)
2784 note_src = NULL_RTX;
2786 else note_src = NULL_RTX;
2788 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2789 set_src = note_src ? note_src : SET_SRC (set);
2791 /* First substitute the SETCC condition into the JUMP instruction,
2792 then substitute the given values into this expanded JUMP. */
2793 if (setcc != NULL_RTX
2794 && !modified_between_p (from, setcc, jump)
2795 && !modified_between_p (src, setcc, jump))
2798 rtx setcc_set = single_set (setcc);
2799 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2800 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2801 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2802 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2808 new = simplify_replace_rtx (set_src, from, src);
2810 /* If no simplification can be made, then try the next register. */
2811 if (rtx_equal_p (new, SET_SRC (set)))
2814 /* If this is now a no-op, delete it; otherwise it must be a valid insn. */
2819 /* Ensure that the value computed inside the jump insn is equivalent
2820 to the one computed by setcc. */
2821 if (setcc && modified_in_p (new, setcc))
2823 if (! validate_change (jump, &SET_SRC (set), new, 0))
2825 /* When (some) constants are not valid in a comparison, and there
2826 are two registers to be replaced by constants before the entire
2827 comparison can be folded into a constant, we need to keep
2828 intermediate information in REG_EQUAL notes. For targets with
2829 separate compare insns, such notes are added by try_replace_reg.
2830 When we have a combined compare-and-branch instruction, however,
2831 we need to attach a note to the branch itself to make this
2832 optimization work. */
2834 if (!rtx_equal_p (new, note_src))
2835 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2839 /* Remove REG_EQUAL note after simplification. */
2841 remove_note (jump, note);
2845 /* Delete the cc0 setter. */
2846 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2847 delete_insn (setcc);
2850 run_jump_opt_after_gcse = 1;
2852 global_const_prop_count++;
2853 if (dump_file != NULL)
2856 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2857 REGNO (from), INSN_UID (jump));
2858 print_rtl (dump_file, src);
2859 fprintf (dump_file, "\n");
2861 purge_dead_edges (bb);
2863 /* If a conditional jump has been changed into an unconditional jump, remove
2864 the jump and make the edge fallthru - this is always called in cfglayout mode. */
2866 if (new != pc_rtx && simplejump_p (jump))
2871 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ei_next (&ei))
2872 if (e->dest != EXIT_BLOCK_PTR
2873 && BB_HEAD (e->dest) == JUMP_LABEL (jump))
2875 e->flags |= EDGE_FALLTHRU;
2885 constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
2889 /* Check for reg or cc0 setting instructions followed by
2890 conditional branch instructions first. */
2892 && (sset = single_set (insn)) != NULL
2894 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2896 rtx dest = SET_DEST (sset);
2897 if ((REG_P (dest) || CC0_P (dest))
2898 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2902 /* Handle normal insns next. */
2903 if (NONJUMP_INSN_P (insn)
2904 && try_replace_reg (from, to, insn))
2907 /* Try to propagate a CONST_INT into a conditional jump.
2908 We're pretty specific about what we will handle in this
2909 code; we can extend it as necessary over time.
2911 Right now the insn in question must look like
2912 (set (pc) (if_then_else ...)) */
2913 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2914 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2918 /* Perform constant and copy propagation on INSN.
2919 The result is nonzero if a change was made. */
2922 cprop_insn (rtx insn, int alter_jumps)
2924 struct reg_use *reg_used;
2932 note_uses (&PATTERN (insn), find_used_regs, NULL);
2934 note = find_reg_equal_equiv_note (insn);
2936 /* We may win even when propagating constants into notes. */
2938 find_used_regs (&XEXP (note, 0), NULL);
2940 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2941 reg_used++, reg_use_count--)
2943 unsigned int regno = REGNO (reg_used->reg_rtx);
2947 /* Ignore registers created by GCSE.
2948 We do this because ... */
2949 if (regno >= max_gcse_regno)
2952 /* If the register has already been set in this block, there's
2953 nothing we can do. */
2954 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2957 /* Find an assignment that sets reg_used and is available
2958 at the start of the block. */
2959 set = find_avail_set (regno, insn);
2964 /* ??? We might be able to handle PARALLELs. Later. */
2965 gcc_assert (GET_CODE (pat) == SET);
2967 src = SET_SRC (pat);
2969 /* Constant propagation. */
2970 if (gcse_constant_p (src))
2972 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2975 global_const_prop_count++;
2976 if (dump_file != NULL)
2978 fprintf (dump_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
2979 fprintf (dump_file, "insn %d with constant ", INSN_UID (insn));
2980 print_rtl (dump_file, src);
2981 fprintf (dump_file, "\n");
2983 if (INSN_DELETED_P (insn))
2987 else if (REG_P (src)
2988 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2989 && REGNO (src) != regno)
2991 if (try_replace_reg (reg_used->reg_rtx, src, insn))
2994 global_copy_prop_count++;
2995 if (dump_file != NULL)
2997 fprintf (dump_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
2998 regno, INSN_UID (insn));
2999 fprintf (dump_file, " with reg %d\n", REGNO (src));
3002 /* The original insn setting reg_used may or may not now be
3003 deletable. We leave the deletion to flow. */
3004 /* FIXME: If it turns out that the insn isn't deletable,
3005 then we may have unnecessarily extended register lifetimes
3006 and made things worse. */
3014 /* Like find_used_regs, but avoid recording uses that appear in
3015 input-output contexts such as zero_extract or pre_dec. This
3016 restricts the cases we consider to those for which local cprop
3017 can legitimately make replacements. */
3020 local_cprop_find_used_regs (rtx *xptr, void *data)
3027 switch (GET_CODE (x))
3031 case STRICT_LOW_PART:
3040 /* Can only legitimately appear this early in the context of
3041 stack pushes for function arguments, but handle all of the
3042 codes nonetheless. */
3046 /* Setting a subreg of a register larger than word_mode leaves
3047 the non-written words unchanged. */
3048 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3056 find_used_regs (xptr, data);
3059 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3060 their REG_EQUAL notes need updating. */
3063 do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
3065 rtx newreg = NULL, newcnst = NULL;
3067 /* Rule out USE instructions and ASM statements as we don't want to
3068 change the hard registers mentioned. */
3070 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3071 || (GET_CODE (PATTERN (insn)) != USE
3072 && asm_noperands (PATTERN (insn)) < 0)))
3074 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3075 struct elt_loc_list *l;
3079 for (l = val->locs; l; l = l->next)
3081 rtx this_rtx = l->loc;
3084 /* Don't CSE non-constant values out of libcall blocks. */
3085 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3088 if (gcse_constant_p (this_rtx))
3090 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3091 /* Don't copy propagate if it has an attached REG_EQUIV note.
3092 At this point only function parameters should have
3093 REG_EQUIV notes, and if the argument slot is used somewhere
3094 explicitly, it means the address of the parameter has been taken,
3095 so we should not extend the lifetime of the pseudo. */
3096 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3097 || ! MEM_P (XEXP (note, 0))))
3100 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3102 /* If we find a case where we can't fix the retval REG_EQUAL notes
3103 to match the new register, we either have to abandon this replacement
3104 or fix delete_trivially_dead_insns to preserve the setting insn,
3105 or make it delete the REG_EQUAL note, and fix up all passes that
3106 require the REG_EQUAL note there. */
3109 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3110 gcc_assert (adjusted);
3112 if (dump_file != NULL)
3114 fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3116 fprintf (dump_file, "insn %d with constant ",
3118 print_rtl (dump_file, newcnst);
3119 fprintf (dump_file, "\n");
3121 local_const_prop_count++;
3124 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3126 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3127 if (dump_file != NULL)
3130 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3131 REGNO (x), INSN_UID (insn));
3132 fprintf (dump_file, " with reg %d\n", REGNO (newreg));
3134 local_copy_prop_count++;
3141 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3142 their REG_EQUAL notes need updating to reflect that OLDREG has been
3143 replaced with NEWVAL in INSN. Return true if all substitutions could be performed. */
3146 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3150 while ((end = *libcall_sp++))
3152 rtx note = find_reg_equal_equiv_note (end);
3159 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3163 note = find_reg_equal_equiv_note (end);
3166 if (reg_mentioned_p (newval, XEXP (note, 0)))
3169 while ((end = *libcall_sp++));
3173 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3174 df_notes_rescan (end);
3180 #define MAX_NESTED_LIBCALLS 9
3182 /* Do local const/copy propagation (i.e. within each basic block).
3183 If ALTER_JUMPS is true, allow propagating into jump insns, which
3184 could modify the CFG. */
3187 local_cprop_pass (bool alter_jumps)
3191 struct reg_use *reg_used;
3192 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3193 bool changed = false;
3195 cselib_init (false);
3196 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3200 FOR_BB_INSNS (bb, insn)
3204 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3208 gcc_assert (libcall_sp != libcall_stack);
3209 *--libcall_sp = XEXP (note, 0);
3211 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3214 note = find_reg_equal_equiv_note (insn);
3218 note_uses (&PATTERN (insn), local_cprop_find_used_regs,
3221 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3223 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3224 reg_used++, reg_use_count--)
3226 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3233 if (INSN_DELETED_P (insn))
3236 while (reg_use_count);
3238 cselib_process_insn (insn);
3241 /* Forget everything at the end of a basic block. Make sure we are
3242 not inside a libcall; libcalls should never cross basic blocks. */
3243 cselib_clear_table ();
3244 gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
3249 /* Global analysis may get into infinite loops for unreachable blocks. */
3250 if (changed && alter_jumps)
3252 delete_unreachable_blocks ();
3253 free_reg_set_mem ();
3254 alloc_reg_set_mem (max_reg_num ());
3259 /* Forward propagate copies. This includes copies and constants. Return
3260 nonzero if a change was made. */
3263 cprop (int alter_jumps)
3269 /* Note we start at block 1. */
3270 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3272 if (dump_file != NULL)
3273 fprintf (dump_file, "\n");
3278 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3280 /* Reset tables used to keep track of what's still valid [since the
3281 start of the block]. */
3282 reset_opr_set_tables ();
3284 FOR_BB_INSNS (bb, insn)
3287 changed |= cprop_insn (insn, alter_jumps);
3289 /* Keep track of everything modified by this insn. */
3290 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3291 call mark_oprs_set if we turned the insn into a NOTE. */
3292 if (! NOTE_P (insn))
3293 mark_oprs_set (insn);
3297 if (dump_file != NULL)
3298 fprintf (dump_file, "\n");
3303 /* Similar to get_condition, only the resulting condition must be
3304 valid at JUMP, instead of at EARLIEST.
3306 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3307 settle for the condition variable in the jump instruction being integral.
3308 We prefer to be able to record the value of a user variable, rather than
3309 the value of a temporary used in a condition. This could be solved by
3310 recording the value of *every* register scanned by canonicalize_condition,
3311 but this would require some code reorganization. */
3314 fis_get_condition (rtx jump)
3316 return get_condition (jump, NULL, false, true);
3319 /* Check the comparison COND to see if we can safely form an implicit set from
3320 it. COND is either an EQ or NE comparison. */
3323 implicit_set_cond_p (const_rtx cond)
3325 const enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3326 const_rtx cst = XEXP (cond, 1);
3328 /* We can't perform this optimization if either operand might be or might
3329 contain a signed zero, since -0.0 compares equal to +0.0 yet is a different value. */
3330 if (HONOR_SIGNED_ZEROS (mode))
3332 /* It is sufficient to check if CST is or contains a zero. We must
3333 handle float, complex, and vector. If any subpart is a zero, then
3334 the optimization can't be performed. */
3335 /* ??? The complex and vector checks are not implemented yet. We just
3336 always return zero for them. */
3337 if (GET_CODE (cst) == CONST_DOUBLE)
3340 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3341 if (REAL_VALUES_EQUAL (d, dconst0))
3348 return gcse_constant_p (cst);
3351 /* Find the implicit sets of a function. An "implicit set" is a constraint
3352 on the value of a variable, implied by a conditional jump. For example,
3353 following "if (x == 2)", the then branch may be optimized as though the
3354 conditional performed an "explicit set", in this example, "x = 2". This
3355 function records the set patterns that are implicit at the start of each basic block. */
3359 find_implicit_sets (void)
3361 basic_block bb, dest;
3367 /* Check for more than one successor. */
3368 if (EDGE_COUNT (bb->succs) > 1)
3370 cond = fis_get_condition (BB_END (bb));
3373 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3374 && REG_P (XEXP (cond, 0))
3375 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3376 && implicit_set_cond_p (cond))
3378 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3379 : FALLTHRU_EDGE (bb)->dest;
3381 if (dest && single_pred_p (dest)
3382 && dest != EXIT_BLOCK_PTR)
3384 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3386 implicit_sets[dest->index] = new;
3389 fprintf (dump_file, "Implicit set of reg %d in ",
3390 REGNO (XEXP (cond, 0)));
3391 fprintf (dump_file, "basic block %d\n", dest->index);
3399 fprintf (dump_file, "Found %d implicit sets\n", count);
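/* Hedged sketch of the pattern recorded above (the helper is hypothetical):
   given a comparison "(eq (reg N) (const_int C))" guarding a block, the
   implicit set for that block is simply (set (reg N) (const_int C)). */

static rtx
toy_implicit_set_for_cond (rtx cond)
{
  return gen_rtx_SET (VOIDmode, XEXP (cond, 0), XEXP (cond, 1));
}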
3402 /* Perform one copy/constant propagation pass.
3403 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3404 propagation into conditional jumps. If BYPASS_JUMPS is true,
3405 perform conditional jump bypassing optimizations. */
3408 one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
3412 global_const_prop_count = local_const_prop_count = 0;
3413 global_copy_prop_count = local_copy_prop_count = 0;
3416 local_cprop_pass (cprop_jumps);
3418 /* Determine implicit sets. */
3419 implicit_sets = XCNEWVEC (rtx, last_basic_block);
3420 find_implicit_sets ();
3422 alloc_hash_table (max_cuid, &set_hash_table, 1);
3423 compute_hash_table (&set_hash_table);
3425 /* Free implicit_sets before peak usage. */
3426 free (implicit_sets);
3427 implicit_sets = NULL;
3430 dump_hash_table (dump_file, "SET", &set_hash_table);
3431 if (set_hash_table.n_elems > 0)
3433 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3434 compute_cprop_data ();
3435 changed = cprop (cprop_jumps);
3437 changed |= bypass_conditional_jumps ();
3441 free_hash_table (&set_hash_table);
3445 fprintf (dump_file, "CPROP of %s, pass %d: %d bytes needed, ",
3446 current_function_name (), pass, bytes_used);
3447 fprintf (dump_file, "%d local const props, %d local copy props, ",
3448 local_const_prop_count, local_copy_prop_count);
3449 fprintf (dump_file, "%d global const props, %d global copy props\n\n",
3450 global_const_prop_count, global_copy_prop_count);
3452 /* Global analysis may get into infinite loops for unreachable blocks. */
3453 if (changed && cprop_jumps)
3454 delete_unreachable_blocks ();
3459 /* Bypass conditional jumps. */
3461 /* The value of last_basic_block at the beginning of the jump_bypass
3462 pass. The use of redirect_edge_and_branch_force may introduce new
3463 basic blocks, but the data flow analysis is only valid for basic
3464 block indices less than bypass_last_basic_block. */
3466 static int bypass_last_basic_block;
3468 /* Find a set of REGNO to a constant that is available at the end of basic
3469 block BB. Returns NULL if no such set is found. Based heavily upon find_avail_set. */
3472 static struct expr *
3473 find_bypass_set (int regno, int bb)
3475 struct expr *result = 0;
3480 struct expr *set = lookup_set (regno, &set_hash_table);
3484 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3486 set = next_set (regno, set);
3492 gcc_assert (GET_CODE (set->expr) == SET);
3494 src = SET_SRC (set->expr);
3495 if (gcse_constant_p (src))
3501 regno = REGNO (src);
3507 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3508 any of the instructions inserted on an edge. Jump bypassing places
3509 condition code setters on CFG edges using insert_insn_on_edge. This
3510 function is required to check that our data flow analysis is still
3511 valid prior to commit_edge_insertions. */
3514 reg_killed_on_edge (const_rtx reg, const_edge e)
3518 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3519 if (INSN_P (insn) && reg_set_p (reg, insn))
3525 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3526 basic block BB which has more than one predecessor. If not NULL, SETCC
3527 is the first instruction of BB, which is immediately followed by JUMP_INSN
3528 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3529 Returns nonzero if a change was made.
3531 During the jump bypassing pass, we may place copies of SETCC instructions
3532 on CFG edges. The following routine must be careful to pay attention to
3533 these inserted insns when performing its transformations. */
3536 bypass_block (basic_block bb, rtx setcc, rtx jump)
3541 int may_be_loop_header;
3545 insn = (setcc != NULL) ? setcc : jump;
3547 /* Determine set of register uses in INSN. */
3549 note_uses (&PATTERN (insn), find_used_regs, NULL);
3550 note = find_reg_equal_equiv_note (insn);
3552 find_used_regs (&XEXP (note, 0), NULL);
3554 may_be_loop_header = false;
3555 FOR_EACH_EDGE (e, ei, bb->preds)
3556 if (e->flags & EDGE_DFS_BACK)
3558 may_be_loop_header = true;
3563 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3567 if (e->flags & EDGE_COMPLEX)
3573 /* We can't redirect edges from new basic blocks. */
3574 if (e->src->index >= bypass_last_basic_block)
3580 /* The irreducible loops created by redirecting edges entering the
3581 loop from outside would decrease the effectiveness of some of the
3582 following optimizations, so prevent this. */
3583 if (may_be_loop_header
3584 && !(e->flags & EDGE_DFS_BACK))
3590 for (i = 0; i < reg_use_count; i++)
3592 struct reg_use *reg_used = &reg_use_table[i];
3593 unsigned int regno = REGNO (reg_used->reg_rtx);
3594 basic_block dest, old_dest;
3598 if (regno >= max_gcse_regno)
3601 set = find_bypass_set (regno, e->src->index);
3606 /* Check the data flow is valid after edge insertions. */
3607 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3610 src = SET_SRC (pc_set (jump));
3613 src = simplify_replace_rtx (src,
3614 SET_DEST (PATTERN (setcc)),
3615 SET_SRC (PATTERN (setcc)));
3617 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3618 SET_SRC (set->expr));
3620 /* Jump bypassing may have already placed instructions on
3621 edges of the CFG. We can't bypass an outgoing edge that
3622 has instructions associated with it, as these insns won't
3623 get executed if the incoming edge is redirected. */
3627 edest = FALLTHRU_EDGE (bb);
3628 dest = edest->insns.r ? NULL : edest->dest;
3630 else if (GET_CODE (new) == LABEL_REF)
3632 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3633 /* Don't bypass edges containing instructions. */
3634 edest = find_edge (bb, dest);
3635 if (edest && edest->insns.r)
3641 /* Avoid unification of the edge with other edges from original
3642 branch. We would end up emitting the instruction on "both" edges. */
3645 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
3646 && find_edge (e->src, dest))
3652 && dest != EXIT_BLOCK_PTR)
3654 redirect_edge_and_branch_force (e, dest);
3656 /* Copy the register setter to the redirected edge.
3657 Don't copy CC0 setters, as CC0 is dead after jump. */
3660 rtx pat = PATTERN (setcc);
3661 if (!CC0_P (SET_DEST (pat)))
3662 insert_insn_on_edge (copy_insn (pat), e);
3665 if (dump_file != NULL)
3667 fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
3668 "in jump_insn %d equals constant ",
3669 regno, INSN_UID (jump));
3670 print_rtl (dump_file, SET_SRC (set->expr));
3671 fprintf (dump_file, "\nBypass edge from %d->%d to %d\n",
3672 e->src->index, old_dest->index, dest->index);
3685 /* Find basic blocks with more than one predecessor that only contain a
3686 single conditional jump. If the result of the comparison is known at
3687 compile-time from any incoming edge, redirect that edge to the
3688 appropriate target. Returns nonzero if a change was made.
3690 This function is now mis-named, because we also handle indirect jumps. */
3693 bypass_conditional_jumps (void)
3701 /* Note we start at block 1. */
3702 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3705 bypass_last_basic_block = last_basic_block;
3706 mark_dfs_back_edges ();
3709 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3710 EXIT_BLOCK_PTR, next_bb)
3712 /* Check for more than one predecessor. */
3713 if (!single_pred_p (bb))
3716 FOR_BB_INSNS (bb, insn)
3717 if (NONJUMP_INSN_P (insn))
3721 if (GET_CODE (PATTERN (insn)) != SET)
3724 dest = SET_DEST (PATTERN (insn));
3725 if (REG_P (dest) || CC0_P (dest))
3730 else if (JUMP_P (insn))
3732 if ((any_condjump_p (insn) || computed_jump_p (insn))
3733 && onlyjump_p (insn))
3734 changed |= bypass_block (bb, setcc, insn);
3737 else if (INSN_P (insn))
3742 /* If we bypassed any register setting insns, we inserted a
3743 copy on the redirected edge. These need to be committed. */
3745 commit_edge_insertions ();
3750 /* Compute PRE+LCM working variables. */
3752 /* Local properties of expressions. */
3753 /* Nonzero for expressions that are transparent in the block. */
3754 static sbitmap *transp;
3756 /* Nonzero for expressions that are transparent at the end of the block.
3757 This is only zero for expressions killed by an abnormal critical edge
3758 created by a call. */
3759 static sbitmap *transpout;
3761 /* Nonzero for expressions that are computed (available) in the block. */
3762 static sbitmap *comp;
3764 /* Nonzero for expressions that are locally anticipatable in the block. */
3765 static sbitmap *antloc;
3767 /* Nonzero for expressions where this block is an optimal computation point. */
3769 static sbitmap *pre_optimal;
3771 /* Nonzero for expressions which are redundant in a particular block. */
3772 static sbitmap *pre_redundant;
3774 /* Nonzero for expressions which should be inserted on a specific edge. */
3775 static sbitmap *pre_insert_map;
3777 /* Nonzero for expressions which should be deleted in a specific block. */
3778 static sbitmap *pre_delete_map;
3780 /* Contains the edge_list returned by pre_edge_lcm. */
3781 static struct edge_list *edge_list;
3783 /* Redundant insns. */
3784 static sbitmap pre_redundant_insns;
3786 /* Allocate vars used for PRE analysis. */
3789 alloc_pre_mem (int n_blocks, int n_exprs)
3791 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3792 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3793 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3796 pre_redundant = NULL;
3797 pre_insert_map = NULL;
3798 pre_delete_map = NULL;
3799 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3801 /* pre_insert and pre_delete are allocated later. */
3804 /* Free vars used for PRE analysis. */
3809 sbitmap_vector_free (transp);
3810 sbitmap_vector_free (comp);
3812 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3815 sbitmap_vector_free (pre_optimal);
3817 sbitmap_vector_free (pre_redundant);
3819 sbitmap_vector_free (pre_insert_map);
3821 sbitmap_vector_free (pre_delete_map);
3823 transp = comp = NULL;
3824 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3827 /* Top level routine to do the dataflow analysis needed by PRE. */
3830 compute_pre_data (void)
3832 sbitmap trapping_expr;
3836 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3837 sbitmap_vector_zero (ae_kill, last_basic_block);
3839 /* Collect expressions which might trap. */
3840 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3841 sbitmap_zero (trapping_expr);
3842 for (ui = 0; ui < expr_hash_table.size; ui++)
3845 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3846 if (may_trap_p (e->expr))
3847 SET_BIT (trapping_expr, e->bitmap_index);
3850 /* Compute ae_kill for each basic block using ~(TRANSP | COMP). */
3860 /* If the current block is the destination of an abnormal edge, we
3861 kill all trapping expressions because we won't be able to properly
3862 place the instruction on the edge. So make them neither
3863 anticipatable nor transparent. This is fairly conservative. */
3864 FOR_EACH_EDGE (e, ei, bb->preds)
3865 if (e->flags & EDGE_ABNORMAL)
3867 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3868 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3872 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3873 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3876 edge_list = pre_edge_lcm (expr_hash_table.n_elems, transp, comp, antloc,
3877 ae_kill, &pre_insert_map, &pre_delete_map);
3878 sbitmap_vector_free (antloc);
3880 sbitmap_vector_free (ae_kill);
3882 sbitmap_free (trapping_expr);
3887 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach block BB.
3890 VISITED is a pointer to a working buffer for tracking which BB's have
3891 been visited. It is NULL for the top-level call.
3893 We treat reaching expressions that go through blocks containing the same
3894 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3895 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3896 2 as not reaching. The intent is to improve the probability of finding
3897 only one reaching expression and to reduce register lifetimes by picking
3898 the closest such expression. */
3901 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3906 FOR_EACH_EDGE (pred, ei, bb->preds)
3908 basic_block pred_bb = pred->src;
3910 if (pred->src == ENTRY_BLOCK_PTR
3911 /* Has this predecessor already been visited? */
3912 || visited[pred_bb->index])
3913 ;/* Nothing to do. */
3915 /* Does this predecessor generate this expression? */
3916 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3918 /* Is this the occurrence we're looking for?
3919 Note that there's only one generating occurrence per block
3920 so we just need to check the block number. */
3921 if (occr_bb == pred_bb)
3924 visited[pred_bb->index] = 1;
3926 /* Ignore this predecessor if it kills the expression. */
3927 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3928 visited[pred_bb->index] = 1;
3930 /* Neither gen nor kill. */
3933 visited[pred_bb->index] = 1;
3934 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3939 /* All paths have been checked. */
3943 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
3944 memory allocated for that function is returned. */
3947 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3950 char *visited = XCNEWVEC (char, last_basic_block);
3952 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3959 /* Given an expr, generate RTL which we can insert at the end of a BB,
3960 or on an edge. Set the block number of any insns generated to the value of BB. */
3964 process_insert_insn (struct expr *expr)
3966 rtx reg = expr->reaching_reg;
3967 rtx exp = copy_rtx (expr->expr);
3972 /* If the expression is something that's an operand, like a constant,
3973 just copy it to a register. */
3974 if (general_operand (exp, GET_MODE (reg)))
3975 emit_move_insn (reg, exp);
3977 /* Otherwise, make a new insn to compute this expression and make sure the
3978 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3979 expression to make sure we don't have any sharing issues. */
3982 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3984 if (insn_invalid_p (insn))
3995 /* Add EXPR to the end of basic block BB.
3997 This is used by both PRE and code hoisting.
3999 For PRE, we want to verify that the expr is either transparent
4000 or locally anticipatable in the target block. This check makes
4001 no sense for code hoisting. */
4004 insert_insn_end_basic_block (struct expr *expr, basic_block bb, int pre)
4006 rtx insn = BB_END (bb);
4008 rtx reg = expr->reaching_reg;
4009 int regno = REGNO (reg);
4012 pat = process_insert_insn (expr);
4013 gcc_assert (pat && INSN_P (pat));
4016 while (NEXT_INSN (pat_end) != NULL_RTX)
4017 pat_end = NEXT_INSN (pat_end);
4019 /* If the last insn is a jump, insert EXPR in front [taking care to
4020 handle cc0, etc. properly]. Similarly we need to take care of trapping
4021 instructions in the presence of non-call exceptions. */
4024 || (NONJUMP_INSN_P (insn)
4025 && (!single_succ_p (bb)
4026 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
4031 /* It should always be the case that we can put these instructions
4032 anywhere in the basic block when performing PRE optimizations.
4034 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4035 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4036 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4038 /* If this is a jump table, then we can't insert stuff here. Since
4039 we know the previous real insn must be the tablejump, we insert
4040 the new instruction just before the tablejump. */
4041 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4042 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4043 insn = prev_real_insn (insn);
4046 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4047 if cc0 isn't set. */
4048 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4050 insn = XEXP (note, 0);
4053 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4054 if (maybe_cc0_setter
4055 && INSN_P (maybe_cc0_setter)
4056 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4057 insn = maybe_cc0_setter;
4060 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4061 new_insn = emit_insn_before_noloc (pat, insn, bb);
4064 /* Likewise if the last insn is a call, as will happen in the presence
4065 of exception handling. */
4066 else if (CALL_P (insn)
4067 && (!single_succ_p (bb)
4068 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
4070 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4071 we search backward and place the instructions before the first
4072 parameter is loaded. Do this for everyone for consistency and a
4073 presumption that we'll get better code elsewhere as well.
4075 It should always be the case that we can put these instructions
4076 anywhere in the basic block when performing PRE optimizations.
4080 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4081 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4083 /* Since different machines initialize their parameter registers
4084 in different orders, assume nothing. Collect the set of all
4085 parameter registers. */
4086 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4088 /* If we found all the parameter loads, then we want to insert
4089 before the first parameter load.
4091 If we did not find all the parameter loads, then we might have
4092 stopped on the head of the block, which could be a CODE_LABEL.
4093 If we inserted before the CODE_LABEL, then we would be putting
4094 the insn in the wrong basic block. In that case, put the insn
4095 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4096 while (LABEL_P (insn)
4097 || NOTE_INSN_BASIC_BLOCK_P (insn))
4098 insn = NEXT_INSN (insn);
4100 new_insn = emit_insn_before_noloc (pat, insn, bb);
4103 new_insn = emit_insn_after_noloc (pat, insn, bb);
4109 add_label_notes (PATTERN (pat), new_insn);
4110 note_stores (PATTERN (pat), record_set_info, pat);
4114 pat = NEXT_INSN (pat);
4117 gcse_create_count++;
4121 fprintf (dump_file, "PRE/HOIST: end of bb %d, insn %d, ",
4122 bb->index, INSN_UID (new_insn));
4123 fprintf (dump_file, "copying expression %d to reg %d\n",
4124 expr->bitmap_index, regno);
4128 /* Insert partially redundant expressions on edges in the CFG to make
4129 the expressions fully redundant. */
4132 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4134 int e, i, j, num_edges, set_size, did_insert = 0;
4137 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4138 if it reaches any of the deleted expressions. */
4140 set_size = pre_insert_map[0]->size;
4141 num_edges = NUM_EDGES (edge_list);
4142 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4143 sbitmap_vector_zero (inserted, num_edges);
4145 for (e = 0; e < num_edges; e++)
4148 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4150 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4152 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4154 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4155 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4157 struct expr *expr = index_map[j];
4160 /* Now look at each deleted occurrence of this expression. */
4161 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4163 if (! occr->deleted_p)
4166 /* Insert this expression on this edge if it would
4167 reach the deleted occurrence in BB. */
4168 if (!TEST_BIT (inserted[e], j))
4171 edge eg = INDEX_EDGE (edge_list, e);
4173 /* We can't insert anything on an abnormal and
4174 critical edge, so we insert the insn at the end of
4175 the previous block. There are several alternatives
4176 detailed in Morgan's book P277 (sec 10.5) for
4177 handling this situation. This one is easiest for now. */
4180 if (eg->flags & EDGE_ABNORMAL)
4181 insert_insn_end_basic_block (index_map[j], bb, 0);
4184 insn = process_insert_insn (index_map[j]);
4185 insert_insn_on_edge (insn, eg);
4190 fprintf (dump_file, "PRE/HOIST: edge (%d,%d), ",
4192 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4193 fprintf (dump_file, "copy expression %d\n",
4194 expr->bitmap_index);
4197 update_ld_motion_stores (expr);
4198 SET_BIT (inserted[e], j);
4200 gcse_create_count++;
4207 sbitmap_vector_free (inserted);
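/* Hedged model (hypothetical helper) of the word-at-a-time scan above:
   bits are peeled off the low end of one SBITMAP word, so bit J of the
   word whose first expression index is BASE maps to index BASE + J. */

static void
toy_foreach_set_bit (SBITMAP_ELT_TYPE word, int base, void (*fn) (int))
{
  int j;

  for (j = base; word != 0; j++, word >>= 1)
    if (word & 1)
      fn (j);
}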
4211 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4212 Given "old_reg <- expr" (INSN), instead of adding after it
4213 reaching_reg <- old_reg
4214 it's better to do the following:
4215 reaching_reg <- expr
4216 old_reg <- reaching_reg
4217 because this way copy propagation can discover additional PRE
4218 opportunities. But if this fails, we try the old way.
4219 When "expr" is a store, i.e.
4220 given "MEM <- old_reg", instead of adding after it
4221 reaching_reg <- old_reg
4222 it's better to add it before as follows:
4223 reaching_reg <- old_reg
4224 MEM <- reaching_reg. */
pre_insert_copy_insn (struct expr *expr, rtx insn)
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx pat = PATTERN (insn);
  rtx set, first_set, new_insn;

  /* This block matches the logic in hash_scan_insn.  */
  switch (GET_CODE (pat))

    case PARALLEL:
      /* Search through the parallel looking for the set whose
         source was the expression that we're interested in.  */
      first_set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
              /* If the source was a REG_EQUAL or REG_EQUIV note, we
                 may not find an equivalent expression, but in this
                 case the PARALLEL will have a single set.  */
              if (first_set == NULL_RTX)
                first_set = x;
              if (expr_equiv_p (SET_SRC (x), expr->expr))

      gcc_assert (first_set);
      if (set == NULL_RTX)
        set = first_set;

  if (REG_P (SET_DEST (set)))
      old_reg = SET_DEST (set);
      /* Check if we can modify the set destination in the original insn.  */
      if (validate_change (insn, &SET_DEST (set), reg, 0))
          new_insn = gen_move_insn (old_reg, reg);
          new_insn = emit_insn_after (new_insn, insn);

          /* Keep register set table up to date.  */
          record_one_set (regno, insn);
      else
          new_insn = gen_move_insn (reg, old_reg);
          new_insn = emit_insn_after (new_insn, insn);

          /* Keep register set table up to date.  */
          record_one_set (regno, new_insn);

  else /* This is possible only in case of a store to memory.  */
      old_reg = SET_SRC (set);
      new_insn = gen_move_insn (reg, old_reg);

      /* Check if we can modify the set source in the original insn.  */
      if (validate_change (insn, &SET_SRC (set), reg, 0))
        new_insn = emit_insn_before (new_insn, insn);
      else
        new_insn = emit_insn_after (new_insn, insn);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);

  gcse_create_count++;

  if (dump_file)
    fprintf (dump_file,
             "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
             BLOCK_NUM (insn), INSN_UID (new_insn), indx,
             INSN_UID (insn), regno);
/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

pre_insert_copies (void)
  unsigned int i, added_copy;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
        /* If the basic block isn't reachable, PPOUT will be TRUE.  However,
           we don't want to insert a copy here because the expression may not
           really be redundant.  So only insert an insn if the expression was
           deleted.  This test also avoids further processing if the
           expression wasn't deleted anywhere.  */
        if (expr->reaching_reg == NULL)
          continue;

        /* Set when we add a copy for that expression.  */
        added_copy = 0;

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
            if (! occr->deleted_p)
              continue;

            for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
                rtx insn = avail->insn;

                /* No need to handle this one if handled already.  */
                if (avail->copied_p)
                  continue;

                /* Don't handle this one if it's a redundant one.  */
                if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
                  continue;

                /* Or if the expression doesn't reach the deleted one.  */
                if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
                                               expr,
                                               BLOCK_FOR_INSN (occr->insn)))
                  continue;

                added_copy = 1;

                /* Copy the result of avail to reaching_reg.  */
                pre_insert_copy_insn (expr, insn);
                avail->copied_p = 1;

        if (added_copy)
          update_ld_motion_stores (expr);
/* Emit move from SRC to DEST noting the equivalence with expression computed
   in INSN.  */

gcse_emit_move_after (rtx src, rtx dest, rtx insn)
  rtx set = single_set (insn), set2;

  /* This should never fail since we're creating a reg->reg copy
     we've verified to be valid.  */
  new = emit_insn_after (gen_move_insn (dest, src), insn);

  /* Note the equivalence for local CSE pass.  */
  set2 = single_set (new);
  if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
    return new;
  if ((note = find_reg_equal_equiv_note (insn)))
    eqv = XEXP (note, 0);
  else
    eqv = SET_SRC (set);

  set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns nonzero if a change is made.  */
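/* Sketch of the rewrite, again at the source level with hypothetical
   names: a redundant computation is not removed outright but turned
   into a copy,

       y = a + b;    ==>    y = reaching_reg;

   which copy propagation, CSE, or register allocation can then
   eliminate.  */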
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i];
         expr != NULL;
         expr = expr->next_same_hash)
        int indx = expr->bitmap_index;

        /* We only need to search antic_occr since we require
           ANTLOC != 0.  */

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
            rtx insn = occr->insn;
            basic_block bb = BLOCK_FOR_INSN (insn);

            /* We only delete insns that have a single_set.  */
            if (TEST_BIT (pre_delete_map[bb->index], indx)
                && (set = single_set (insn)) != 0
                && dbg_cnt (pre_insn))
                /* Create a pseudo-reg to store the result of reaching
                   expressions into.  Get the mode for the new pseudo from
                   the mode of the original destination pseudo.  */
                if (expr->reaching_reg == NULL)
                  expr->reaching_reg
                    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
                occr->deleted_p = 1;
                SET_BIT (pre_redundant_insns, INSN_CUID (insn));
                changed = 1;
                gcse_subst_count++;

                if (dump_file)
                    fprintf (dump_file,
                             "PRE: redundant insn %d (expression %d) in ",
                             INSN_UID (insn), indx);
                    fprintf (dump_file, "bb %d, reaching reg is %d\n",
                             bb->index, REGNO (expr->reaching_reg));
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
   lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
   Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */
  int did_insert, changed;
  struct expr **index_map;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */

  changed = pre_delete ();
  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();

  commit_edge_insertions ();

  sbitmap_free (pre_redundant_insns);
/* Top level routine to perform one PRE GCSE pass.

   Return nonzero if a change was made.  */

one_pre_gcse_pass (int pass)
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  add_noreturn_fake_exit_edges ();
  compute_ld_motion_mems ();

  compute_hash_table (&expr_hash_table);
  trim_ld_motion_mems ();
  if (dump_file)
    dump_hash_table (dump_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
      alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);

  remove_fake_exit_edges ();
  free_hash_table (&expr_hash_table);

  if (dump_file)
      fprintf (dump_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
               current_function_name (), pass, bytes_used);
      fprintf (dump_file, "%d substs, %d insns created\n",
               gcse_subst_count, gcse_create_count);

  return changed;
/* If X contains any LABEL_REF's, add REG_LABEL_OPERAND notes for them
   to INSN.  If such notes are added to an insn which references a
   CODE_LABEL, the LABEL_NUSES count is incremented.  We have to add
   that note, because the following loop optimization pass requires
   them.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL_OPERAND and REG_LABEL_TARGET notes.  */
add_label_notes (rtx x, rtx insn)
  enum rtx_code code = GET_CODE (x);

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
      /* This code used to ignore labels that referred to dispatch tables to
         avoid flow generating (slightly) worse code.

         We no longer ignore such label references (see LABEL_REF handling in
         mark_jump_label for additional information).  */

      if (reg_mentioned_p (XEXP (x, 0), insn))
          /* There's no reason for current users to emit jump-insns
             with such a LABEL_REF, so we don't have to handle
             REG_LABEL_TARGET notes.  */
          gcc_assert (!JUMP_P (insn));
          REG_NOTES (insn)
            = gen_rtx_INSN_LIST (REG_LABEL_OPERAND, XEXP (x, 0),
                                 REG_NOTES (insn));
          if (LABEL_P (XEXP (x, 0)))
            LABEL_NUSES (XEXP (x, 0))++;

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
      if (fmt[i] == 'e')
        add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          add_label_notes (XVECEXP (x, i, j), insn);
/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */
compute_transpout (void)
  sbitmap_vector_ones (transpout, last_basic_block);

  FOR_EACH_BB (bb)
      /* Note that flow inserted a nop at the end of basic blocks that
         end in call instructions for reasons other than abnormal
         CFG termination.  */
      if (! CALL_P (BB_END (bb)))
        continue;

      for (i = 0; i < expr_hash_table.size; i++)
        for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
          if (MEM_P (expr->expr))
              if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
                  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
                continue;

              /* ??? Optimally, we would use interprocedural alias
                 analysis to determine if this mem is actually killed
                 by this call.  */
              RESET_BIT (transpout[bb->index], expr->bitmap_index);
/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging, doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */
/* Allocate vars used for code hoisting analysis.  */

alloc_code_hoist_mem (int n_blocks, int n_exprs)
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);

/* Free vars used for code hoisting analysis.  */

free_code_hoist_mem (void)
  sbitmap_vector_free (antloc);
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  sbitmap_vector_free (hoist_vbein);
  sbitmap_vector_free (hoist_vbeout);
  sbitmap_vector_free (hoist_exprs);
  sbitmap_vector_free (transpout);

  free_dominance_info (CDI_DOMINATORS);
/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */
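/* The fixed point computed below can be written as

     VBEOUT(bb) = intersection over successors S of bb of VBEIN(S)
                  (empty at the exit block)
     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))

   which is what the sbitmap_intersection_of_succs and
   sbitmap_a_or_b_and_c_cg calls below implement.  A toy stand-alone
   sketch of the same iteration, with a made-up three-block
   straight-line CFG and expressions packed into one word per block
   (illustration only, not part of the pass):  */
#if 0
#include <stdio.h>

int
main (void)
{
  /* Hypothetical local properties: bit k set = expression k holds.  */
  unsigned antloc[3] = { 0x1, 0x0, 0x2 };
  unsigned transp[3] = { 0x3, 0x2, 0x3 };
  unsigned vbein[3] = { 0, 0, 0 }, vbeout[3] = { 0, 0, 0 };
  int changed = 1, passes = 0, bb;

  while (changed)
    {
      changed = 0;
      passes++;
      /* Scan blocks in reverse, as compute_code_hoist_vbeinout does.  */
      for (bb = 2; bb >= 0; bb--)
        {
          /* Block bb falls through to bb + 1; block 2 exits.  */
          unsigned out = bb < 2 ? vbein[bb + 1] : 0;
          unsigned in = antloc[bb] | (out & transp[bb]);

          if (out != vbeout[bb] || in != vbein[bb])
            changed = 1;
          vbeout[bb] = out;
          vbein[bb] = in;
        }
    }

  printf ("converged after %d passes\n", passes);
  return 0;
}
#endif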
compute_code_hoist_vbeinout (void)
  int changed, passes;

  sbitmap_vector_zero (hoist_vbeout, last_basic_block);
  sbitmap_vector_zero (hoist_vbein, last_basic_block);

      /* We scan the blocks in the reverse order to speed up
         the convergence.  */
      FOR_EACH_BB_REVERSE (bb)
          if (bb->next_bb != EXIT_BLOCK_PTR)
            sbitmap_intersection_of_succs (hoist_vbeout[bb->index],
                                           hoist_vbein, bb->index);

          changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index],
                                              antloc[bb->index],
                                              hoist_vbeout[bb->index],
                                              transp[bb->index]);

  if (dump_file)
    fprintf (dump_file, "hoisting vbeinout computation: %d passes\n", passes);
/* Top level routine to do the dataflow analysis needed by code hoisting.  */

compute_code_hoist_data (void)
  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  calculate_dominance_info (CDI_DOMINATORS);
  if (dump_file)
    fprintf (dump_file, "\n");
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches it instead of *all*
   paths.  */
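/* A stand-alone sketch of that all-paths check on a toy CFG, walking
   predecessors from BB back toward EXPR_BB in the spirit of the
   function below.  The CFG, local properties, and names here are all
   made up for illustration:  */
#if 0
#define NBLOCKS 4

static int comp_bit[NBLOCKS];    /* block recomputes the expression  */
static int transp_bit[NBLOCKS];  /* block is transparent for it      */
/* preds[b] lists b's predecessors, terminated by -1.  */
static const int preds[NBLOCKS][3] =
  { { -1 }, { 0, -1 }, { 0, -1 }, { 1, 2, -1 } };

static int
reaches_unimpaired (int expr_bb, int bb, char *visited)
{
  int i, pred;

  for (i = 0; (pred = preds[bb][i]) != -1; i++)
    {
      if (pred == expr_bb || visited[pred])
        continue;               /* reached the source, or already checked */
      else if (comp_bit[pred])
        continue;               /* predecessor recomputes the expression  */
      else if (!transp_bit[pred])
        return 0;               /* killed along this path                 */
      else
        {
          visited[pred] = 1;
          if (!reaches_unimpaired (expr_bb, pred, visited))
            return 0;
        }
    }
  return 1;                     /* every incoming path was safe           */
}
#endif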
hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
  int visited_allocated_locally = 0;

  if (visited == NULL)
      visited_allocated_locally = 1;
      visited = XCNEWVEC (char, last_basic_block);

  FOR_EACH_EDGE (pred, ei, bb->preds)
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR)
        break;
      else if (pred_bb == expr_bb)
        continue;
      else if (visited[pred_bb->index])
        continue;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr_index))
        break;
      else if (! TEST_BIT (transp[pred_bb->index], expr_index))
        break;
      else
          visited[pred_bb->index] = 1;
          if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
                                           pred_bb, visited))
            break;

  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
/* Actually perform code hoisting.  */

hoist_code (void)
  basic_block bb, dominated;
  VEC (basic_block, heap) *domby;
  struct expr **index_map;

  sbitmap_vector_zero (hoist_exprs, last_basic_block);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = XCNEWVEC (struct expr *, expr_hash_table.n_elems);
  for (i = 0; i < expr_hash_table.size; i++)
    for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Walk over each basic block looking for potentially hoistable
     expressions, nothing gets hoisted from the entry block.  */
  FOR_EACH_BB (bb)
      int found = 0;
      int insn_inserted_p;

      domby = get_dominated_by (CDI_DOMINATORS, bb);
      /* Examine each expression that is very busy at the exit of this
         block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
          int hoistable = 0;

          if (TEST_BIT (hoist_vbeout[bb->index], i)
              && TEST_BIT (transpout[bb->index], i))
              /* We've found a potentially hoistable expression, now
                 we look at every block BB dominates to see if it
                 computes the expression.  */
              for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
                  /* Ignore self dominance.  */
                  if (bb == dominated)
                    continue;
                  /* We've found a dominated block, now see if it computes
                     the busy expression and whether or not moving that
                     expression to the "beginning" of that block is safe.  */
                  if (!TEST_BIT (antloc[dominated->index], i))
                    continue;

                  /* Note if the expression would reach the dominated block
                     unimpaired if it was placed at the end of BB.

                     Keep track of how many times this expression is hoistable
                     from a dominated block into BB.  */
                  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                    hoistable++;

              /* If we found more than one hoistable occurrence of this
                 expression, then note it in the bitmap of expressions to
                 hoist.  It makes no sense to hoist things which are computed
                 in only one BB, and doing so tends to pessimize register
                 allocation.  One could increase this value to try harder
                 to avoid any possible code expansion due to register
                 allocation issues; however experiments have shown that
                 the vast majority of hoistable expressions are only movable
                 from two successors, so raising this threshold is likely
                 to nullify any benefit we get from code hoisting.  */
              if (hoistable > 1)
                  SET_BIT (hoist_exprs[bb->index], i);
                  found = 1;
      /* If we found nothing to hoist, then quit now.  */
      if (! found)
          VEC_free (basic_block, heap, domby);
          continue;

      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
          /* We want to insert the expression into BB only once, so
             note when we've inserted it.  */
          insn_inserted_p = 0;

          /* These tests should be the same as the tests above.  */
          if (TEST_BIT (hoist_exprs[bb->index], i))
              /* We've found a potentially hoistable expression, now
                 we look at every block BB dominates to see if it
                 computes the expression.  */
              for (j = 0; VEC_iterate (basic_block, domby, j, dominated); j++)
                  /* Ignore self dominance.  */
                  if (bb == dominated)
                    continue;

                  /* We've found a dominated block, now see if it computes
                     the busy expression and whether or not moving that
                     expression to the "beginning" of that block is safe.  */
                  if (!TEST_BIT (antloc[dominated->index], i))
                    continue;

                  /* The expression is computed in the dominated block and
                     it would be safe to compute it at the start of the
                     dominated block.  Now we have to determine if the
                     expression would reach the dominated block if it was
                     placed at the end of BB.  */
                  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                      struct expr *expr = index_map[i];
                      struct occr *occr = expr->antic_occr;

                      /* Find the right occurrence of this expression.  */
                      while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
                        occr = occr->next;

                      insn = occr->insn;
                      set = single_set (insn);

                      /* Create a pseudo-reg to store the result of reaching
                         expressions into.  Get the mode for the new pseudo
                         from the mode of the original destination pseudo.  */
                      if (expr->reaching_reg == NULL)
                        expr->reaching_reg
                          = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                      gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
                      occr->deleted_p = 1;
                      if (!insn_inserted_p)
                          insert_insn_end_basic_block (index_map[i], bb, 0);
                          insn_inserted_p = 1;

      VEC_free (basic_block, heap, domby);
/* Top level routine to perform one code hoisting (aka unification) pass.

   Return nonzero if a change was made.  */

one_code_hoisting_pass (void)
  int changed = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table (&expr_hash_table);
  if (dump_file)
    dump_hash_table (dump_file, "Code Hoisting Expressions", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
      alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();

  free_hash_table (&expr_hash_table);

  return changed;
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to itself.

            int i;
            float a[10];

            void foo(float scale)
            {
              for (i=0; i<10; i++)
                a[i] *= scale;
            }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

   The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which when using edge based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

   Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
pre_ldst_expr_hash (const void *p)
  int do_not_record_p = 0;
  const struct ls_expr *x = p;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);

pre_ldst_expr_eq (const void *p1, const void *p2)
  const struct ls_expr *ptr1 = p1, *ptr2 = p2;
  return expr_equiv_p (ptr1->pattern, ptr2->pattern);
/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct ls_expr *
ldst_entry (rtx x)
  int do_not_record_p = 0;
  struct ls_expr * ptr;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
                   NULL, /*have_reg_qty=*/false);

  slot = htab_find_slot_with_hash (pre_ldst_table, &e, hash, INSERT);
  if (*slot)
    return (struct ls_expr *)*slot;

  ptr = XNEW (struct ls_expr);

  ptr->next = pre_ldst_mems;
  ptr->pattern_regs = NULL_RTX;
  ptr->loads = NULL_RTX;
  ptr->stores = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->hash_index = hash;
  pre_ldst_mems = ptr;
/* Free up an individual ldst entry.  */

free_ldst_entry (struct ls_expr * ptr)
  free_INSN_LIST_list (& ptr->loads);
  free_INSN_LIST_list (& ptr->stores);

/* Free up all memory associated with the ldst list.  */

free_ldst_mems (void)
  htab_delete (pre_ldst_table);
  pre_ldst_table = NULL;

  while (pre_ldst_mems)
      struct ls_expr * tmp = pre_ldst_mems;
      pre_ldst_mems = pre_ldst_mems->next;
      free_ldst_entry (tmp);

  pre_ldst_mems = NULL;
/* Dump debugging info about the ldst list.  */

print_ldst_list (FILE * file)
  struct ls_expr * ptr;

  fprintf (file, "LDST list: \n");

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
      fprintf (file, "  Pattern (%3d): ", ptr->index);
      print_rtl (file, ptr->pattern);

      fprintf (file, "\n         Loads : ");
      if (ptr->loads)
        print_rtl (file, ptr->loads);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n        Stores : ");
      if (ptr->stores)
        print_rtl (file, ptr->stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n\n");

  fprintf (file, "\n");
/* Returns 1 if X is in the list of ldst only expressions.  */

static struct ls_expr *
find_rtx_in_ldst (rtx x)
  if (!pre_ldst_table)
    return NULL;
  slot = htab_find_slot (pre_ldst_table, &e, NO_INSERT);
  if (!slot || ((struct ls_expr *)*slot)->invalid)
    return NULL;
  return *slot;

/* Assign each element of the list of mems a monotonically increasing value.  */

enumerate_ldsts (void)
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;

/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr (void)
  return pre_ldst_mems;

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (struct ls_expr * ptr)
  return ptr->next;
/* Load Motion for loads which only kill themselves.  */

/* Return true if x is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */
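/* For intuition, a few cases the predicate below rejects, keyed to its
   visible tests (hypothetical C for illustration; the real tests run
   on the RTL MEM, not on C source):

       *(volatile int *)p       rejected: MEM_VOLATILE_P
       struct s tmp = *q;       rejected: BLKmode access
       an argument on the stack rejected: mentions stack_pointer_rtx

   Anything that survives all the tests is a candidate for the
   ld_motion list.  */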
simple_mem (const_rtx x)
  if (MEM_VOLATILE_P (x))
    return 0;

  if (GET_MODE (x) == BLKmode)
    return 0;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

  if (side_effects_p (x))
    return 0;

  /* Do not consider function arguments passed on stack.  */
  if (reg_mentioned_p (stack_pointer_rtx, x))
    return 0;

  if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
    return 0;

  return 1;
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not alias one another.  If we miss a load, we will get
   the wrong value since gcse might common it and we won't know to
   fix it up.  */
invalidate_any_buried_refs (rtx x)
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (MEM_P (x) && simple_mem (x))
      ptr = ldst_entry (x);
      ptr->invalid = 1;

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if (fmt[i] == 'e')
        invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          invalidate_any_buried_refs (XVECEXP (x, i, j));
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  If there are any uses/defs
   which don't match these criteria, they are invalidated and trimmed out
   later.  */
compute_ld_motion_mems (void)
  struct ls_expr * ptr;

  pre_ldst_mems = NULL;
  pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
                                pre_ldst_expr_eq, NULL);

  FOR_EACH_BB (bb)
      FOR_BB_INSNS (bb, insn)
          if (GET_CODE (PATTERN (insn)) == SET)
              rtx src = SET_SRC (PATTERN (insn));
              rtx dest = SET_DEST (PATTERN (insn));

              /* Check for a simple LOAD...  */
              if (MEM_P (src) && simple_mem (src))
                  ptr = ldst_entry (src);
                  if (REG_P (dest))
                    ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
              else
                  /* Make sure there isn't a buried load somewhere.  */
                  invalidate_any_buried_refs (src);

              /* Check for stores.  Don't worry about aliased ones, they
                 will block any movement we might do later.  We only care
                 about this exact pattern since those are the only
                 circumstances in which we will ignore the aliasing info.  */
              if (MEM_P (dest) && simple_mem (dest))
                  ptr = ldst_entry (dest);

                  if (! MEM_P (src)
                      && GET_CODE (src) != ASM_OPERANDS
                      /* Check for REG manually since want_to_gcse_p
                         returns 0 for all REGs.  */
                      && can_assign_to_reg_p (src))
                    ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
          else
            invalidate_any_buried_refs (PATTERN (insn));
/* Remove any references that have been either invalidated or are not in the
   expression list for pre gcse.  */

trim_ld_motion_mems (void)
  struct ls_expr * * last = & pre_ldst_mems;
  struct ls_expr * ptr = pre_ldst_mems;

      /* Delete if entry has been made invalid.  */
      if (! ptr->invalid)
          /* Delete if we cannot find this mem in the expression list.  */
          unsigned int hash = ptr->hash_index % expr_hash_table.size;

          for (expr = expr_hash_table.table[hash];
               expr != NULL;
               expr = expr->next_same_hash)
            if (expr_equiv_p (expr->expr, ptr->pattern))
              break;
      else
        expr = (struct expr *) 0;

      /* Set the expression field if we are keeping it.  */

          htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
          free_ldst_entry (ptr);

  /* Show the world what we've found.  */
  if (dump_file && pre_ldst_mems != NULL)
    print_ldst_list (dump_file);
/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */
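/* Sketch of the store rewrite, with hypothetical source-level names
   (reg stands for the expression's reaching register):

       mem = expr;    ==>    reg = expr;
                             mem = reg;

   so any load of mem that was turned into a use of reg still sees the
   stored value.  */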
update_ld_motion_stores (struct expr * expr)
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
      /* We can try to find just the REACHED stores, but it shouldn't
         matter to set the reaching reg everywhere...  some might be
         dead and should be eliminated later.  */

      /* We replace (set mem expr) with (set reg expr) (set mem reg)
         where reg is the reaching reg used in the load.  We checked in
         compute_ld_motion_mems that we can replace (set mem expr) with
         (set reg expr) in that insn.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
          rtx insn = XEXP (list, 0);
          rtx pat = PATTERN (insn);
          rtx src = SET_SRC (pat);
          rtx reg = expr->reaching_reg;

          /* If we've already copied it, continue.  */
          if (expr->reaching_reg == src)
            continue;

          if (dump_file)
              fprintf (dump_file, "PRE:  store updated with reaching reg ");
              print_rtl (dump_file, expr->reaching_reg);
              fprintf (dump_file, ":\n	");
              print_inline_rtx (dump_file, insn, 8);
              fprintf (dump_file, "\n");

          copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
          new = emit_insn_before (copy, insn);
          record_one_set (REGNO (reg), new);
          SET_SRC (pat) = reg;
          df_insn_rescan (insn);

          /* Un-recognize this pattern since it's probably different now.  */
          INSN_CODE (insn) = -1;
          gcse_create_count++;
/* Store motion code.  */

#define ANTIC_STORE_LIST(x)		((x)->loads)
#define AVAIL_STORE_LIST(x)		((x)->stores)
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)

/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static int * regvec;

/* And current insn, for the same routine.  */
static rtx compute_store_table_current_insn;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;
/* Checks to set if we need to mark a register set.  Called from
   note_stores.  */

reg_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
              void *data)
  sbitmap bb_reg = data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
      regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
      if (bb_reg)
        SET_BIT (bb_reg, REGNO (dest));

/* Clear any mark that says that this insn sets dest.  Called from
   note_stores.  */

reg_clear_last_set (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                    void *data)
  int *dead_vec = data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest) &&
      dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
    dead_vec[REGNO (dest)] = 0;
/* Return zero if some of the registers in list X are killed
   due to set of registers in bitmap REGS_SET.  */

store_ops_ok (const_rtx x, int *regs_set)
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
      reg = XEXP (x, 0);
      if (regs_set[REGNO(reg)])
        return 0;

  return 1;

/* Returns a list of registers mentioned in X.  */

extract_mentioned_regs (rtx x)
  return extract_mentioned_regs_helper (x, NULL_RTX);
/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */

extract_mentioned_regs_helper (rtx x, rtx accum)
  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:
  code = GET_CODE (x);
  switch (code)
    case REG:
      return alloc_EXPR_LIST (0, x, accum);

    /* We do not run this function with arguments having side effects.  */

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  while (i >= 0)
      if (fmt[i] == 'e')
          rtx tem = XEXP (x, i);

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.  */
          if (i == 0)
              x = tem;
              goto repeat;

          accum = extract_mentioned_regs_helper (tem, accum);
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);

  return accum;
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB from
   head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to which it fails to be
      available is stored in reaching_reg.

   Things are complicated a bit by the fact that there already may be stores
   to the same MEM from other blocks; also caller must take care of the
   necessary cleanup of the temporary markers after end of the basic block.  */
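/* For intuition, consider one block with two stores to the same
   location (hypothetical source):

       *p = a;     <- seen first: roughly, this is the anticipatable
                      candidate, and it is made dead as "available" by
                      the later store
       ...
       *p = b;     <- seen last: becomes the head of AVAIL_STORE_LIST

   Only the first store in a block can be anticipatable (nothing may
   kill it before it executes), and only the last can be available at
   the block's end.  */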
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
  struct ls_expr * ptr;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instructions.  */
  if (!can_assign_to_reg_p (SET_SRC (set)))
    return;

  ptr = ldst_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ANTIC_STORE_LIST (ptr))
    check_anticipatable = 1;
  else
      tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
          && BLOCK_FOR_INSN (tmp) != bb)
        check_anticipatable = 1;

  if (check_anticipatable)
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
        tmp = NULL_RTX;
      else
        tmp = insn;
      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
                                                ANTIC_STORE_LIST (ptr));

  /* It is not necessary to check whether store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!AVAIL_STORE_LIST (ptr))
    check_available = 1;
  else
      tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
        check_available = 1;

  if (check_available)
      /* Check that we have already reached the insn at which the check
         failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
          for (tmp = BB_END (bb);
               tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
               tmp = PREV_INSN (tmp))
            continue;
          if (tmp == insn)
            check_available = 0;
      else
        check_available = store_killed_after (dest, ptr->pattern_regs, insn,
                                              bb, regs_set_after,
                                              &LAST_AVAIL_CHECK_FAILURE (ptr));

  if (!check_available)
    AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
/* Find available and anticipatable stores.  */

compute_store_table (void)
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
                                           max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  pre_ldst_table = htab_create (13, pre_ldst_expr_hash,
                                pre_ldst_expr_eq, NULL);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
      /* First compute the registers set in this block.  */
      regvec = last_set_in;

      FOR_BB_INSNS (bb, insn)
          if (! INSN_P (insn))
            continue;

          if (CALL_P (insn))
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                    last_set_in[regno] = INSN_UID (insn);
                    SET_BIT (reg_set_in_block[bb->index], regno);

          pat = PATTERN (insn);
          compute_store_table_current_insn = insn;
          note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      FOR_BB_INSNS (bb, insn)
          if (! INSN_P (insn))
            continue;

          if (CALL_P (insn))
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  already_set[regno] = 1;

          pat = PATTERN (insn);
          note_stores (pat, reg_set_info, NULL);

          /* Now that we've marked regs, look for stores.  */
          find_moveable_store (insn, already_set, last_set_in);

          /* Unmark regs that are no longer set.  */
          compute_store_table_current_insn = insn;
          note_stores (pat, reg_clear_last_set, last_set_in);
          if (CALL_P (insn))
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
                    && last_set_in[regno] == INSN_UID (insn))
                  last_set_in[regno] = 0;

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
        gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
          LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
          if (ANTIC_STORE_LIST (ptr)
              && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
            ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
      if (!AVAIL_STORE_LIST (ptr))
          *prev_next_ptr_ptr = ptr->next;
          htab_remove_elt_with_hash (pre_ldst_table, ptr, ptr->hash_index);
          free_ldst_entry (ptr);
      else
        prev_next_ptr_ptr = &ptr->next;

  ret = enumerate_ldsts ();

  if (dump_file)
      fprintf (dump_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (dump_file);
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

load_kills_store (const_rtx x, const_rtx store_pattern, int after)
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
                            rtx_addr_varies_p);

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

find_loads (const_rtx x, const_rtx store_pattern, int after)
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
      if (load_kills_store (x, store_pattern, after))
        return true;

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
      if (fmt[i] == 'e')
        ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);

  return ret;
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
  if (GET_CODE (pat) == SET)
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
          && !expr_equiv_p (dest, x))
          if (after)
              if (output_dependence (dest, x))
                return true;
          else
              if (output_dependence (x, dest))
                return true;

  if (find_loads (pat, x, after))
    return true;

  return false;
/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
  const_rtx reg, base, note, pat;

  if (!INSN_P (insn))
    return false;

  if (CALL_P (insn))
      /* A normal or pure call might read from pattern,
         but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
        return true;

      /* But even a const call reads its parameters.  Check whether the
         base of some of registers used in mem is stack pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
          base = find_base_term (XEXP (reg, 0));
          if (!base
              || (GET_CODE (base) == ADDRESS
                  && GET_MODE (base) == Pmode
                  && XEXP (base, 0) == stack_pointer_rtx))
            return true;

      return false;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
      if (store_killed_in_pat (x, pat, after))
        return true;
  else if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
          return true;
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (expr_equiv_p (note, x))
    return true;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn in which that occurs.  */

store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
                    int *regs_set_after, rtx *fail_insn)
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
      /* We do not know where it will happen.  */
      if (fail_insn)
        *fail_insn = NULL_RTX;
      return true;

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
        if (fail_insn)
          *fail_insn = act;
        return true;

  return false;
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn, const_basic_block bb,
                     int *regs_set_before)
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */

build_store_vectors (void)
  int *regs_set_in_block;
  struct ls_expr * ptr;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);

          /* If we've already seen an available expression in this block,
             we can delete this one (it occurs earlier in the block).  We'll
             copy the SRC expression to an unused register in case there
             are any side effects.  */
          if (TEST_BIT (ae_gen[bb->index], ptr->index))
              rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
              if (dump_file)
                fprintf (dump_file, "Removing redundant store:\n");
              replace_store_insn (r, XEXP (st, 0), bb, ptr);
          else
            SET_BIT (ae_gen[bb->index], ptr->index);

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);
          SET_BIT (st_antloc[bb->index], ptr->index);

  ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB (bb)
      for (regno = 0; regno < max_gcse_regno; regno++)
        regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
          if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
                                  bb, regs_set_in_block, NULL))
              /* It should not be necessary to consider the expression
                 killed if it is both anticipatable and available.  */
              if (!TEST_BIT (st_antloc[bb->index], ptr->index)
                  || !TEST_BIT (ae_gen[bb->index], ptr->index))
                SET_BIT (ae_kill[bb->index], ptr->index);
          else
            SET_BIT (transp[bb->index], ptr->index);

  free (regs_set_in_block);

  if (dump_file)
      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (dump_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avloc", "", ae_gen, last_basic_block);
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

insert_insn_start_basic_block (rtx insn, basic_block bb)
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
      if (! LABEL_P (before)
          && !NOTE_INSN_BASIC_BLOCK_P (before))
        break;
      prev = before;
      if (prev == BB_END (bb))
        break;
      before = NEXT_INSN (before);

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
      fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
               bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

insert_store (struct ls_expr * expr, edge e)
  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
        int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

        gcc_assert (index != EDGE_INDEX_NO_EDGE);
        if (! TEST_BIT (pre_insert_map[index], expr->index))
          break;

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (pre_insert_map[index], expr->index);
      insert_insn_start_basic_block (insn, bb);
      return 0;

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't use to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
      fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");

  return 1;
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks);
  sp = 0;
  ei = ei_start (bb->succs);

  sbitmap_zero (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
      if (!act)
          if (!sp)
              free (stack);
              sbitmap_free (visited);
              return;
          act = ei_edge (stack[--sp]);
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
          || TEST_BIT (visited, bb->index))
          if (!ei_end_p (ei))
            ei_next (&ei);
          act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
          continue;
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
          for (last = ANTIC_STORE_LIST (smexpr);
               BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
               last = XEXP (last, 1))
            continue;
          last = XEXP (last, 0);
      else
        last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
            note = find_reg_equal_equiv_note (insn);
            if (!note || !expr_equiv_p (XEXP (note, 0), mem))
              continue;

            if (dump_file)
              fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                       INSN_UID (insn));
            remove_note (insn, note);

      if (!ei_end_p (ei))
        ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
          if (act)
            stack[sp++] = ei;
          ei = ei_start (bb->succs);
          act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
/* This routine will replace a store with a SET to a specified register.  */

replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
  rtx insn, mem, note, set, ptr, pair;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));

  for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
        XEXP (ptr, 0) = insn;
        break;

  /* Move the notes from the deleted insn to its replacement, and patch
     up the LIBCALL notes.  */
  REG_NOTES (insn) = REG_NOTES (del);

  note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
  if (note)
      pair = XEXP (note, 0);
      note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
      XEXP (note, 0) = insn;
  note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
  if (note)
      pair = XEXP (note, 0);
      note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
      XEXP (note, 0) = insn;

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
      fprintf (dump_file,
               "STORE_MOTION delete insn in BB %d:\n  ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE MOTION replaced with insn:\n  ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
        set = single_set (insn);
        if (!set)
          continue;
        if (expr_equiv_p (SET_DEST (set), mem))
          return;
        note = find_reg_equal_equiv_note (insn);
        if (!note || !expr_equiv_p (XEXP (note, 0), mem))
          continue;

        if (dump_file)
          fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                   INSN_UID (insn));
        remove_note (insn, note);
  remove_reachable_equiv_notes (bb, smexpr);
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

delete_store (struct ls_expr * expr, basic_block bb)
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
/* Free memory used by store motion.  */

free_store_memory (void)
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */
store_motion (void)
  struct ls_expr * ptr;
  int update_flow = 0;

  if (dump_file)
      fprintf (dump_file, "before store motion\n");
      print_rtl (dump_file, get_insns ());

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
      htab_delete (pre_ldst_table);
      pre_ldst_table = NULL;
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
      /* If any of the edges we have above are abnormal, we can't move this
         store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
        if (TEST_BIT (pre_insert_map[x], ptr->index)
            && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
          break;

      if (x >= 0)
          if (dump_file != NULL)
            fprintf (dump_file,
                     "Can't replace store %d: abnormal edge from %d to %d\n",
                     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
                     INDEX_EDGE (edge_list, x)->dest->index);
          continue;

      /* Now we want to insert the new stores which are going to be needed.  */
      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();
/* Entry point for jump bypassing optimization pass.  */

bypass_jumps (void)
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (dump_file)
    dump_flow_info (dump_file, dump_flags);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1
      || is_too_expensive (_ ("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  max_gcse_regno = max_reg_num ();
  changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);

  if (dump_file)
      fprintf (dump_file, "BYPASS of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d bytes\n\n", bytes_used);

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();

  return changed;
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

is_too_expensive (const char *pass)
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple switch statements.  Rather than simply
     threshold the number of blocks, use something with a more
     graceful degradation.  */
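/* For a sense of scale: with the test below, a function with 1000
   basic blocks is only rejected once it has more than
   20000 + 1000 * 4 = 24000 edges, i.e. an average of 24 edges per
   block, far above the ~2 seen in normal code.  A toy check with
   made-up numbers (illustration only):  */
#if 0
#include <stdio.h>

int
main (void)
{
  int n_basic_blocks = 1000, n_edges = 24001;

  if (n_edges > 20000 + n_basic_blocks * 4)
    printf ("too expensive: %d edges/basic block\n",
            n_edges / n_basic_blocks);
  return 0;
}
#endif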
  if (n_edges > 20000 + n_basic_blocks * 4)
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d edges/basic block",
               pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
      warning (OPT_Wdisabled_optimization,
               "%s: %d basic blocks and %d registers",
               pass, n_basic_blocks, max_reg_num ());

      return true;

  return false;
gate_handle_jump_bypass (void)
  return optimize > 0 && flag_gcse;

/* Perform jump bypassing and control flow optimizations.  */

rest_of_handle_jump_bypass (void)
  delete_unreachable_blocks ();
  if (bypass_jumps ())
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
      rebuild_jump_labels (get_insns ());

struct tree_opt_pass pass_jump_bypass =
{
  "bypass",                             /* name */
  gate_handle_jump_bypass,              /* gate */
  rest_of_handle_jump_bypass,           /* execute */
  0,                                    /* static_pass_number */
  TV_BYPASS,                            /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_flow,  /* todo_flags_finish */
gate_handle_gcse (void)
  return optimize > 0 && flag_gcse;

rest_of_handle_gcse (void)
  int save_csb, save_cfj;
  int tem2 = 0, tem;

  tem = gcse_main (get_insns ());
  delete_trivially_dead_insns (get_insns (), max_reg_num ());
  rebuild_jump_labels (get_insns ());
  save_csb = flag_cse_skip_blocks;
  save_cfj = flag_cse_follow_jumps;
  flag_cse_skip_blocks = flag_cse_follow_jumps = 0;

  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
  if (flag_expensive_optimizations)
      timevar_push (TV_CSE);
      tem2 = cse_main (get_insns (), max_reg_num ());
      df_finish_pass (false);
      purge_all_dead_edges ();
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
      timevar_pop (TV_CSE);
      cse_not_expected = !flag_rerun_cse_after_loop;

  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up again.  */
  if (tem || tem2)
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (0);
      timevar_pop (TV_JUMP);

  flag_cse_skip_blocks = save_csb;
  flag_cse_follow_jumps = save_cfj;
  return 0;

struct tree_opt_pass pass_gcse =
{
  "gcse1",                              /* name */
  gate_handle_gcse,                     /* gate */
  rest_of_handle_gcse,                  /* execute */
  0,                                    /* static_pass_number */
  TV_GCSE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_verify_flow | TODO_ggc_collect,  /* todo_flags_finish */
};

#include "gt-gcse.h"