/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
/* Propagate flow information through back edges and thus enable PRE to
   move loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
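
   As a hypothetical source-level sketch (not output of this pass) of why
   the first propagation pass helps:

     a = b;                     a = b;
     x = b + c;     ----->      x = b + c;
     y = a + c;                 y = b + c;

   Propagating the copy `a = b' exposes `b + c' as a common subexpression
   that GCSE can then eliminate; the second propagation pass then cleans up
   the register copies that the elimination itself creates.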
   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
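
   For illustration, a loop invariant computation such as

     for (i = 0; i < n; i++)
       x[i] = a + b;

   is partially redundant along the loop's back edge, since `a + b' is
   available from the previous iteration; PRE moves the computation to the
   loop preheader and reuses its result inside the loop.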
   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************
   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
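
   A sketch of the above on a diamond-shaped CFG (hypothetical example):

     if (cond)              if (cond)
       x = a + b;             { x = a + b; t = x; }
     else           ==>     else
       ;                      t = a + b;
     y = a + b;             y = t;

   `a + b' is partially redundant at `y = a + b': the redundant computation
   is deleted in favor of the new pseudo `t' (step 3), a copy of the
   expression is inserted on the else path (step 4), and each reaching
   occurrence copies its result into `t' (step 5).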
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;
struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */
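
/* For example (hypothetical block):

     x = a + b;    <- anticipatable occurrence of `a + b'
     a = ...;
     y = a + b;    <- available occurrence of `a + b'

   The first computation is recorded in an expr's antic_occr list (its
   operands are unmodified before it) and the last in its avail_occr list
   (its operands are unmodified after it).  */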
struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is an expression or a copy propagation one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself, i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance.  (All stores copy the same value to the reaching reg used
   for the loads.)  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */
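
/* As a hypothetical example of a candidate this tracks: in

     t = *p;            (load)
     ...
     *p = t2;           (store, and nothing else writes *p)

   the loads and stores of *p can share a single reaching register; each
   store also copies its value to that register, so the stored value can
   be re-issued when the MEM is moved.  */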
struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias analysis.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, 0, 0);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
        /* Do nothing.  */ ;
      else
        {
          timevar_push (TV_PRE);
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
          timevar_pop (TV_PRE);
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
        {
          timevar_push (TV_HOIST);
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
          timevar_pop (TV_HOIST);
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, 1, 0);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */
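
  /* For example (hypothetical uids): given the insn stream
       NOTE (uid 12), INSN (uid 3), NOTE (uid 40), INSN (uid 7)
     the mapping below gives uid 3 -> cuid 0 and uid 7 -> cuid 1, while
     each note maps to the cuid of the next real insn (so uid 12 -> 0,
     uid 40 -> 1).  */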

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
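
/* As a small hypothetical example of the three properties: for a block

     1: x = a + b;
     2: a = ...;
     3: y = a + b;

   `a + b' is locally anticipatable (insn 1 computes it before `a' is
   modified), locally available (insn 3 computes it after the last
   modification of its operands), but not transparent (the block modifies
   `a', so a value of `a + b' carried through the block is killed).  */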
static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction;
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;

      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
                      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        antic_occr = NULL;

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          antic_occr->insn = insn;
          antic_occr->next = cur_expr->antic_occr;
          antic_occr->deleted_p = 0;
          cur_expr->antic_occr = antic_occr;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
        {
          /* Found another instance of the expression in the same basic block.
             Prefer this occurrence to the currently recorded one.  We want
             the last one in the block and the block is scanned from start
             to end.  */
          avail_occr->insn = insn;
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          avail_occr->insn = insn;
          avail_occr->next = cur_expr->avail_occr;
          avail_occr->deleted_p = 0;
          cur_expr->avail_occr = avail_occr;
        }
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers a constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && gcse_constant_p (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instruction after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means address of parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((REG_P (src)
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BB_END (BLOCK_FOR_INSN (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
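
  /* A hypothetical illustration of what -fgcse-las can then clean up:

       *p = x;
       ...             (nothing here clobbers *p)
       y = *p;

     With the MEM `*p' recorded as available after the store, the later
     load can be recognized as redundant and replaced by a use of `x'.  */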
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
          /* Only record sets of pseudo-regs in the hash table.  */
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instruction after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && want_to_gcse_p (dest)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means address of parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* Stores are never anticipatable.  */
          int antic_p = 0;
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = oprs_available_p (dest, insn)
                        && ! JUMP_P (insn);

          /* Record the memory expression (DEST) in the hash table.  */
          insert_expr_in_table (dest, GET_MODE (dest), insn,
                                antic_p, avail_p, table);
        }
    }
}
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If TABLE->set_p is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}
static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
1895 /* Record register first/last/block set information for REGNO in INSN.
1897 first_set records the first place in the block where the register
1898 is set and is used to compute "anticipatability".
1900 last_set records the last place in the block where the register
1901 is set and is used to compute "availability".
1903 last_bb records the block for which first_set and last_set are
1904 valid, as a quick test to invalidate them.
1906 reg_set_in_block records whether the register is set in the block
1907 and is used to compute "transparency". */
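/* As an illustrative sketch (not part of the pass), the tests built on
   these fields reduce to CUID comparisons, roughly:

     struct reg_avail_info *info = &reg_avail_info[regno];
     int antic_p = (info->last_bb != current_bb
                    || INSN_CUID (insn) <= info->first_set);
     int avail_p = (info->last_bb != current_bb
                    || info->last_set < INSN_CUID (insn));

   i.e. a register is anticipatable at INSN if it is not set earlier in
   the block, and available at INSN if it is not set later in the block. */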
1910 record_last_reg_set_info (rtx insn, int regno)
1912 struct reg_avail_info *info = &reg_avail_info[regno];
1913 int cuid = INSN_CUID (insn);
1915 info->last_set = cuid;
1916 if (info->last_bb != current_bb)
1918 info->last_bb = current_bb;
1919 info->first_set = cuid;
1920 SET_BIT (reg_set_in_block[current_bb->index], regno);
1925 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1926 Note we store a pair of elements in the list, so they have to be
1927 taken off pairwise. */
1930 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
1933 rtx dest_addr, insn;
1936 while (GET_CODE (dest) == SUBREG
1937 || GET_CODE (dest) == ZERO_EXTRACT
1938 || GET_CODE (dest) == STRICT_LOW_PART)
1939 dest = XEXP (dest, 0);
1941 /* If DEST is not a MEM, then it will not conflict with a load. Note
1942 that function calls are assumed to clobber memory, but are handled elsewhere. */
1948 dest_addr = get_addr (XEXP (dest, 0));
1949 dest_addr = canon_rtx (dest_addr);
1950 insn = (rtx) v_insn;
1951 bb = BLOCK_NUM (insn);
1953 canon_modify_mem_list[bb] =
1954 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
1955 canon_modify_mem_list[bb] =
1956 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
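/* A sketch (for illustration) of how the pairs recorded here are
   consumed; compute_transp below does exactly this walk, peeling off
   the MEM and its canonicalized address two at a time:

     rtx node, dest, dest_addr;
     for (node = canon_modify_mem_list[bb]; node; node = XEXP (node, 1))
       {
         dest = XEXP (node, 0);
         node = XEXP (node, 1);
         dest_addr = XEXP (node, 0);
         ...use dest/dest_addr in a dependence test...
       }
*/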
1959 /* Record memory modification information for INSN. We do not actually care
1960 about the memory location(s) that are set, or even how they are set (consider
1961 a CALL_INSN). We merely need to record which insns modify memory. */
1964 record_last_mem_set_info (rtx insn)
1966 int bb = BLOCK_NUM (insn);
1968 /* load_killed_in_block_p will handle the case of calls clobbering everything. */
1970 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
1971 bitmap_set_bit (modify_mem_list_set, bb);
1975 /* Note that traversals of this loop (other than for freeing)
1976 will break after encountering a CALL_INSN. So, there's no
1977 need to insert a pair of items, as canon_list_insert does. */
1978 canon_modify_mem_list[bb] =
1979 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
1980 bitmap_set_bit (blocks_with_calls, bb);
1983 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
1986 /* Called from compute_hash_table via note_stores to handle one
1987 SET or CLOBBER in an insn. DATA is really the instruction in which
1988 the SET is taking place. */
1991 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
1993 rtx last_set_insn = (rtx) data;
1995 if (GET_CODE (dest) == SUBREG)
1996 dest = SUBREG_REG (dest);
1999 record_last_reg_set_info (last_set_insn, REGNO (dest));
2000 else if (MEM_P (dest)
2001 /* Ignore pushes, they clobber nothing. */
2002 && ! push_operand (dest, GET_MODE (dest)))
2003 record_last_mem_set_info (last_set_insn);
2006 /* Top level function to create an expression or assignment hash table.
2008 Expression entries are placed in the hash table if
2009 - they are of the form (set (pseudo-reg) src),
2010 - src is something we want to perform GCSE on,
2011 - none of the operands are subsequently modified in the block
2013 Assignment entries are placed in the hash table if
2014 - they are of the form (set (pseudo-reg) src),
2015 - src is something we want to perform const/copy propagation on,
2016 - none of the operands or target are subsequently modified in the block
2018 Currently src must be a pseudo-reg or a const_int.
2020 TABLE is the table computed. */
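/* For example (illustrative RTL), an expression entry would be created
   for the PLUS in

     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))

   while an assignment entry for const/copy propagation would be created
   for

     (set (reg:SI 100) (const_int 4))
   or
     (set (reg:SI 100) (reg:SI 101)).  */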
2023 compute_hash_table_work (struct hash_table *table)
2027 /* While we compute the hash table we also compute a bit array of which
2028 registers are set in which blocks.
2029 ??? This isn't needed during const/copy propagation, but it's cheap to compute. Later. */
2031 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2033 /* Re-cache any INSN_LIST nodes we have allocated. */
2034 clear_modify_mem_tables ();
2035 /* Some working arrays used to track first and last set in each block. */
2036 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2038 for (i = 0; i < max_gcse_regno; ++i)
2039 reg_avail_info[i].last_bb = NULL;
2041 FOR_EACH_BB (current_bb)
2045 int in_libcall_block;
2047 /* First pass over the instructions records information used to
2048 determine when registers and memory are first and last set.
2049 ??? hard-reg reg_set_in_block computation
2050 could be moved to compute_sets since they currently don't change. */
2052 for (insn = BB_HEAD (current_bb);
2053 insn && insn != NEXT_INSN (BB_END (current_bb));
2054 insn = NEXT_INSN (insn))
2056 if (! INSN_P (insn))
2061 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2062 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2063 record_last_reg_set_info (insn, regno);
2068 note_stores (PATTERN (insn), record_last_set_info, insn);
2071 /* Insert implicit sets in the hash table. */
2073 && implicit_sets[current_bb->index] != NULL_RTX)
2074 hash_scan_set (implicit_sets[current_bb->index],
2075 BB_HEAD (current_bb), table);
2077 /* The next pass builds the hash table. */
2079 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2080 insn && insn != NEXT_INSN (BB_END (current_bb));
2081 insn = NEXT_INSN (insn))
2084 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2085 in_libcall_block = 1;
2086 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2087 in_libcall_block = 0;
2088 hash_scan_insn (insn, table, in_libcall_block);
2089 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2090 in_libcall_block = 0;
2094 free (reg_avail_info);
2095 reg_avail_info = NULL;
2098 /* Allocate space for the set/expr hash TABLE.
2099 N_INSNS is the number of instructions in the function.
2100 It is used to determine the number of buckets to use.
2101 SET_P determines whether set or expression table will
2105 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2109 table->size = n_insns / 4;
2110 if (table->size < 11)
2111 table->size = 11;
2113 /* Attempt to maintain efficient use of hash table.
2114 Making it an odd number is simplest for now.
2115 ??? Later take some measurements. */
2116 table->size |= 1;
2117 n = table->size * sizeof (struct expr *);
2118 table->table = gmalloc (n);
2119 table->set_p = set_p;
2122 /* Free things allocated by alloc_hash_table. */
2125 free_hash_table (struct hash_table *table)
2127 free (table->table);
2130 /* Compute the hash TABLE for doing copy/const propagation or
2131 expression hash table. */
2134 compute_hash_table (struct hash_table *table)
2136 /* Initialize count of number of entries in hash table. */
2138 memset (table->table, 0, table->size * sizeof (struct expr *));
2140 compute_hash_table_work (table);
2143 /* Expression tracking support. */
2145 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2146 table entry, or NULL if not found. */
2148 static struct expr *
2149 lookup_set (unsigned int regno, struct hash_table *table)
2151 unsigned int hash = hash_set (regno, table->size);
2154 expr = table->table[hash];
2156 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2157 expr = expr->next_same_hash;
2162 /* Return the next entry for REGNO in list EXPR. */
2164 static struct expr *
2165 next_set (unsigned int regno, struct expr *expr)
2167 do
2168 expr = expr->next_same_hash;
2169 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
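/* Illustrative usage of the two routines above (this is the pattern
   find_avail_set and find_bypass_set follow below): enumerate every
   recorded set of REGNO:

     struct expr *set;
     for (set = lookup_set (regno, &set_hash_table);
          set != NULL;
          set = next_set (regno, set))
       ...examine set->expr...;
*/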
2174 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2175 types may be mixed. */
2178 free_insn_expr_list_list (rtx *listp)
2182 for (list = *listp; list ; list = next)
2184 next = XEXP (list, 1);
2185 if (GET_CODE (list) == EXPR_LIST)
2186 free_EXPR_LIST_node (list);
2188 free_INSN_LIST_node (list);
2194 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2196 clear_modify_mem_tables (void)
2201 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2203 free_INSN_LIST_list (modify_mem_list + i);
2204 free_insn_expr_list_list (canon_modify_mem_list + i);
2206 bitmap_clear (modify_mem_list_set);
2207 bitmap_clear (blocks_with_calls);
2210 /* Release memory used by modify_mem_list_set. */
2213 free_modify_mem_tables (void)
2215 clear_modify_mem_tables ();
2216 free (modify_mem_list);
2217 free (canon_modify_mem_list);
2218 modify_mem_list = 0;
2219 canon_modify_mem_list = 0;
2222 /* Reset tables used to keep track of what's still available [since the
2223 start of the block]. */
2226 reset_opr_set_tables (void)
2228 /* Maintain a bitmap of which regs have been set since the beginning of the block. */
2230 CLEAR_REG_SET (reg_set_bitmap);
2232 /* Also keep a record of the last instruction to modify memory.
2233 For now this is very trivial, we only record whether any memory
2234 location has been modified. */
2235 clear_modify_mem_tables ();
2238 /* Return nonzero if the operands of X are not set before INSN in
2239 INSN's basic block. */
2242 oprs_not_set_p (rtx x, rtx insn)
2251 code = GET_CODE (x);
2267 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2268 INSN_CUID (insn), x, 0))
2271 return oprs_not_set_p (XEXP (x, 0), insn);
2274 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2280 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2284 /* If we are about to do the last recursive call
2285 needed at this level, change it into iteration.
2286 This function is called enough to be worth it. */
2288 return oprs_not_set_p (XEXP (x, i), insn);
2290 if (! oprs_not_set_p (XEXP (x, i), insn))
2293 else if (fmt[i] == 'E')
2294 for (j = 0; j < XVECLEN (x, i); j++)
2295 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2302 /* Mark things set by a CALL. */
2305 mark_call (rtx insn)
2307 if (! CONST_OR_PURE_CALL_P (insn))
2308 record_last_mem_set_info (insn);
2311 /* Mark things set by a SET. */
2314 mark_set (rtx pat, rtx insn)
2316 rtx dest = SET_DEST (pat);
2318 while (GET_CODE (dest) == SUBREG
2319 || GET_CODE (dest) == ZERO_EXTRACT
2320 || GET_CODE (dest) == STRICT_LOW_PART)
2321 dest = XEXP (dest, 0);
2324 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2325 else if (MEM_P (dest))
2326 record_last_mem_set_info (insn);
2328 if (GET_CODE (SET_SRC (pat)) == CALL)
2329 mark_call (insn);
2332 /* Record things set by a CLOBBER. */
2335 mark_clobber (rtx pat, rtx insn)
2337 rtx clob = XEXP (pat, 0);
2339 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2340 clob = XEXP (clob, 0);
2343 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2345 record_last_mem_set_info (insn);
2348 /* Record things set by INSN.
2349 This data is used by oprs_not_set_p. */
2352 mark_oprs_set (rtx insn)
2354 rtx pat = PATTERN (insn);
2357 if (GET_CODE (pat) == SET)
2358 mark_set (pat, insn);
2359 else if (GET_CODE (pat) == PARALLEL)
2360 for (i = 0; i < XVECLEN (pat, 0); i++)
2362 rtx x = XVECEXP (pat, 0, i);
2364 if (GET_CODE (x) == SET)
2365 mark_set (x, insn);
2366 else if (GET_CODE (x) == CLOBBER)
2367 mark_clobber (x, insn);
2368 else if (GET_CODE (x) == CALL)
2369 mark_call (insn);
2372 else if (GET_CODE (pat) == CLOBBER)
2373 mark_clobber (pat, insn);
2374 else if (GET_CODE (pat) == CALL)
2375 mark_call (insn);
2379 /* Compute copy/constant propagation working variables. */
2381 /* Local properties of assignments. */
2382 static sbitmap *cprop_pavloc;
2383 static sbitmap *cprop_absaltered;
2385 /* Global properties of assignments (computed from the local properties). */
2386 static sbitmap *cprop_avin;
2387 static sbitmap *cprop_avout;
2389 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2390 basic blocks. N_SETS is the number of sets. */
2393 alloc_cprop_mem (int n_blocks, int n_sets)
2395 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2396 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2398 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2399 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2402 /* Free vars used by copy/const propagation. */
2405 free_cprop_mem (void)
2407 sbitmap_vector_free (cprop_pavloc);
2408 sbitmap_vector_free (cprop_absaltered);
2409 sbitmap_vector_free (cprop_avin);
2410 sbitmap_vector_free (cprop_avout);
2413 /* For each block, compute whether X is transparent. X is either an
2414 expression or an assignment [though we don't care which, for this context
2415 an assignment is treated as an expression]. For each block where an
2416 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
2420 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
2428 /* repeat is used to turn tail-recursion into iteration since GCC
2429 can't do it when there's no return value. */
2435 code = GET_CODE (x);
2441 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2444 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2445 SET_BIT (bmap[bb->index], indx);
2449 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2450 SET_BIT (bmap[r->bb_index], indx);
2455 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2458 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2459 RESET_BIT (bmap[bb->index], indx);
2463 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2464 RESET_BIT (bmap[r->bb_index], indx);
2475 /* First handle all the blocks with calls. We don't need to
2476 do any list walking for them. */
2477 EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
2480 SET_BIT (bmap[bb_index], indx);
2482 RESET_BIT (bmap[bb_index], indx);
2485 /* Now iterate over the blocks which have memory modifications
2486 but which do not have any calls. */
2487 EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set, blocks_with_calls,
2490 rtx list_entry = canon_modify_mem_list[bb_index];
2494 rtx dest, dest_addr;
2496 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2497 Examine each hunk of memory that is modified. */
2499 dest = XEXP (list_entry, 0);
2500 list_entry = XEXP (list_entry, 1);
2501 dest_addr = XEXP (list_entry, 0);
2503 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2504 x, rtx_addr_varies_p))
2507 SET_BIT (bmap[bb_index], indx);
2509 RESET_BIT (bmap[bb_index], indx);
2512 list_entry = XEXP (list_entry, 1);
2536 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2540 /* If we are about to do the last recursive call
2541 needed at this level, change it into iteration.
2542 This function is called enough to be worth it. */
2549 compute_transp (XEXP (x, i), indx, bmap, set_p);
2551 else if (fmt[i] == 'E')
2552 for (j = 0; j < XVECLEN (x, i); j++)
2553 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
2557 /* Top level routine to do the dataflow analysis needed by copy/const propagation. */
2561 compute_cprop_data (void)
2563 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2564 compute_available (cprop_pavloc, cprop_absaltered,
2565 cprop_avout, cprop_avin);
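/* A sketch of the dataflow this solves (compute_available implements
   it): with PAVLOC as the local gen set and ABSALTERED as the kill set,

     AVOUT(b) = PAVLOC(b) | (AVIN(b) & ~ABSALTERED(b))
     AVIN(b)  = intersection of AVOUT(p) over all predecessors p,
                with AVIN(entry) = 0

   iterated to a fixed point. */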
2568 /* Copy/constant propagation. */
2570 /* Maximum number of register uses in an insn that we handle. */
2573 /* Table of uses found in an insn.
2574 Allocated statically to avoid alloc/free complexity and overhead. */
2575 static struct reg_use reg_use_table[MAX_USES];
2577 /* Index into `reg_use_table' while building it. */
2578 static int reg_use_count;
2580 /* Set up a list of register numbers used in INSN. The found uses are stored
2581 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2582 and contains the number of uses in the table upon exit.
2584 ??? If a register appears multiple times we will record it multiple times.
2585 This doesn't hurt anything but it will slow things down. */
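/* Typical usage, as cprop_insn does below (a sketch):

     reg_use_count = 0;
     note_uses (&PATTERN (insn), find_used_regs, NULL);
     for (i = 0; i < reg_use_count; i++)
       ...reg_use_table[i].reg_rtx is the i-th recorded use...;
*/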
2588 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2595 /* repeat is used to turn tail-recursion into iteration since GCC
2596 can't do it when there's no return value. */
2601 code = GET_CODE (x);
2604 if (reg_use_count == MAX_USES)
2605 return;
2607 reg_use_table[reg_use_count].reg_rtx = x;
2608 reg_use_count++;
2611 /* Recursively scan the operands of this expression. */
2613 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2617 /* If we are about to do the last recursive call
2618 needed at this level, change it into iteration.
2619 This function is called enough to be worth it. */
2626 find_used_regs (&XEXP (x, i), data);
2628 else if (fmt[i] == 'E')
2629 for (j = 0; j < XVECLEN (x, i); j++)
2630 find_used_regs (&XVECEXP (x, i, j), data);
2634 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
2635 Returns nonzero if successful. */
2638 try_replace_reg (rtx from, rtx to, rtx insn)
2640 rtx note = find_reg_equal_equiv_note (insn);
2643 rtx set = single_set (insn);
2645 validate_replace_src_group (from, to, insn);
2646 if (num_changes_pending () && apply_change_group ())
2649 /* Try to simplify SET_SRC if we have substituted a constant. */
2650 if (success && set && CONSTANT_P (to))
2652 src = simplify_rtx (SET_SRC (set));
2655 validate_change (insn, &SET_SRC (set), src, 0);
2658 /* If there is already a NOTE, update the expression in it with our replacement. */
2661 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
2663 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2665 /* If above failed and this is a single set, try to simplify the source of
2666 the set given our substitution. We could perhaps try this for multiple
2667 SETs, but it probably won't buy us anything. */
2668 src = simplify_replace_rtx (SET_SRC (set), from, to);
2670 if (!rtx_equal_p (src, SET_SRC (set))
2671 && validate_change (insn, &SET_SRC (set), src, 0))
2674 /* If we've failed to do replacement, have a single SET, don't already
2675 have a note, and have no special SET, add a REG_EQUAL note to not
2676 lose information. */
2677 if (!success && note == 0 && set != 0
2678 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT)
2679 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
2682 /* REG_EQUAL may get simplified into a register.
2683 We don't allow that; remove the note in that case. This ought
2684 not to happen, because the code above ought to have synthesized a
2685 reg-reg move, but be on the safe side. */
2686 if (note && REG_P (XEXP (note, 0)))
2687 remove_note (insn, note);
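/* For instance (an illustrative example), substituting (const_int 4)
   for (reg:SI 100) in

     (set (reg:SI 101) (plus:SI (reg:SI 100) (reg:SI 102)))

   rewrites the source to (plus:SI (const_int 4) (reg:SI 102)); if a
   substitution folds the whole source to a constant, the simplification
   and REG_EQUAL handling above record that fact. */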
2692 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
2693 NULL if no such set is found. */
2695 static struct expr *
2696 find_avail_set (int regno, rtx insn)
2698 /* SET1 contains the last set found that can be returned to the caller for
2699 use in a substitution. */
2700 struct expr *set1 = 0;
2702 /* Loops are not possible here. To get a loop we would need two sets
2703 available at the start of the block containing INSN. i.e. we would
2704 need two sets like this available at the start of the block:
2706 (set (reg X) (reg Y))
2707 (set (reg Y) (reg X))
2709 This cannot happen since the set of (reg Y) would have killed the
2710 set of (reg X) making it unavailable at the start of this block. */
2714 struct expr *set = lookup_set (regno, &set_hash_table);
2716 /* Find a set that is available at the start of the block
2717 which contains INSN. */
2720 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2721 break;
2722 set = next_set (regno, set);
2725 /* If no available set was found we've reached the end of the
2726 (possibly empty) copy chain. */
2730 gcc_assert (GET_CODE (set->expr) == SET);
2732 src = SET_SRC (set->expr);
2734 /* We know the set is available.
2735 Now check that SRC is ANTLOC (i.e. none of the source operands
2736 have changed since the start of the block).
2738 If the source operand changed, we may still use it for the next
2739 iteration of this loop, but we may not use it for substitutions. */
2741 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2744 /* If the source of the set is anything except a register, then
2745 we have reached the end of the copy chain. */
2749 /* Follow the copy chain, i.e. start another iteration of the loop
2750 and see if we have an available copy into SRC. */
2751 regno = REGNO (src);
2754 /* SET1 holds the last set that was available and anticipatable at INSN. */
2759 /* Subroutine of cprop_insn that tries to propagate constants into
2760 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2761 it is the instruction that immediately precedes JUMP, and must be a
2762 single SET of a register. FROM is what we will try to replace,
2763 SRC is the constant we will try to substitute for it. Returns nonzero
2764 if a change was made. */
2767 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2769 rtx new, set_src, note_src;
2770 rtx set = pc_set (jump);
2771 rtx note = find_reg_equal_equiv_note (jump);
2775 note_src = XEXP (note, 0);
2776 if (GET_CODE (note_src) == EXPR_LIST)
2777 note_src = NULL_RTX;
2779 else note_src = NULL_RTX;
2781 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2782 set_src = note_src ? note_src : SET_SRC (set);
2784 /* First substitute the SETCC condition into the JUMP instruction,
2785 then substitute the given values into this expanded JUMP. */
2786 if (setcc != NULL_RTX
2787 && !modified_between_p (from, setcc, jump)
2788 && !modified_between_p (src, setcc, jump))
2791 rtx setcc_set = single_set (setcc);
2792 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2793 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2794 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2795 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2801 new = simplify_replace_rtx (set_src, from, src);
2803 /* If no simplification can be made, then try the next register. */
2804 if (rtx_equal_p (new, SET_SRC (set)))
2807 /* If this is now a no-op, delete it; otherwise it must be a valid insn. */
2812 /* Ensure the value computed inside the jump insn is equivalent
2813 to the one computed by setcc. */
2814 if (setcc && modified_in_p (new, setcc))
2816 if (! validate_change (jump, &SET_SRC (set), new, 0))
2818 /* When (some) constants are not valid in a comparison, and there
2819 are two registers to be replaced by constants before the entire
2820 comparison can be folded into a constant, we need to keep
2821 intermediate information in REG_EQUAL notes. For targets with
2822 separate compare insns, such notes are added by try_replace_reg.
2823 When we have a combined compare-and-branch instruction, however,
2824 we need to attach a note to the branch itself to make this
2825 optimization work. */
2827 if (!rtx_equal_p (new, note_src))
2828 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2832 /* Remove REG_EQUAL note after simplification. */
2834 remove_note (jump, note);
2836 /* If this has turned into an unconditional jump,
2837 then put a barrier after it so that the unreachable
2838 code will be deleted. */
2839 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
2840 emit_barrier_after (jump);
2844 /* Delete the cc0 setter. */
2845 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2846 delete_insn (setcc);
2849 run_jump_opt_after_gcse = 1;
2851 global_const_prop_count++;
2852 if (gcse_file != NULL)
2855 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2856 REGNO (from), INSN_UID (jump));
2857 print_rtl (gcse_file, src);
2858 fprintf (gcse_file, "\n");
2860 purge_dead_edges (bb);
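/* An illustrative example: if (reg:SI 100) is known to equal
   (const_int 0), then in

     (set (pc) (if_then_else (eq (reg:SI 100) (const_int 0))
                             (label_ref 23) (pc)))

   the substitution folds the condition to true, the insn becomes an
   unconditional jump to label 23, and the dead outgoing edge is purged
   above. */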
2866 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
2870 /* Check for reg or cc0 setting instructions followed by
2871 conditional branch instructions first. */
2873 && (sset = single_set (insn)) != NULL
2875 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2877 rtx dest = SET_DEST (sset);
2878 if ((REG_P (dest) || CC0_P (dest))
2879 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2883 /* Handle normal insns next. */
2884 if (NONJUMP_INSN_P (insn)
2885 && try_replace_reg (from, to, insn))
2888 /* Try to propagate a CONST_INT into a conditional jump.
2889 We're pretty specific about what we will handle in this
2890 code; we can extend it as necessary over time.
2892 Right now the insn in question must look like
2893 (set (pc) (if_then_else ...)) */
2894 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2895 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2899 /* Perform constant and copy propagation on INSN.
2900 The result is nonzero if a change was made. */
2903 cprop_insn (rtx insn, int alter_jumps)
2905 struct reg_use *reg_used;
2913 note_uses (&PATTERN (insn), find_used_regs, NULL);
2915 note = find_reg_equal_equiv_note (insn);
2917 /* We may win even when propagating constants into notes. */
2919 find_used_regs (&XEXP (note, 0), NULL);
2921 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2922 reg_used++, reg_use_count--)
2924 unsigned int regno = REGNO (reg_used->reg_rtx);
2928 /* Ignore registers created by GCSE.
2929 We do this because the tables above were sized and computed before these registers existed, so we know nothing about them. */
2930 if (regno >= max_gcse_regno)
2933 /* If the register has already been set in this block, there's
2934 nothing we can do. */
2935 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2938 /* Find an assignment that sets reg_used and is available
2939 at the start of the block. */
2940 set = find_avail_set (regno, insn);
2945 /* ??? We might be able to handle PARALLELs. Later. */
2946 gcc_assert (GET_CODE (pat) == SET);
2948 src = SET_SRC (pat);
2950 /* Constant propagation. */
2951 if (gcse_constant_p (src))
2953 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2956 global_const_prop_count++;
2957 if (gcse_file != NULL)
2959 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
2960 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
2961 print_rtl (gcse_file, src);
2962 fprintf (gcse_file, "\n");
2964 if (INSN_DELETED_P (insn))
2968 else if (REG_P (src)
2969 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2970 && REGNO (src) != regno)
2972 if (try_replace_reg (reg_used->reg_rtx, src, insn))
2975 global_copy_prop_count++;
2976 if (gcse_file != NULL)
2978 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
2979 regno, INSN_UID (insn));
2980 fprintf (gcse_file, " with reg %d\n", REGNO (src));
2983 /* The original insn setting reg_used may or may not now be
2984 deletable. We leave the deletion to flow. */
2985 /* FIXME: If it turns out that the insn isn't deletable,
2986 then we may have unnecessarily extended register lifetimes
2987 and made things worse. */
2995 /* Like find_used_regs, but avoid recording uses that appear in
2996 input-output contexts such as zero_extract or pre_dec. This
2997 restricts the cases we consider to those for which local cprop
2998 can legitimately make replacements. */
3001 local_cprop_find_used_regs (rtx *xptr, void *data)
3008 switch (GET_CODE (x))
3012 case STRICT_LOW_PART:
3021 /* Can only legitimately appear this early in the context of
3022 stack pushes for function arguments, but handle all of the
3023 codes nonetheless. */
3027 /* Setting a subreg of a register larger than word_mode leaves
3028 the non-written words unchanged. */
3029 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3037 find_used_regs (xptr, data);
3040 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3041 their REG_EQUAL notes need updating. */
3044 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
3046 rtx newreg = NULL, newcnst = NULL;
3048 /* Rule out USE instructions and ASM statements as we don't want to
3049 change the hard registers mentioned. */
3051 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3052 || (GET_CODE (PATTERN (insn)) != USE
3053 && asm_noperands (PATTERN (insn)) < 0)))
3055 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3056 struct elt_loc_list *l;
3060 for (l = val->locs; l; l = l->next)
3062 rtx this_rtx = l->loc;
3065 /* Don't CSE non-constant values out of libcall blocks. */
3066 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3069 if (gcse_constant_p (this_rtx))
3071 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
3072 /* Don't copy propagate if it has attached REG_EQUIV note.
3073 At this point only function parameters should have
3074 REG_EQUIV notes, and if the argument slot is used somewhere
3075 explicitly, it means the address of the parameter has been taken,
3076 so we should not extend the lifetime of the pseudo. */
3077 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3078 || ! MEM_P (XEXP (note, 0))))
3081 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
3083 /* If we find a case where we can't make the retval REG_EQUAL notes
3084 match the new register, we either have to abandon this replacement
3085 or fix delete_trivially_dead_insns to preserve the setting insn,
3086 or make it delete the REG_EQUAL note, and fix up all passes that
3087 require the REG_EQUAL note there. */
3090 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3091 gcc_assert (adjusted);
3093 if (gcse_file != NULL)
3095 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3097 fprintf (gcse_file, "insn %d with constant ",
3099 print_rtl (gcse_file, newcnst);
3100 fprintf (gcse_file, "\n");
3102 local_const_prop_count++;
3105 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3107 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3108 if (gcse_file != NULL)
3111 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3112 REGNO (x), INSN_UID (insn));
3113 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
3115 local_copy_prop_count++;
3122 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3123 their REG_EQUAL notes need updating to reflect that OLDREG has been
3124 replaced with NEWVAL in INSN. Return true if all substitutions could be performed. */
3127 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3131 while ((end = *libcall_sp++))
3133 rtx note = find_reg_equal_equiv_note (end);
3140 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3144 note = find_reg_equal_equiv_note (end);
3147 if (reg_mentioned_p (newval, XEXP (note, 0)))
3150 while ((end = *libcall_sp++));
3154 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3160 #define MAX_NESTED_LIBCALLS 9
3163 local_cprop_pass (int alter_jumps)
3166 struct reg_use *reg_used;
3167 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3168 bool changed = false;
3170 cselib_init (false);
3171 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3173 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3177 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3181 gcc_assert (libcall_sp != libcall_stack);
3182 *--libcall_sp = XEXP (note, 0);
3184 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3187 note = find_reg_equal_equiv_note (insn);
3191 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
3193 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
3195 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3196 reg_used++, reg_use_count--)
3197 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3203 if (INSN_DELETED_P (insn))
3204 break;
3206 while (reg_use_count);
3208 cselib_process_insn (insn);
3211 /* Global analysis may get into infinite loops for unreachable blocks. */
3212 if (changed && alter_jumps)
3214 delete_unreachable_blocks ();
3215 free_reg_set_mem ();
3216 alloc_reg_set_mem (max_reg_num ());
3217 compute_sets (get_insns ());
3221 /* Forward propagate copies. This includes copies and constants. Return
3222 nonzero if a change was made. */
3225 cprop (int alter_jumps)
3231 /* Note we start at block 1. */
3232 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3234 if (gcse_file != NULL)
3235 fprintf (gcse_file, "\n");
3240 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3242 /* Reset tables used to keep track of what's still valid [since the
3243 start of the block]. */
3244 reset_opr_set_tables ();
3246 for (insn = BB_HEAD (bb);
3247 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3248 insn = NEXT_INSN (insn))
3251 changed |= cprop_insn (insn, alter_jumps);
3253 /* Keep track of everything modified by this insn. */
3254 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3255 call mark_oprs_set if we turned the insn into a NOTE. */
3256 if (! NOTE_P (insn))
3257 mark_oprs_set (insn);
3261 if (gcse_file != NULL)
3262 fprintf (gcse_file, "\n");
3267 /* Similar to get_condition, only the resulting condition must be
3268 valid at JUMP, instead of at EARLIEST.
3270 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3271 settle for the condition variable in the jump instruction being integral.
3272 We prefer to be able to record the value of a user variable, rather than
3273 the value of a temporary used in a condition. This could be solved by
3274 recording the value of *every* register scanned by canonicalize_condition,
3275 but this would require some code reorganization. */
3278 fis_get_condition (rtx jump)
3280 return get_condition (jump, NULL, false, true);
3283 /* Check the comparison COND to see if we can safely form an implicit set from
3284 it. COND is either an EQ or NE comparison. */
3287 implicit_set_cond_p (rtx cond)
3289 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3290 rtx cst = XEXP (cond, 1);
3292 /* We can't perform this optimization if either operand might be or might
3293 contain a signed zero. */
3294 if (HONOR_SIGNED_ZEROS (mode))
3296 /* It is sufficient to check if CST is or contains a zero. We must
3297 handle float, complex, and vector. If any subpart is a zero, then
3298 the optimization can't be performed. */
3299 /* ??? The complex and vector checks are not implemented yet. We just
3300 always return zero for them. */
3301 if (GET_CODE (cst) == CONST_DOUBLE)
3304 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3305 if (REAL_VALUES_EQUAL (d, dconst0))
3312 return gcse_constant_p (cst);
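/* For example, following "if (x == 0.0)" we must not record the implicit
   set "x = 0.0" when signed zeros are honored: x could be -0.0, which
   compares equal to 0.0 but is a different value. */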
3315 /* Find the implicit sets of a function. An "implicit set" is a constraint
3316 on the value of a variable, implied by a conditional jump. For example,
3317 following "if (x == 2)", the then branch may be optimized as though the
3318 conditional performed an "explicit set", in this example, "x = 2". This
3319 function records the set patterns that are implicit at the start of each basic block. */
3323 find_implicit_sets (void)
3325 basic_block bb, dest;
3331 /* Check for more than one successor. */
3332 if (EDGE_COUNT (bb->succs) > 1)
3334 cond = fis_get_condition (BB_END (bb));
3337 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3338 && REG_P (XEXP (cond, 0))
3339 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3340 && implicit_set_cond_p (cond))
3342 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3343 : FALLTHRU_EDGE (bb)->dest;
3345 if (dest && EDGE_COUNT (dest->preds) == 1
3346 && dest != EXIT_BLOCK_PTR)
3348 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3349 XEXP (cond, 1));
3350 implicit_sets[dest->index] = new;
3353 fprintf (gcse_file, "Implicit set of reg %d in ",
3354 REGNO (XEXP (cond, 0)));
3355 fprintf (gcse_file, "basic block %d\n", dest->index);
3363 fprintf (gcse_file, "Found %d implicit sets\n", count);
3366 /* Perform one copy/constant propagation pass.
3367 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3368 propagation into conditional jumps. If BYPASS_JUMPS is true,
3369 perform conditional jump bypassing optimizations. */
3372 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
3376 global_const_prop_count = local_const_prop_count = 0;
3377 global_copy_prop_count = local_copy_prop_count = 0;
3379 local_cprop_pass (cprop_jumps);
3381 /* Determine implicit sets. */
3382 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
3383 find_implicit_sets ();
3385 alloc_hash_table (max_cuid, &set_hash_table, 1);
3386 compute_hash_table (&set_hash_table);
3388 /* Free implicit_sets before peak usage. */
3389 free (implicit_sets);
3390 implicit_sets = NULL;
3393 dump_hash_table (gcse_file, "SET", &set_hash_table);
3394 if (set_hash_table.n_elems > 0)
3396 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3397 compute_cprop_data ();
3398 changed = cprop (cprop_jumps);
3400 changed |= bypass_conditional_jumps ();
3404 free_hash_table (&set_hash_table);
3408 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
3409 current_function_name (), pass, bytes_used);
3410 fprintf (gcse_file, "%d local const props, %d local copy props\n\n",
3411 local_const_prop_count, local_copy_prop_count);
3412 fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
3413 global_const_prop_count, global_copy_prop_count);
3415 /* Global analysis may get into infinite loops for unreachable blocks. */
3416 if (changed && cprop_jumps)
3417 delete_unreachable_blocks ();
3422 /* Bypass conditional jumps. */
3424 /* The value of last_basic_block at the beginning of the jump_bypass
3425 pass. The use of redirect_edge_and_branch_force may introduce new
3426 basic blocks, but the data flow analysis is only valid for basic
3427 block indices less than bypass_last_basic_block. */
3429 static int bypass_last_basic_block;
3431 /* Find a set of REGNO to a constant that is available at the end of basic
3432 block BB. Returns NULL if no such set is found. Based heavily upon find_avail_set. */
3435 static struct expr *
3436 find_bypass_set (int regno, int bb)
3438 struct expr *result = 0;
3443 struct expr *set = lookup_set (regno, &set_hash_table);
3447 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3448 break;
3449 set = next_set (regno, set);
3455 gcc_assert (GET_CODE (set->expr) == SET);
3457 src = SET_SRC (set->expr);
3458 if (gcse_constant_p (src))
3464 regno = REGNO (src);
3470 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3471 any of the instructions inserted on an edge. Jump bypassing places
3472 condition code setters on CFG edges using insert_insn_on_edge. This
3473 function is required to check that our data flow analysis is still
3474 valid prior to commit_edge_insertions. */
3477 reg_killed_on_edge (rtx reg, edge e)
3481 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3482 if (INSN_P (insn) && reg_set_p (reg, insn))
3488 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3489 basic block BB which has more than one predecessor. If not NULL, SETCC
3490 is the first instruction of BB, which is immediately followed by JUMP_INSN
3491 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3492 Returns nonzero if a change was made.
3494 During the jump bypassing pass, we may place copies of SETCC instructions
3495 on CFG edges. The following routine must be careful to pay attention to
3496 these inserted insns when performing its transformations. */
3499 bypass_block (basic_block bb, rtx setcc, rtx jump)
3504 int may_be_loop_header;
3508 insn = (setcc != NULL) ? setcc : jump;
3510 /* Determine set of register uses in INSN. */
3512 note_uses (&PATTERN (insn), find_used_regs, NULL);
3513 note = find_reg_equal_equiv_note (insn);
3515 find_used_regs (&XEXP (note, 0), NULL);
3517 may_be_loop_header = false;
3518 FOR_EACH_EDGE (e, ei, bb->preds)
3519 if (e->flags & EDGE_DFS_BACK)
3521 may_be_loop_header = true;
3526 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3530 if (e->flags & EDGE_COMPLEX)
3536 /* We can't redirect edges from new basic blocks. */
3537 if (e->src->index >= bypass_last_basic_block)
3543 /* The irreducible loops created by redirecting edges entering the
3544 loop from outside would decrease the effectiveness of some of the
3545 following optimizations, so prevent this. */
3546 if (may_be_loop_header
3547 && !(e->flags & EDGE_DFS_BACK))
3553 for (i = 0; i < reg_use_count; i++)
3555 struct reg_use *reg_used = &reg_use_table[i];
3556 unsigned int regno = REGNO (reg_used->reg_rtx);
3557 basic_block dest, old_dest;
3561 if (regno >= max_gcse_regno)
3564 set = find_bypass_set (regno, e->src->index);
3569 /* Check the data flow is valid after edge insertions. */
3570 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3573 src = SET_SRC (pc_set (jump));
3576 src = simplify_replace_rtx (src,
3577 SET_DEST (PATTERN (setcc)),
3578 SET_SRC (PATTERN (setcc)));
3580 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3581 SET_SRC (set->expr));
3583 /* Jump bypassing may have already placed instructions on
3584 edges of the CFG. We can't bypass an outgoing edge that
3585 has instructions associated with it, as these insns won't
3586 get executed if the incoming edge is redirected. */
3590 edest = FALLTHRU_EDGE (bb);
3591 dest = edest->insns.r ? NULL : edest->dest;
3593 else if (GET_CODE (new) == LABEL_REF)
3597 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3598 /* Don't bypass edges containing instructions. */
3599 FOR_EACH_EDGE (edest, ei2, bb->succs)
3600 if (edest->dest == dest && edest->insns.r)
3609 /* Avoid unification of the edge with other edges from the original
3610 branch. We would end up emitting the instruction on "both" edges. */
3613 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
3618 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3619 if (e2->dest == dest)
3629 && dest != EXIT_BLOCK_PTR)
3631 redirect_edge_and_branch_force (e, dest);
3633 /* Copy the register setter to the redirected edge.
3634 Don't copy CC0 setters, as CC0 is dead after jump. */
3637 rtx pat = PATTERN (setcc);
3638 if (!CC0_P (SET_DEST (pat)))
3639 insert_insn_on_edge (copy_insn (pat), e);
3642 if (gcse_file != NULL)
3644 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
3645 "in jump_insn %d equals constant ",
3646 regno, INSN_UID (jump));
3647 print_rtl (gcse_file, SET_SRC (set->expr));
3648 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
3649 e->src->index, old_dest->index, dest->index);
3662 /* Find basic blocks with more than one predecessor that only contain a
3663 single conditional jump. If the result of the comparison is known at
3664 compile-time from any incoming edge, redirect that edge to the
3665 appropriate target. Returns nonzero if a change was made.
3667 This function is now mis-named, because we also handle indirect jumps. */
3670 bypass_conditional_jumps (void)
3678 /* Note we start at block 1. */
3679 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3682 bypass_last_basic_block = last_basic_block;
3683 mark_dfs_back_edges ();
3686 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3687 EXIT_BLOCK_PTR, next_bb)
3689 /* Check for more than one predecessor. */
3690 if (EDGE_COUNT (bb->preds) > 1)
3693 for (insn = BB_HEAD (bb);
3694 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3695 insn = NEXT_INSN (insn))
3696 if (NONJUMP_INSN_P (insn))
3700 if (GET_CODE (PATTERN (insn)) != SET)
3703 dest = SET_DEST (PATTERN (insn));
3704 if (REG_P (dest) || CC0_P (dest))
3709 else if (JUMP_P (insn))
3711 if ((any_condjump_p (insn) || computed_jump_p (insn))
3712 && onlyjump_p (insn))
3713 changed |= bypass_block (bb, setcc, insn);
3716 else if (INSN_P (insn))
3721 /* If we bypassed any register setting insns, we inserted a
3722 copy on the redirected edge. These need to be committed. */
3724 commit_edge_insertions ();
3729 /* Compute PRE+LCM working variables. */
3731 /* Local properties of expressions. */
3732 /* Nonzero for expressions that are transparent in the block. */
3733 static sbitmap *transp;
3735 /* Nonzero for expressions that are transparent at the end of the block.
3736 This is only zero for expressions killed by an abnormal critical
3737 edge created by a call. */
3738 static sbitmap *transpout;
3740 /* Nonzero for expressions that are computed (available) in the block. */
3741 static sbitmap *comp;
3743 /* Nonzero for expressions that are locally anticipatable in the block. */
3744 static sbitmap *antloc;
3746 /* Nonzero for expressions where this block is an optimal computation point. */
3748 static sbitmap *pre_optimal;
3750 /* Nonzero for expressions which are redundant in a particular block. */
3751 static sbitmap *pre_redundant;
3753 /* Nonzero for expressions which should be inserted on a specific edge. */
3754 static sbitmap *pre_insert_map;
3756 /* Nonzero for expressions which should be deleted in a specific block. */
3757 static sbitmap *pre_delete_map;
3759 /* Contains the edge_list returned by pre_edge_lcm. */
3760 static struct edge_list *edge_list;
3762 /* Redundant insns. */
3763 static sbitmap pre_redundant_insns;
3765 /* Allocate vars used for PRE analysis. */
3768 alloc_pre_mem (int n_blocks, int n_exprs)
3770 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3771 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3772 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3775 pre_redundant = NULL;
3776 pre_insert_map = NULL;
3777 pre_delete_map = NULL;
3778 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3780 /* pre_insert and pre_delete are allocated later. */
3783 /* Free vars used for PRE analysis. */
3788 sbitmap_vector_free (transp);
3789 sbitmap_vector_free (comp);
3791 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3794 sbitmap_vector_free (pre_optimal);
3796 sbitmap_vector_free (pre_redundant);
3798 sbitmap_vector_free (pre_insert_map);
3800 sbitmap_vector_free (pre_delete_map);
3802 transp = comp = NULL;
3803 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3806 /* Top level routine to do the dataflow analysis needed by PRE. */
3809 compute_pre_data (void)
3811 sbitmap trapping_expr;
3815 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3816 sbitmap_vector_zero (ae_kill, last_basic_block);
3818 /* Collect expressions which might trap. */
3819 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3820 sbitmap_zero (trapping_expr);
3821 for (ui = 0; ui < expr_hash_table.size; ui++)
3824 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3825 if (may_trap_p (e->expr))
3826 SET_BIT (trapping_expr, e->bitmap_index);
3829 /* Compute ae_kill for each basic block using: ~(TRANSP | COMP). */
3839 /* If the current block is the destination of an abnormal edge, we
3840 kill all trapping expressions because we won't be able to properly
3841 place the instruction on the edge. So make them neither
3842 anticipatable nor transparent. This is fairly conservative. */
3843 FOR_EACH_EDGE (e, ei, bb->preds)
3844 if (e->flags & EDGE_ABNORMAL)
3846 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3847 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3851 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3852 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3855 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
3856 ae_kill, &pre_insert_map, &pre_delete_map);
3857 sbitmap_vector_free (antloc);
3859 sbitmap_vector_free (ae_kill);
3861 sbitmap_free (trapping_expr);
3866 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach block BB.
3869 VISITED is a pointer to a working buffer for tracking which BB's have
3870 been visited. It is NULL for the top-level call.
3872 We treat reaching expressions that go through blocks containing the same
3873 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3874 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3875 2 as not reaching. The intent is to improve the probability of finding
3876 only one reaching expression and to reduce register lifetimes by picking
3877 the closest such expression. */
3880 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3885 FOR_EACH_EDGE (pred, ei, bb->preds)
3887 basic_block pred_bb = pred->src;
3889 if (pred->src == ENTRY_BLOCK_PTR
3890 /* Has this predecessor already been visited? */
3891 || visited[pred_bb->index])
3892 ;/* Nothing to do. */
3894 /* Does this predecessor generate this expression? */
3895 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3897 /* Is this the occurrence we're looking for?
3898 Note that there's only one generating occurrence per block
3899 so we just need to check the block number. */
3900 if (occr_bb == pred_bb)
3903 visited[pred_bb->index] = 1;
3905 /* Ignore this predecessor if it kills the expression. */
3906 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3907 visited[pred_bb->index] = 1;
3909 /* Neither gen nor kill. */
3912 visited[pred_bb->index] = 1;
3913 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3918 /* All paths have been checked. */
3922 /* The wrapper for pre_expr_reaches_here_work that ensures that any
3923 memory allocated for that function is returned. */
3926 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3929 char *visited = xcalloc (last_basic_block, 1);
3931 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3938 /* Given an expr, generate RTL which we can insert at the end of a BB,
3939 or on an edge. Set the block number of any insns generated to the value of BB. */
3943 process_insert_insn (struct expr *expr)
3945 rtx reg = expr->reaching_reg;
3946 rtx exp = copy_rtx (expr->expr);
3951 /* If the expression is something that's an operand, like a constant,
3952 just copy it to a register. */
3953 if (general_operand (exp, GET_MODE (reg)))
3954 emit_move_insn (reg, exp);
3956 /* Otherwise, make a new insn to compute this expression and make sure the
3957 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3958 expression to make sure we don't have any sharing issues. */
3961 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3963 if (insn_invalid_p (insn))
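/* For instance (illustrative), with EXPR->expr being
   (plus:SI (reg:SI 101) (const_int 4)) and reaching register
   (reg:SI 105), the generated sequence is the single insn

     (set (reg:SI 105) (plus:SI (reg:SI 101) (const_int 4)))

   plus whatever CLOBBERs recognition adds; a bare (const_int 4) source
   would instead be handled by the emit_move_insn path above. */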
3974 /* Add EXPR to the end of basic block BB.
3976 This is used by both PRE and code hoisting.
3978 For PRE, we want to verify that the expr is either transparent
3979 or locally anticipatable in the target block. This check makes
3980 no sense for code hoisting. */
3983 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
3985 rtx insn = BB_END (bb);
3987 rtx reg = expr->reaching_reg;
3988 int regno = REGNO (reg);
3991 pat = process_insert_insn (expr);
3992 gcc_assert (pat && INSN_P (pat));
3995 while (NEXT_INSN (pat_end) != NULL_RTX)
3996 pat_end = NEXT_INSN (pat_end);
3998 /* If the last insn is a jump, insert EXPR in front [taking care to
3999 handle cc0, etc. properly]. Similarly, we need to take care of
4000 trapping instructions in the presence of non-call exceptions.
4003 || (NONJUMP_INSN_P (insn)
4004 && (EDGE_COUNT (bb->succs) > 1
4005 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL)))
4010 /* It should always be the case that we can put these instructions
4011 anywhere in the basic block when performing PRE optimizations. */
4013 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4014 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4015 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4017 /* If this is a jump table, then we can't insert stuff here. Since
4018 we know the previous real insn must be the tablejump, we insert
4019 the new instruction just before the tablejump. */
4020 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4021 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4022 insn = prev_real_insn (insn);
4025 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4026 if cc0 isn't set. */
4027 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4029 insn = XEXP (note, 0);
4032 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4033 if (maybe_cc0_setter
4034 && INSN_P (maybe_cc0_setter)
4035 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4036 insn = maybe_cc0_setter;
4039 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4040 new_insn = emit_insn_before_noloc (pat, insn);
4043 /* Likewise if the last insn is a call, as will happen in the presence
4044 of exception handling. */
4045 else if (CALL_P (insn)
4046 && (EDGE_COUNT (bb->succs) > 1 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
4048 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4049 we search backward and place the instructions before the first
4050 parameter is loaded. Do this for everyone for consistency, on the
4051 presumption that we'll get better code elsewhere as well.
4053 It should always be the case that we can put these instructions
4054 anywhere in the basic block when performing PRE optimizations. */
4058 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4059 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4061 /* Since different machines initialize their parameter registers
4062 in different orders, assume nothing. Collect the set of all
4063 parameter registers. */
4064 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4066 /* If we found all the parameter loads, then we want to insert
4067 before the first parameter load.
4069 If we did not find all the parameter loads, then we might have
4070 stopped on the head of the block, which could be a CODE_LABEL.
4071 If we inserted before the CODE_LABEL, then we would be putting
4072 the insn in the wrong basic block. In that case, put the insn
4073 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4074 while (LABEL_P (insn)
4075 || NOTE_INSN_BASIC_BLOCK_P (insn))
4076 insn = NEXT_INSN (insn);
4078 new_insn = emit_insn_before_noloc (pat, insn);
4081 new_insn = emit_insn_after_noloc (pat, insn);
4087 add_label_notes (PATTERN (pat), new_insn);
4088 note_stores (PATTERN (pat), record_set_info, pat);
4092 pat = NEXT_INSN (pat);
4095 gcse_create_count++;
4099 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4100 bb->index, INSN_UID (new_insn));
4101 fprintf (gcse_file, "copying expression %d to reg %d\n",
4102 expr->bitmap_index, regno);
4106 /* Insert partially redundant expressions on edges in the CFG to make
4107 the expressions fully redundant. */
4110 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4112 int e, i, j, num_edges, set_size, did_insert = 0;
4115 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4116 if it reaches any of the deleted expressions. */
4118 set_size = pre_insert_map[0]->size;
4119 num_edges = NUM_EDGES (edge_list);
4120 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4121 sbitmap_vector_zero (inserted, num_edges);
4123 for (e = 0; e < num_edges; e++)
4126 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4128 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4130 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4132 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4133 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4135 struct expr *expr = index_map[j];
4138 /* Now look at each deleted occurrence of this expression. */
4139 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4141 if (! occr->deleted_p)
4144 /* Insert this expression on this edge if it would
4145 reach the deleted occurrence in BB. */
4146 if (!TEST_BIT (inserted[e], j))
4149 edge eg = INDEX_EDGE (edge_list, e);
4151 /* We can't insert anything on an abnormal and
4152 critical edge, so we insert the insn at the end of
4153 the previous block. There are several alternatives
4154 detailed in Morgan's book, p. 277 (sec. 10.5), for
4155 handling this situation. This one is easiest for
4158 if (eg->flags & EDGE_ABNORMAL)
4159 insert_insn_end_bb (index_map[j], bb, 0);
4162 insn = process_insert_insn (index_map[j]);
4163 insert_insn_on_edge (insn, eg);
4168 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4170 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4171 fprintf (gcse_file, "copy expression %d\n",
4172 expr->bitmap_index);
4175 update_ld_motion_stores (expr);
4176 SET_BIT (inserted[e], j);
4178 gcse_create_count++;
4185 sbitmap_vector_free (inserted);
4189 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4190 Given "old_reg <- expr" (INSN), instead of adding after it
4191 reaching_reg <- old_reg
4192 it's better to do the following:
4193 reaching_reg <- expr
4194 old_reg <- reaching_reg
4195 because this way copy propagation can discover additional PRE
4196 opportunities. But if this fails, we try the old way.
4197 When "expr" is a store, i.e.
4198 given "MEM <- old_reg", instead of adding after it
4199 reaching_reg <- old_reg
4200 it's better to add it before as follows:
4201 reaching_reg <- old_reg
4202 MEM <- reaching_reg. */
4205 pre_insert_copy_insn (struct expr *expr, rtx insn)
4207 rtx reg = expr->reaching_reg;
4208 int regno = REGNO (reg);
4209 int indx = expr->bitmap_index;
4210 rtx pat = PATTERN (insn);
4215 /* This block matches the logic in hash_scan_insn. */
4216 switch (GET_CODE (pat))
4223 /* Search through the parallel looking for the set whose
4224 source was the expression that we're interested in. */
4226 for (i = 0; i < XVECLEN (pat, 0); i++)
4228 rtx x = XVECEXP (pat, 0, i);
4229 if (GET_CODE (x) == SET
4230 && expr_equiv_p (SET_SRC (x), expr->expr))
4242 if (REG_P (SET_DEST (set)))
4244 old_reg = SET_DEST (set);
4245 /* Check if we can modify the set destination in the original insn. */
4246 if (validate_change (insn, &SET_DEST (set), reg, 0))
4248 new_insn = gen_move_insn (old_reg, reg);
4249 new_insn = emit_insn_after (new_insn, insn);
4251 /* Keep register set table up to date. */
4252 record_one_set (regno, insn);
4256 new_insn = gen_move_insn (reg, old_reg);
4257 new_insn = emit_insn_after (new_insn, insn);
4259 /* Keep register set table up to date. */
4260 record_one_set (regno, new_insn);
4263 else /* This is possible only in case of a store to memory. */
4265 old_reg = SET_SRC (set);
4266 new_insn = gen_move_insn (reg, old_reg);
4268 /* Check if we can modify the set source in the original insn. */
4269 if (validate_change (insn, &SET_SRC (set), reg, 0))
4270 new_insn = emit_insn_before (new_insn, insn);
4272 new_insn = emit_insn_after (new_insn, insn);
4274 /* Keep register set table up to date. */
4275 record_one_set (regno, new_insn);
4278 gcse_create_count++;
4282 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4283 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4284 INSN_UID (insn), regno);
4287 /* Copy available expressions that reach the redundant expression
4288 to `reaching_reg'. */
4291 pre_insert_copies (void)
4293 unsigned int i, added_copy;
4298 /* For each available expression in the table, copy the result to
4299 `reaching_reg' if the expression reaches a deleted one.
4301 ??? The current algorithm is rather brute force.
4302 Need to do some profiling. */
4304 for (i = 0; i < expr_hash_table.size; i++)
4305 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4307 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4308 we don't want to insert a copy here because the expression may not
4309 really be redundant. So only insert an insn if the expression was
4310 deleted. This test also avoids further processing if the
4311 expression wasn't deleted anywhere. */
4312 if (expr->reaching_reg == NULL)
4315 /* Set when we add a copy for that expression. */
4318 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4320 if (! occr->deleted_p)
4323 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4325 rtx insn = avail->insn;
4327 /* No need to handle this one if handled already. */
4328 if (avail->copied_p)
4331 /* Don't handle this one if it's a redundant one. */
4332 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4335 /* Or if the expression doesn't reach the deleted one. */
4336 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4338 BLOCK_FOR_INSN (occr->insn)))
4343 /* Copy the result of avail to reaching_reg. */
4344 pre_insert_copy_insn (expr, insn);
4345 avail->copied_p = 1;
4350 update_ld_motion_stores (expr);
4354 /* Emit move from SRC to DEST noting the equivalence with the expression computed in INSN. */
4357 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4360 rtx set = single_set (insn), set2;
4364 /* This should never fail since we're creating a reg->reg copy
4365 we've verified to be valid. */
4367 new = emit_insn_after (gen_move_insn (dest, src), insn);
4369 /* Note the equivalence for local CSE pass. */
4370 set2 = single_set (new);
4371 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4373 if ((note = find_reg_equal_equiv_note (insn)))
4374 eqv = XEXP (note, 0);
4376 eqv = SET_SRC (set);
4378 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
4383 /* Delete redundant computations.
4384 Deletion is done by changing the insn to copy the `reaching_reg' of
4385 the expression into the result of the SET. It is left to later passes
4386 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4388 Returns nonzero if a change is made. */
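/* Illustratively (register numbers are made up): a deleted occurrence
       r44 <- (mult r10 r12)
   is rewritten as the copy
       r44 <- r80
   where r80 is the expression's reaching_reg; the passes named above
   then propagate or coalesce the copy away.  */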
4399 for (i = 0; i < expr_hash_table.size; i++)
4400 for (expr = expr_hash_table.table[i];
4402 expr = expr->next_same_hash)
4404 int indx = expr->bitmap_index;
4406 /* We only need to search antic_occr since we require ANTLOC != 0. */
4409 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4411 rtx insn = occr->insn;
4413 basic_block bb = BLOCK_FOR_INSN (insn);
4415 /* We only delete insns that have a single_set. */
4416 if (TEST_BIT (pre_delete_map[bb->index], indx)
4417 && (set = single_set (insn)) != 0)
4419 /* Create a pseudo-reg to store the result of reaching
4420 expressions into. Get the mode for the new pseudo from
4421 the mode of the original destination pseudo. */
4422 if (expr->reaching_reg == NULL)
4424 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4426 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4428 occr->deleted_p = 1;
4429 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4436 "PRE: redundant insn %d (expression %d) in ",
4437 INSN_UID (insn), indx);
4438 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4439 bb->index, REGNO (expr->reaching_reg));
4448 /* Perform GCSE optimizations using PRE.
4449 This is called by one_pre_gcse_pass after all the dataflow analysis
4452 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4453 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4454 Compiler Design and Implementation.
4456 ??? A new pseudo reg is created to hold the reaching expression. The nice
4457 thing about the classical approach is that it would try to use an existing
4458 reg. If the register can't be adequately optimized [i.e. we introduce
4459 reload problems], one could add a pass here to propagate the new register through the block.
4462 ??? We don't handle single sets in PARALLELs because we're [currently] not
4463 able to copy the rest of the parallel when we insert copies to create full
4464 redundancies from partial redundancies. However, there's no reason why we
4465 can't handle PARALLELs in the cases where there are no partial redundancies. */
4472 int did_insert, changed;
4473 struct expr **index_map;
4476 /* Compute a mapping from expression number (`bitmap_index') to
4477 hash table entry. */
4479 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4480 for (i = 0; i < expr_hash_table.size; i++)
4481 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4482 index_map[expr->bitmap_index] = expr;
4484 /* Reset bitmap used to track which insns are redundant. */
4485 pre_redundant_insns = sbitmap_alloc (max_cuid);
4486 sbitmap_zero (pre_redundant_insns);
4488 /* Delete the redundant insns first so that
4489 - we know what register to use for the new insns and for the other
4490 ones with reaching expressions
4491 - we know which insns are redundant when we go to create copies */
4493 changed = pre_delete ();
4495 did_insert = pre_edge_insert (edge_list, index_map);
4497 /* In other places with reaching expressions, copy the expression to the
4498 specially allocated pseudo-reg that reaches the redundant expr. */
4499 pre_insert_copies ();
4502 commit_edge_insertions ();
4507 sbitmap_free (pre_redundant_insns);
4511 /* Top level routine to perform one PRE GCSE pass.
4513 Return nonzero if a change was made. */
4516 one_pre_gcse_pass (int pass)
4520 gcse_subst_count = 0;
4521 gcse_create_count = 0;
4523 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4524 add_noreturn_fake_exit_edges ();
4526 compute_ld_motion_mems ();
4528 compute_hash_table (&expr_hash_table);
4529 trim_ld_motion_mems ();
4531 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
4533 if (expr_hash_table.n_elems > 0)
4535 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4536 compute_pre_data ();
4537 changed |= pre_gcse ();
4538 free_edge_list (edge_list);
4543 remove_fake_exit_edges ();
4544 free_hash_table (&expr_hash_table);
4548 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4549 current_function_name (), pass, bytes_used);
4550 fprintf (gcse_file, "%d substs, %d insns created\n",
4551 gcse_subst_count, gcse_create_count);
4557 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4558 If notes are added to an insn which references a CODE_LABEL, the
4559 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4560 because the following loop optimization pass requires them. */
4562 /* ??? This is very similar to the loop.c add_label_notes function. We
4563 could probably share code here. */
4565 /* ??? If there was a jump optimization pass after gcse and before loop,
4566 then we would not need to do this here, because jump would add the
4567 necessary REG_LABEL notes. */
4570 add_label_notes (rtx x, rtx insn)
4572 enum rtx_code code = GET_CODE (x);
4576 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4578 /* This code used to ignore labels that referred to dispatch tables to
4579 avoid flow generating (slightly) worse code.
4581 We no longer ignore such label references (see LABEL_REF handling in
4582 mark_jump_label for additional information). */
4584 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
4586 if (LABEL_P (XEXP (x, 0)))
4587 LABEL_NUSES (XEXP (x, 0))++;
4591 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4594 add_label_notes (XEXP (x, i), insn);
4595 else if (fmt[i] == 'E')
4596 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4597 add_label_notes (XVECEXP (x, i, j), insn);
4601 /* Compute transparent outgoing information for each block.
4603 An expression is transparent to an edge unless it is killed by
4604 the edge itself. This can only happen with abnormal control flow,
4605 when the edge is traversed through a call. This happens with
4606 non-local labels and exceptions.
4608 This would not be necessary if we split the edge. While this is
4609 normally impossible for abnormal critical edges, with some effort
4610 it should be possible with exception handling, since we still have
4611 control over which handler should be invoked. But due to increased
4612 EH table sizes, this may not be worthwhile. */
4615 compute_transpout (void)
4621 sbitmap_vector_ones (transpout, last_basic_block);
4625 /* Note that flow inserted a nop at the end of basic blocks that
4626 end in call instructions for reasons other than abnormal control flow. */
4628 if (! CALL_P (BB_END (bb)))
4631 for (i = 0; i < expr_hash_table.size; i++)
4632 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4633 if (MEM_P (expr->expr))
4635 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4636 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4639 /* ??? Optimally, we would use interprocedural alias
4640 analysis to determine if this mem is actually killed by this call. */
4642 RESET_BIT (transpout[bb->index], expr->bitmap_index);
4647 /* Code Hoisting variables and subroutines. */
4649 /* Very busy expressions. */
4650 static sbitmap *hoist_vbein;
4651 static sbitmap *hoist_vbeout;
4653 /* Hoistable expressions. */
4654 static sbitmap *hoist_exprs;
4656 /* ??? We could compute post dominators and run this algorithm in
4657 reverse to perform tail merging; doing so would probably be
4658 more effective than the tail merging code in jump.c.
4660 It's unclear if tail merging could be run in parallel with
4661 code hoisting. It would be nice. */
4663 /* Allocate vars used for code hoisting analysis. */
4666 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4668 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4669 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4670 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4672 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4673 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4674 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4675 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4678 /* Free vars used for code hoisting analysis. */
4681 free_code_hoist_mem (void)
4683 sbitmap_vector_free (antloc);
4684 sbitmap_vector_free (transp);
4685 sbitmap_vector_free (comp);
4687 sbitmap_vector_free (hoist_vbein);
4688 sbitmap_vector_free (hoist_vbeout);
4689 sbitmap_vector_free (hoist_exprs);
4690 sbitmap_vector_free (transpout);
4692 free_dominance_info (CDI_DOMINATORS);
4695 /* Compute the very busy expressions at entry/exit from each block.
4697 An expression is very busy if all paths from a given point
4698 compute the expression. */
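/* As a summary of the sbitmap operations iterated below, the equations
   solved are

       VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
       VBEOUT(bb) = intersection of VBEIN(s) over all successors s

   repeated until no bit changes.  */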
4701 compute_code_hoist_vbeinout (void)
4703 int changed, passes;
4706 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4707 sbitmap_vector_zero (hoist_vbein, last_basic_block);
4716 /* We scan the blocks in the reverse order to speed up the convergence. */
4718 FOR_EACH_BB_REVERSE (bb)
4720 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4721 hoist_vbeout[bb->index], transp[bb->index]);
4722 if (bb->next_bb != EXIT_BLOCK_PTR)
4723 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
4730 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
4733 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4736 compute_code_hoist_data (void)
4738 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4739 compute_transpout ();
4740 compute_code_hoist_vbeinout ();
4741 calculate_dominance_info (CDI_DOMINATORS);
4743 fprintf (gcse_file, "\n");
4746 /* Determine if the expression identified by EXPR_INDEX would
4747 reach BB unimpaired if it were placed at the end of EXPR_BB.
4749 It's unclear exactly what Muchnick meant by "unimpaired". It seems
4750 to me that the expression must either be computed or transparent in
4751 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4752 would allow the expression to be hoisted out of loops, even if
4753 the expression wasn't a loop invariant.
4755 Contrast this to reachability for PRE where an expression is
4756 considered reachable if *any* path reaches instead of *all* paths. */
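/* Concretely, the recursive walk below succeeds only when every block
   strictly between EXPR_BB and BB, on every path, is transparent for
   the expression and does not itself compute it; reaching the entry
   block, a computing block, or a non-transparent block fails the
   test.  */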
4760 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4764 int visited_allocated_locally = 0;
4767 if (visited == NULL)
4769 visited_allocated_locally = 1;
4770 visited = xcalloc (last_basic_block, 1);
4773 FOR_EACH_EDGE (pred, ei, bb->preds)
4775 basic_block pred_bb = pred->src;
4777 if (pred->src == ENTRY_BLOCK_PTR)
4779 else if (pred_bb == expr_bb)
4781 else if (visited[pred_bb->index])
4784 /* Does this predecessor generate this expression? */
4785 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4787 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4793 visited[pred_bb->index] = 1;
4794 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4799 if (visited_allocated_locally)
4802 return (pred == NULL);
4805 /* Actually perform code hoisting. */
4810 basic_block bb, dominated;
4812 unsigned int domby_len;
4814 struct expr **index_map;
4817 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4819 /* Compute a mapping from expression number (`bitmap_index') to
4820 hash table entry. */
4822 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4823 for (i = 0; i < expr_hash_table.size; i++)
4824 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4825 index_map[expr->bitmap_index] = expr;
4827 /* Walk over each basic block looking for potentially hoistable
4828 expressions; nothing gets hoisted from the entry block. */
4832 int insn_inserted_p;
4834 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
4835 /* Examine each expression that is very busy at the exit of this
4836 block. These are the potentially hoistable expressions. */
4837 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4841 if (TEST_BIT (hoist_vbeout[bb->index], i)
4842 && TEST_BIT (transpout[bb->index], i))
4844 /* We've found a potentially hoistable expression, now
4845 we look at every block BB dominates to see if it
4846 computes the expression. */
4847 for (j = 0; j < domby_len; j++)
4849 dominated = domby[j];
4850 /* Ignore self dominance. */
4851 if (bb == dominated)
4853 /* We've found a dominated block, now see if it computes
4854 the busy expression and whether or not moving that
4855 expression to the "beginning" of that block is safe. */
4856 if (!TEST_BIT (antloc[dominated->index], i))
4859 /* Note if the expression would reach the dominated block
4860 unimpaired if it were placed at the end of BB.
4862 Keep track of how many times this expression is hoistable
4863 from a dominated block into BB. */
4864 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4868 /* If we found more than one hoistable occurrence of this
4869 expression, then note it in the bitmap of expressions to
4870 hoist. It makes no sense to hoist things which are computed
4871 in only one BB, and doing so tends to pessimize register
4872 allocation. One could increase this value to try harder
4873 to avoid any possible code expansion due to register
4874 allocation issues; however experiments have shown that
4875 the vast majority of hoistable expressions are only movable
4876 from two successors, so raising this threshold is likely
4877 to nullify any benefit we get from code hoisting. */
4880 SET_BIT (hoist_exprs[bb->index], i);
4885 /* If we found nothing to hoist, then quit now. */
4892 /* Loop over all the hoistable expressions. */
4893 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4895 /* We want to insert the expression into BB only once, so
4896 note when we've inserted it. */
4897 insn_inserted_p = 0;
4899 /* These tests should be the same as the tests above. */
4900 if (TEST_BIT (hoist_vbeout[bb->index], i))
4902 /* We've found a potentially hoistable expression, now
4903 we look at every block BB dominates to see if it
4904 computes the expression. */
4905 for (j = 0; j < domby_len; j++)
4907 dominated = domby[j];
4908 /* Ignore self dominance. */
4909 if (bb == dominated)
4912 /* We've found a dominated block, now see if it computes
4913 the busy expression and whether or not moving that
4914 expression to the "beginning" of that block is safe. */
4915 if (!TEST_BIT (antloc[dominated->index], i))
4918 /* The expression is computed in the dominated block and
4919 it would be safe to compute it at the start of the
4920 dominated block. Now we have to determine if the
4921 expression would reach the dominated block if it was
4922 placed at the end of BB. */
4923 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4925 struct expr *expr = index_map[i];
4926 struct occr *occr = expr->antic_occr;
4930 /* Find the right occurrence of this expression. */
4931 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
4936 set = single_set (insn);
4939 /* Create a pseudo-reg to store the result of reaching
4940 expressions into. Get the mode for the new pseudo
4941 from the mode of the original destination pseudo. */
4942 if (expr->reaching_reg == NULL)
4944 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4946 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4948 occr->deleted_p = 1;
4949 if (!insn_inserted_p)
4951 insert_insn_end_bb (index_map[i], bb, 0);
4952 insn_inserted_p = 1;
4964 /* Top level routine to perform one code hoisting (aka unification) pass
4966 Return nonzero if a change was made. */
4969 one_code_hoisting_pass (void)
4973 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4974 compute_hash_table (&expr_hash_table);
4976 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
4978 if (expr_hash_table.n_elems > 0)
4980 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
4981 compute_code_hoist_data ();
4983 free_code_hoist_mem ();
4986 free_hash_table (&expr_hash_table);
4991 /* Here we provide the things required to do store motion towards
4992 the exit. In order for this to be effective, gcse also needed to
4993 be taught how to move a load when it is killed only by a store to itself.
4998 void foo(float scale)
5000 for (i=0; i<10; i++)
          a[i] *= scale;
5004 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5005 the load out since it's live around the loop and is stored at the bottom of the loop.
5008 The 'Load Motion' referred to and implemented in this file is
5009 an enhancement to gcse which when using edge based lcm, recognizes
5010 this situation and allows gcse to move the load out of the loop.
5012 Once gcse has hoisted the load, store motion can then push this
5013 load towards the exit, and we end up with no loads or stores of 'i' in the loop. */
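/* Schematically, the combined effect on the loop above is roughly the
   following (an illustrative sketch, not actual compiler output):

       loop:                            r = i;
         ... = i;                     loop:
         i = ...;            =>         ... = r;
       if (...) goto loop;              r = ...;
                                      if (...) goto loop;
                                      i = r;
*/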
5016 /* This will search the ldst list for a matching expression. If it
5017 doesn't find one, we create one and initialize it. */
5019 static struct ls_expr *
5022 int do_not_record_p = 0;
5023 struct ls_expr * ptr;
5026 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5027 NULL, /*have_reg_qty=*/false);
5029 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5030 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
5033 ptr = xmalloc (sizeof (struct ls_expr));
5035 ptr->next = pre_ldst_mems;
5038 ptr->pattern_regs = NULL_RTX;
5039 ptr->loads = NULL_RTX;
5040 ptr->stores = NULL_RTX;
5041 ptr->reaching_reg = NULL_RTX;
5044 ptr->hash_index = hash;
5045 pre_ldst_mems = ptr;
5050 /* Free up an individual ldst entry. */
5053 free_ldst_entry (struct ls_expr * ptr)
5055 free_INSN_LIST_list (& ptr->loads);
5056 free_INSN_LIST_list (& ptr->stores);
5061 /* Free up all memory associated with the ldst list. */
5064 free_ldst_mems (void)
5066 while (pre_ldst_mems)
5068 struct ls_expr * tmp = pre_ldst_mems;
5070 pre_ldst_mems = pre_ldst_mems->next;
5072 free_ldst_entry (tmp);
5075 pre_ldst_mems = NULL;
5078 /* Dump debugging info about the ldst list. */
5081 print_ldst_list (FILE * file)
5083 struct ls_expr * ptr;
5085 fprintf (file, "LDST list: \n");
5087 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5089 fprintf (file, " Pattern (%3d): ", ptr->index);
5091 print_rtl (file, ptr->pattern);
5093 fprintf (file, "\n Loads : ");
5096 print_rtl (file, ptr->loads);
5098 fprintf (file, "(nil)");
5100 fprintf (file, "\n Stores : ");
5103 print_rtl (file, ptr->stores);
5105 fprintf (file, "(nil)");
5107 fprintf (file, "\n\n");
5110 fprintf (file, "\n");
5113 /* Returns 1 if X is in the list of ldst only expressions. */
5115 static struct ls_expr *
5116 find_rtx_in_ldst (rtx x)
5118 struct ls_expr * ptr;
5120 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5121 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
5127 /* Assign each element of the list of mems a monotonically increasing value. */
5130 enumerate_ldsts (void)
5132 struct ls_expr * ptr;
5135 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5141 /* Return first item in the list. */
5143 static inline struct ls_expr *
5144 first_ls_expr (void)
5146 return pre_ldst_mems;
5149 /* Return the next item in the list after the specified one. */
5151 static inline struct ls_expr *
5152 next_ls_expr (struct ls_expr * ptr)
5157 /* Load Motion for loads which only kill themselves. */
5159 /* Return true if x is a simple MEM operation, with no registers or
5160 side effects. These are the types of loads we consider for the
5161 ld_motion list; otherwise we let the usual aliasing take care of it. */
5169 if (MEM_VOLATILE_P (x))
5172 if (GET_MODE (x) == BLKmode)
5175 /* If we are handling exceptions, we must be careful with memory references
5176 that may trap. If we are not, the behavior is undefined, so we may just continue. */
5178 if (flag_non_call_exceptions && may_trap_p (x))
5181 if (side_effects_p (x))
5184 /* Do not consider function arguments passed on the stack. */
5185 if (reg_mentioned_p (stack_pointer_rtx, x))
5188 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5194 /* Make sure there isn't a buried reference in this pattern anywhere.
5195 If there is, invalidate the entry for it since we're not capable
5196 of fixing it up just yet. We have to be sure we know about ALL
5197 loads since the aliasing code will allow all entries in the
5198 ld_motion list to not-alias itself. If we miss a load, we will get
5199 the wrong value since gcse might common it and we won't know to fix it up. */
5203 invalidate_any_buried_refs (rtx x)
5207 struct ls_expr * ptr;
5209 /* Invalidate it in the list. */
5210 if (MEM_P (x) && simple_mem (x))
5212 ptr = ldst_entry (x);
5216 /* Recursively process the insn. */
5217 fmt = GET_RTX_FORMAT (GET_CODE (x));
5219 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5222 invalidate_any_buried_refs (XEXP (x, i));
5223 else if (fmt[i] == 'E')
5224 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5225 invalidate_any_buried_refs (XVECEXP (x, i, j));
5229 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5230 being defined as MEM loads and stores to symbols, with no side effects
5231 and no registers in the expression. For a MEM destination, we also
5232 check that the insn is still valid if we replace the destination with a
5233 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5234 which don't match these criteria, they are invalidated and trimmed out later. */
5238 compute_ld_motion_mems (void)
5240 struct ls_expr * ptr;
5244 pre_ldst_mems = NULL;
5248 for (insn = BB_HEAD (bb);
5249 insn && insn != NEXT_INSN (BB_END (bb));
5250 insn = NEXT_INSN (insn))
5254 if (GET_CODE (PATTERN (insn)) == SET)
5256 rtx src = SET_SRC (PATTERN (insn));
5257 rtx dest = SET_DEST (PATTERN (insn));
5259 /* Check for a simple LOAD... */
5260 if (MEM_P (src) && simple_mem (src))
5262 ptr = ldst_entry (src);
5264 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5270 /* Make sure there isn't a buried load somewhere. */
5271 invalidate_any_buried_refs (src);
5274 /* Check for stores. Don't worry about aliased ones, they
5275 will block any movement we might do later. We only care
5276 about this exact pattern since this is the only
5277 circumstance in which we will ignore the aliasing info. */
5278 if (MEM_P (dest) && simple_mem (dest))
5280 ptr = ldst_entry (dest);
5283 && GET_CODE (src) != ASM_OPERANDS
5284 /* Check for REG manually since want_to_gcse_p
5285 returns 0 for all REGs. */
5286 && can_assign_to_reg_p (src))
5287 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5293 invalidate_any_buried_refs (PATTERN (insn));
5299 /* Remove any references that have either been invalidated or are not in the
5300 expression list for pre gcse. */
5303 trim_ld_motion_mems (void)
5305 struct ls_expr * * last = & pre_ldst_mems;
5306 struct ls_expr * ptr = pre_ldst_mems;
5312 /* Delete if entry has been made invalid. */
5315 /* Delete if we cannot find this mem in the expression list. */
5316 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5318 for (expr = expr_hash_table.table[hash];
5320 expr = expr->next_same_hash)
5321 if (expr_equiv_p (expr->expr, ptr->pattern))
5325 expr = (struct expr *) 0;
5329 /* Set the expression field if we are keeping it. */
5337 free_ldst_entry (ptr);
5342 /* Show the world what we've found. */
5343 if (gcse_file && pre_ldst_mems != NULL)
5344 print_ldst_list (gcse_file);
5347 /* This routine will take an expression which we are replacing with
5348 a reaching register, and update any stores that are needed if
5349 that expression is in the ld_motion list. Stores are updated by
5350 copying their SRC to the reaching register, and then storing
5351 the reaching register into the store location. This keeps the
5352 correct value in the reaching register for the loads. */
5355 update_ld_motion_stores (struct expr * expr)
5357 struct ls_expr * mem_ptr;
5359 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5361 /* We can try to find just the REACHED stores, but it shouldn't
5362 matter to set the reaching reg everywhere... some might be
5363 dead and should be eliminated later. */
5365 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5366 where reg is the reaching reg used in the load. We checked in
5367 compute_ld_motion_mems that we can replace (set mem expr) with
5368 (set reg expr) in that insn. */
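/* For instance (hypothetical registers): the store "MEM[sym] <- r33"
   becomes
       r80 <- r33
       MEM[sym] <- r80
   where r80 is the reaching reg used by the moved load.  */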
5369 rtx list = mem_ptr->stores;
5371 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5373 rtx insn = XEXP (list, 0);
5374 rtx pat = PATTERN (insn);
5375 rtx src = SET_SRC (pat);
5376 rtx reg = expr->reaching_reg;
5379 /* If we've already copied it, continue. */
5380 if (expr->reaching_reg == src)
5385 fprintf (gcse_file, "PRE: store updated with reaching reg ");
5386 print_rtl (gcse_file, expr->reaching_reg);
5387 fprintf (gcse_file, ":\n ");
5388 print_inline_rtx (gcse_file, insn, 8);
5389 fprintf (gcse_file, "\n");
5392 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
5393 new = emit_insn_before (copy, insn);
5394 record_one_set (REGNO (reg), new);
5395 SET_SRC (pat) = reg;
5397 /* un-recognize this pattern since it's probably different now. */
5398 INSN_CODE (insn) = -1;
5399 gcse_create_count++;
5404 /* Store motion code. */
5406 #define ANTIC_STORE_LIST(x) ((x)->loads)
5407 #define AVAIL_STORE_LIST(x) ((x)->stores)
5408 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5410 /* This is used to communicate the target bitvector we want to use in the
5411 reg_set_info routine when called via the note_stores mechanism. */
5412 static int * regvec;
5414 /* And current insn, for the same routine. */
5415 static rtx compute_store_table_current_insn;
5417 /* Used in computing the reverse edge graph bit vectors. */
5418 static sbitmap * st_antloc;
5420 /* Global holding the number of store expressions we are dealing with. */
5421 static int num_stores;
5423 /* Checks to see if we need to mark a register set. Called from note_stores. */
5427 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5430 sbitmap bb_reg = data;
5432 if (GET_CODE (dest) == SUBREG)
5433 dest = SUBREG_REG (dest);
5437 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5439 SET_BIT (bb_reg, REGNO (dest));
5443 /* Clear any mark that says that this insn sets dest. Called from note_stores. */
5447 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5450 int *dead_vec = data;
5452 if (GET_CODE (dest) == SUBREG)
5453 dest = SUBREG_REG (dest);
5456 if (REG_P (dest) && dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5457 dead_vec[REGNO (dest)] = 0;
5460 /* Return zero if any of the registers in list X is killed
5461 because it is flagged as set in the array REGS_SET. */
5464 store_ops_ok (rtx x, int *regs_set)
5468 for (; x; x = XEXP (x, 1))
5471 if (regs_set[REGNO(reg)])
5478 /* Returns a list of registers mentioned in X. */
5480 extract_mentioned_regs (rtx x)
5482 return extract_mentioned_regs_helper (x, NULL_RTX);
5485 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used registers. */
5488 extract_mentioned_regs_helper (rtx x, rtx accum)
5494 /* Repeat is used to turn tail-recursion into iteration. */
5500 code = GET_CODE (x);
5504 return alloc_EXPR_LIST (0, x, accum);
5514 /* We do not run this function with arguments having side effects. */
5533 i = GET_RTX_LENGTH (code) - 1;
5534 fmt = GET_RTX_FORMAT (code);
5540 rtx tem = XEXP (x, i);
5542 /* If we are about to do the last recursive call
5543 needed at this level, change it into iteration. */
5550 accum = extract_mentioned_regs_helper (tem, accum);
5552 else if (fmt[i] == 'E')
5556 for (j = 0; j < XVECLEN (x, i); j++)
5557 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
5564 /* Determine whether INSN is a MEM store pattern that we will consider moving.
5565 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5566 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5567 including) the insn in this basic block. We must be passing through BB from
5568 head to end, as we are using this fact to speed things up.
5570 The results are stored this way:
5572 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5573 -- if the processed expression is not anticipatable, NULL_RTX is added
5574 there instead, so that we can use it as indicator that no further
5575 expression of this type may be anticipatable
5576 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5577 consequently, all of them but this head are dead and may be deleted.
5578 -- if the expression is not available, the insn that causes it to fail
5579 to be available is stored in reaching_reg.
5581 Things are complicated a bit by the fact that there already may be stores
5582 to the same MEM from other blocks; also caller must take care of the
5583 necessary cleanup of the temporary markers after the end of the basic block. */
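/* For example (illustrative): if a block contains two stores to the same
   MEM, "MEM <- a; ...; MEM <- b", and neither is killed in between, at
   most the first can become the anticipatable entry, while the second
   becomes the head of AVAIL_STORE_LIST and supersedes the first as
   described above.  */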
5587 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5589 struct ls_expr * ptr;
5591 int check_anticipatable, check_available;
5592 basic_block bb = BLOCK_FOR_INSN (insn);
5594 set = single_set (insn);
5598 dest = SET_DEST (set);
5600 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5601 || GET_MODE (dest) == BLKmode)
5604 if (side_effects_p (dest))
5607 /* If we are handling exceptions, we must be careful with memory references
5608 that may trap. If we are not, the behavior is undefined, so we may just continue. */
5610 if (flag_non_call_exceptions && may_trap_p (dest))
5613 /* Even if the destination cannot trap, the source may. In this case we'd
5614 need to handle updating the REG_EH_REGION note. */
5615 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5618 ptr = ldst_entry (dest);
5619 if (!ptr->pattern_regs)
5620 ptr->pattern_regs = extract_mentioned_regs (dest);
5622 /* Do not check for anticipatability if we either found one anticipatable
5623 store already, or tested for one and found out that it was killed. */
5624 check_anticipatable = 0;
5625 if (!ANTIC_STORE_LIST (ptr))
5626 check_anticipatable = 1;
5629 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5631 && BLOCK_FOR_INSN (tmp) != bb)
5632 check_anticipatable = 1;
5634 if (check_anticipatable)
5636 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5640 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5641 ANTIC_STORE_LIST (ptr));
5644 /* It is not necessary to check whether the store is available if we did
5645 it successfully before; if we failed before, do not bother to check
5646 until we reach the insn that caused us to fail. */
5647 check_available = 0;
5648 if (!AVAIL_STORE_LIST (ptr))
5649 check_available = 1;
5652 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5653 if (BLOCK_FOR_INSN (tmp) != bb)
5654 check_available = 1;
5656 if (check_available)
5658 /* Check whether we have already reached the insn at which the check
5659 failed last time. */
5660 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5662 for (tmp = BB_END (bb);
5663 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5664 tmp = PREV_INSN (tmp))
5667 check_available = 0;
5670 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5672 &LAST_AVAIL_CHECK_FAILURE (ptr));
5674 if (!check_available)
5675 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5678 /* Find available and anticipatable stores. */
5681 compute_store_table (void)
5687 int *last_set_in, *already_set;
5688 struct ls_expr * ptr, **prev_next_ptr_ptr;
5690 max_gcse_regno = max_reg_num ();
5692 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
5694 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
5696 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
5697 already_set = xmalloc (sizeof (int) * max_gcse_regno);
5699 /* Find all the stores we care about. */
5702 /* First compute the registers set in this block. */
5703 regvec = last_set_in;
5705 for (insn = BB_HEAD (bb);
5706 insn != NEXT_INSN (BB_END (bb));
5707 insn = NEXT_INSN (insn))
5709 if (! INSN_P (insn))
5714 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5715 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5717 last_set_in[regno] = INSN_UID (insn);
5718 SET_BIT (reg_set_in_block[bb->index], regno);
5722 pat = PATTERN (insn);
5723 compute_store_table_current_insn = insn;
5724 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
5727 /* Now find the stores. */
5728 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5729 regvec = already_set;
5730 for (insn = BB_HEAD (bb);
5731 insn != NEXT_INSN (BB_END (bb));
5732 insn = NEXT_INSN (insn))
5734 if (! INSN_P (insn))
5739 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5740 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5741 already_set[regno] = 1;
5744 pat = PATTERN (insn);
5745 note_stores (pat, reg_set_info, NULL);
5747 /* Now that we've marked regs, look for stores. */
5748 find_moveable_store (insn, already_set, last_set_in);
5750 /* Unmark regs that are no longer set. */
5751 compute_store_table_current_insn = insn;
5752 note_stores (pat, reg_clear_last_set, last_set_in);
5755 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5756 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5757 && last_set_in[regno] == INSN_UID (insn))
5758 last_set_in[regno] = 0;
5762 #ifdef ENABLE_CHECKING
5763 /* last_set_in should now be all-zero. */
5764 for (regno = 0; regno < max_gcse_regno; regno++)
5765 gcc_assert (!last_set_in[regno]);
5768 /* Clear temporary marks. */
5769 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5771 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5772 if (ANTIC_STORE_LIST (ptr)
5773 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5774 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5778 /* Remove the stores that are not available anywhere, as there will
5779 be no opportunity to optimize them. */
5780 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5782 ptr = *prev_next_ptr_ptr)
5784 if (!AVAIL_STORE_LIST (ptr))
5786 *prev_next_ptr_ptr = ptr->next;
5787 free_ldst_entry (ptr);
5790 prev_next_ptr_ptr = &ptr->next;
5793 ret = enumerate_ldsts ();
5797 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
5798 print_ldst_list (gcse_file);
5806 /* Check to see if the load X is aliased with STORE_PATTERN.
5807 AFTER is true if we are checking the case when STORE_PATTERN occurs after X. */
5811 load_kills_store (rtx x, rtx store_pattern, int after)
5814 return anti_dependence (x, store_pattern);
5816 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5820 /* Go through the entire insn X, looking for any loads which might alias
5821 STORE_PATTERN. Return true if found.
5822 AFTER is true if we are checking the case when STORE_PATTERN occurs
5823 after the insn X. */
5826 find_loads (rtx x, rtx store_pattern, int after)
5835 if (GET_CODE (x) == SET)
5840 if (load_kills_store (x, store_pattern, after))
5844 /* Recursively process the insn. */
5845 fmt = GET_RTX_FORMAT (GET_CODE (x));
5847 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5850 ret |= find_loads (XEXP (x, i), store_pattern, after);
5851 else if (fmt[i] == 'E')
5852 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5853 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5858 /* Check if INSN kills the store pattern X (is aliased with it).
5859 AFTER is true if we are checking the case when store X occurs
5860 after the insn. Return true if it does. */
5863 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
5865 rtx reg, base, note;
5872 /* A normal or pure call might read from pattern,
5873 but a const call will not. */
5874 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
5877 /* But even a const call reads its parameters. Check whether the
5878 base of some of the registers used in the mem is the stack pointer. */
5879 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5881 base = find_base_term (XEXP (reg, 0));
5883 || (GET_CODE (base) == ADDRESS
5884 && GET_MODE (base) == Pmode
5885 && XEXP (base, 0) == stack_pointer_rtx))
5892 if (GET_CODE (PATTERN (insn)) == SET)
5894 rtx pat = PATTERN (insn);
5895 rtx dest = SET_DEST (pat);
5897 if (GET_CODE (dest) == ZERO_EXTRACT)
5898 dest = XEXP (dest, 0);
5900 /* Check for memory stores to aliased objects. */
5902 && !expr_equiv_p (dest, x))
5906 if (output_dependence (dest, x))
5911 if (output_dependence (x, dest))
5915 if (find_loads (SET_SRC (pat), x, after))
5918 else if (find_loads (PATTERN (insn), x, after))
5921 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5922 location aliased with X, then this insn kills X. */
5923 note = find_reg_equal_equiv_note (insn);
5926 note = XEXP (note, 0);
5928 /* However, if the note represents a must alias rather than a may
5929 alias relationship, then it does not kill X. */
5930 if (expr_equiv_p (note, x))
5933 /* See if there are any aliased loads in the note. */
5934 return find_loads (note, x, after);
5937 /* Returns true if the expression X is loaded or clobbered on or after INSN
5938 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
5939 or after the insn. X_REGS is list of registers mentioned in X. If the store
5940 is killed, return in FAIL_INSN the last insn at which this happens. */
5943 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
5944 int *regs_set_after, rtx *fail_insn)
5946 rtx last = BB_END (bb), act;
5948 if (!store_ops_ok (x_regs, regs_set_after))
5950 /* We do not know where it will happen. */
5952 *fail_insn = NULL_RTX;
5956 /* Scan from the end, so that fail_insn is determined correctly. */
5957 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
5958 if (store_killed_in_insn (x, x_regs, act, false))
5968 /* Returns true if the expression X is loaded or clobbered on or before INSN
5969 within basic block BB. X_REGS is list of registers mentioned in X.
5970 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
5972 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
5973 int *regs_set_before)
5975 rtx first = BB_HEAD (bb);
5977 if (!store_ops_ok (x_regs, regs_set_before))
5980 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
5981 if (store_killed_in_insn (x, x_regs, insn, true))
5987 /* Fill in available, anticipatable, transparent and kill vectors in
5988 STORE_DATA, based on lists of available and anticipatable stores. */
5990 build_store_vectors (void)
5993 int *regs_set_in_block;
5995 struct ls_expr * ptr;
5998 /* Build the gen_vector. This is any store in the table which is not killed
5999 by aliasing later in its block. */
6000 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
6001 sbitmap_vector_zero (ae_gen, last_basic_block);
6003 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
6004 sbitmap_vector_zero (st_antloc, last_basic_block);
6006 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6008 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6010 insn = XEXP (st, 0);
6011 bb = BLOCK_FOR_INSN (insn);
6013 /* If we've already seen an available expression in this block,
6014 we can delete this one (it occurs earlier in the block). We'll
6015 copy the SRC expression to an unused register in case there
6016 are any side effects. */
6017 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6019 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6021 fprintf (gcse_file, "Removing redundant store:\n");
6022 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6025 SET_BIT (ae_gen[bb->index], ptr->index);
6028 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6030 insn = XEXP (st, 0);
6031 bb = BLOCK_FOR_INSN (insn);
6032 SET_BIT (st_antloc[bb->index], ptr->index);
6036 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6037 sbitmap_vector_zero (ae_kill, last_basic_block);
6039 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6040 sbitmap_vector_zero (transp, last_basic_block);
6041 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
6045 for (regno = 0; regno < max_gcse_regno; regno++)
6046 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6048 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6050 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6051 bb, regs_set_in_block, NULL))
6053 /* It should not be necessary to consider the expression
6054 killed if it is both anticipatable and available. */
6055 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6056 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6057 SET_BIT (ae_kill[bb->index], ptr->index);
6060 SET_BIT (transp[bb->index], ptr->index);
6064 free (regs_set_in_block);
6068 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
6069 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
6070 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
6071 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
6075 /* Insert an instruction at the beginning of a basic block, and update
6076 the BB_HEAD if needed. */
6079 insert_insn_start_bb (rtx insn, basic_block bb)
6081 /* Insert at start of successor block. */
6082 rtx prev = PREV_INSN (BB_HEAD (bb));
6083 rtx before = BB_HEAD (bb);
6086 if (! LABEL_P (before)
6087 && (! NOTE_P (before)
6088 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6091 if (prev == BB_END (bb))
6093 before = NEXT_INSN (before);
6096 insn = emit_insn_after_noloc (insn, prev);
6100 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
6102 print_inline_rtx (gcse_file, insn, 6);
6103 fprintf (gcse_file, "\n");
6107 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6108 the memory reference, and E is the edge to insert it on. Returns nonzero
6109 if an edge insertion was performed. */
6112 insert_store (struct ls_expr * expr, edge e)
6119 /* We did all the deletes before this insert, so if we didn't delete a
6120 store, then we haven't set the reaching reg yet either. */
6121 if (expr->reaching_reg == NULL_RTX)
6124 if (e->flags & EDGE_FAKE)
6127 reg = expr->reaching_reg;
6128 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6130 /* If we are inserting this expression on ALL predecessor edges of a BB,
6131 insert it at the start of the BB instead, and reset the insert bits on
6132 the other edges so that we do not also insert it on them. */
6134 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6135 if (!(tmp->flags & EDGE_FAKE))
6137 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6139 gcc_assert (index != EDGE_INDEX_NO_EDGE);
6140 if (! TEST_BIT (pre_insert_map[index], expr->index))
6144 /* If tmp is NULL, we found an insertion on every edge, blank the
6145 insertion vector for these edges, and insert at the start of the BB. */
6146 if (!tmp && bb != EXIT_BLOCK_PTR)
6148 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6150 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6151 RESET_BIT (pre_insert_map[index], expr->index);
6153 insert_insn_start_bb (insn, bb);
6157 /* We can't put stores in the front of blocks pointed to by abnormal
6158 edges since that may put a store where one didn't use to be. */
6159 gcc_assert (!(e->flags & EDGE_ABNORMAL));
6161 insert_insn_on_edge (insn, e);
6165 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6166 e->src->index, e->dest->index);
6167 print_inline_rtx (gcse_file, insn, 6);
6168 fprintf (gcse_file, "\n");
6174 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6175 memory location in SMEXPR set in basic block BB.
6177 This could be rather expensive. */
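/* The walk below is an iterative depth-first search over the successor
   edges, driven by an explicit stack of edge iterators and a VISITED
   bitmap; a branch of the search stops early once it reaches a block
   that contains an anticipated occurrence of the store itself.  */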
6180 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6182 edge_iterator *stack, ei;
6185 sbitmap visited = sbitmap_alloc (last_basic_block);
6186 rtx last, insn, note;
6187 rtx mem = smexpr->pattern;
6189 stack = xmalloc (sizeof (edge_iterator) * n_basic_blocks);
6191 ei = ei_start (bb->succs);
6193 sbitmap_zero (visited);
6195 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6203 sbitmap_free (visited);
6206 act = ei_edge (stack[--sp]);
6210 if (bb == EXIT_BLOCK_PTR
6211 || TEST_BIT (visited, bb->index))
6215 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6218 SET_BIT (visited, bb->index);
6220 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6222 for (last = ANTIC_STORE_LIST (smexpr);
6223 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6224 last = XEXP (last, 1))
6226 last = XEXP (last, 0);
6229 last = NEXT_INSN (BB_END (bb));
6231 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6234 note = find_reg_equal_equiv_note (insn);
6235 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6239 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6241 remove_note (insn, note);
6246 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6248 if (EDGE_COUNT (bb->succs) > 0)
6252 ei = ei_start (bb->succs);
6253 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6258 /* This routine will replace a store with a SET to a specified register. */
6261 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6263 rtx insn, mem, note, set, ptr, pair;
6265 mem = smexpr->pattern;
6266 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6267 insn = emit_insn_after (insn, del);
6272 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6273 print_inline_rtx (gcse_file, del, 6);
6274 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6275 print_inline_rtx (gcse_file, insn, 6);
6276 fprintf (gcse_file, "\n");
6279 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6280 if (XEXP (ptr, 0) == del)
6282 XEXP (ptr, 0) = insn;
6286 /* Move the notes from the deleted insn to its replacement, and patch
6287 up the LIBCALL notes. */
6288 REG_NOTES (insn) = REG_NOTES (del);
6290 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6293 pair = XEXP (note, 0);
6294 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6295 XEXP (note, 0) = insn;
6297 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6300 pair = XEXP (note, 0);
6301 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6302 XEXP (note, 0) = insn;
6307 /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
6308 they are no longer accurate provided that they are reached by this
6309 definition, so drop them. */
6310 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6313 set = single_set (insn);
6316 if (expr_equiv_p (SET_DEST (set), mem))
6318 note = find_reg_equal_equiv_note (insn);
6319 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6323 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6325 remove_note (insn, note);
6327 remove_reachable_equiv_notes (bb, smexpr);
6331 /* Delete a store, but copy the value that would have been stored into
6332 the reaching_reg for later storing. */
6335 delete_store (struct ls_expr * expr, basic_block bb)
6339 if (expr->reaching_reg == NULL_RTX)
6340 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
6342 reg = expr->reaching_reg;
6344 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6347 if (BLOCK_FOR_INSN (del) == bb)
6349 /* We know there is only one since we deleted redundant
6350 ones during the available computation. */
6351 replace_store_insn (reg, del, bb, expr);
6357 /* Free memory used by store motion. */
6360 free_store_memory (void)
6365 sbitmap_vector_free (ae_gen);
6367 sbitmap_vector_free (ae_kill);
6369 sbitmap_vector_free (transp);
6371 sbitmap_vector_free (st_antloc);
6373 sbitmap_vector_free (pre_insert_map);
6375 sbitmap_vector_free (pre_delete_map);
6376 if (reg_set_in_block)
6377 sbitmap_vector_free (reg_set_in_block);
6379 ae_gen = ae_kill = transp = st_antloc = NULL;
6380 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6383 /* Perform store motion. Much like gcse, except we move expressions the
6384 other way by looking at the flowgraph in reverse. */
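/* In outline: compute_store_table collects the candidate stores,
   build_store_vectors derives the antic/avail/kill/transparent bitmaps,
   and pre_edge_rev_lcm solves the LCM equations over the reverse
   flowgraph, producing the pre_insert_map and pre_delete_map that
   drive the edge insertions and deletions below.  */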
6391 struct ls_expr * ptr;
6392 int update_flow = 0;
6396 fprintf (gcse_file, "before store motion\n");
6397 print_rtl (gcse_file, get_insns ());
6400 init_alias_analysis ();
6402 /* Find all the available and anticipatable stores. */
6403 num_stores = compute_store_table ();
6404 if (num_stores == 0)
6406 sbitmap_vector_free (reg_set_in_block);
6407 end_alias_analysis ();
6411 /* Now compute kill & transp vectors. */
6412 build_store_vectors ();
6413 add_noreturn_fake_exit_edges ();
6414 connect_infinite_loops_to_exit ();
6416 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
6417 st_antloc, ae_kill, &pre_insert_map,
6420 /* Now we want to insert the new stores which are going to be needed. */
6421 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6423 /* If any of the edges we have above are abnormal, we can't move this store. */
6425 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6426 if (TEST_BIT (pre_insert_map[x], ptr->index)
6427 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6432 if (gcse_file != NULL)
6434 "Can't replace store %d: abnormal edge from %d to %d\n",
6435 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6436 INDEX_EDGE (edge_list, x)->dest->index);
6440 /* Delete the stores that LCM marked redundant, then insert the replacement stores on the edges LCM selected. */
6443 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6444 delete_store (ptr, bb);
6446 for (x = 0; x < NUM_EDGES (edge_list); x++)
6447 if (TEST_BIT (pre_insert_map[x], ptr->index))
6448 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6452 commit_edge_insertions ();
6454 free_store_memory ();
6455 free_edge_list (edge_list);
6456 remove_fake_exit_edges ();
6457 end_alias_analysis ();
6461 /* Entry point for jump bypassing optimization pass. */
6464 bypass_jumps (FILE *file)
6468 /* We do not construct an accurate cfg in functions which call
6469 setjmp, so just punt to be safe. */
6470 if (current_function_calls_setjmp)
6473 /* For calling dump_foo fns from gdb. */
6474 debug_stderr = stderr;
6477 /* Identify the basic block information for this function, including
6478 successors and predecessors. */
6479 max_gcse_regno = max_reg_num ();
6482 dump_flow_info (file);
6484 /* Return if there's nothing to do, or it is too expensive. */
6485 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
6488 gcc_obstack_init (&gcse_obstack);
6491 /* We need alias. */
6492 init_alias_analysis ();
6494 /* Record where pseudo-registers are set. This data is kept accurate
6495 during each pass. ??? We could also record hard-reg information here
6496 [since it's unchanging], however it is currently done during hash table computation.
6499 It may be tempting to compute MEM set information here too, but MEM sets
6500 will be subject to code motion one day and thus we need to compute
6501 information about memory sets when we build the hash tables. */
6503 alloc_reg_set_mem (max_gcse_regno);
6504 compute_sets (get_insns ());
6506 max_gcse_regno = max_reg_num ();
6507 alloc_gcse_mem (get_insns ());
6508 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, 1, 1);
6513 fprintf (file, "BYPASS of %s: %d basic blocks, ",
6514 current_function_name (), n_basic_blocks);
6515 fprintf (file, "%d bytes\n\n", bytes_used);
6518 obstack_free (&gcse_obstack, NULL);
6519 free_reg_set_mem ();
6521 /* We are finished with alias. */
6522 end_alias_analysis ();
6523 allocate_reg_info (max_reg_num (), FALSE, FALSE);
6528 /* Return true if the graph is too expensive to optimize. PASS is the
6529 optimization about to be performed. */
6532 is_too_expensive (const char *pass)
6534 /* Trying to perform global optimizations on flow graphs which have
6535 a high connectivity will take a long time and is unlikely to be
6536 particularly useful.
6538 In normal circumstances a cfg should have about twice as many
6539 edges as blocks. But we do not want to punish small functions
6540 which have a couple of switch statements. Rather than simply
6541 thresholding the number of blocks, we use something with a more
6542 graceful degradation. */
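/* For example, a function with 1,000 basic blocks is rejected only once
   it has more than 20000 + 4 * 1000 = 24,000 edges, i.e. an average of
   24 edges per block.  */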
6543 if (n_edges > 20000 + n_basic_blocks * 4)
6545 if (warn_disabled_optimization)
6546 warning ("%s: %d basic blocks and %d edges/basic block",
6547 pass, n_basic_blocks, n_edges / n_basic_blocks);
6552 /* If allocating memory for the cprop bitmap would take up too much
6553 storage it's better just to disable the optimization. */
6555 * SBITMAP_SET_SIZE (max_reg_num ())
6556 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6558 if (warn_disabled_optimization)
6559 warning ("%s: %d basic blocks and %d registers",
6560 pass, n_basic_blocks, max_reg_num ());
6568 #include "gt-gcse.h"