/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
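   As an illustrative source-level sketch (not from the original sources):
   in

     do
       x = a + b;   (a and b loop invariant)
     while (cond);

   the computation of a + b is partially redundant along the loop's back
   edge, so PRE may hoist it into the preheader using a new pseudo:

     t = a + b;
     do
       x = t;
     while (cond);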
   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************
   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
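   A sketch of steps 3-5 (illustrative only, not from the original sources).
   Suppose a + b is computed in block B1, and B3 joins paths from B1 and B2,
   so the computation in B3 is only partially redundant:

     B1: x = a + b;   B2: (no computation)   B3 (join of B1, B2): y = a + b;

   Step 3 deletes the computation in B3 and replaces it with a copy from a
   new pseudo t (so the insertions below know which reg to use).  Step 4
   inserts t = a + b on the path through B2, making the expression fully
   redundant.  Step 5 inserts the copy t = x after the occurrence in B1 so
   the value reaches B3:

     B1: x = a + b; t = x;   B2: t = a + b;   B3: y = t;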
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  unsigned int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is the expression hash table or the copy
     propagation one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
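/* For reference, a lookup in these tables hashes the key and then walks
   the `next_same_hash' chain of the selected bucket; this is a sketch of
   the pattern used by the insertion/lookup routines below:

     struct expr *expr;
     for (expr = table->table[hash]; expr != NULL; expr = expr->next_same_hash)
       if (expr_equiv_p (expr->expr, x))
	 break;

   `bitmap_index' then names the expression's bit in the dataflow
   bitmaps.  */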
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
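/* Example (illustrative): if the insn stream carries UIDs 3 (note),
   5 (insn), 9 (insn), 14 (insn), the real insns get CUIDs 0, 1, 2 with no
   gaps, so CUIDs can index dense arrays and be compared directly to
   establish ordering within a block:

     if (INSN_CUID (a) < INSN_CUID (b))
       ... a comes before b ...  */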
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
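/* A sketch (illustrative, not from the original sources) of how
   `reg_set_table' is consumed: to find every block that kills REGNO, walk
   its list of setters instead of scanning every basic block:

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... BLOCK_NUM (r->insn) is a block that sets REGNO ...  */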
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself.  (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance.  (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */
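/* Illustrative source-level example of store motion (not from the original
   sources): in

     do
       *p = x++;   (nothing else references *p in the loop)
     while (cond);

   only the last store is observable, so the store can be moved towards
   the exit block via a reaching register:

     do
       t = x++;
     while (cond);
     *p = t;

   which is why each entry below records both the loads and the stores
   seen for one MEM.  */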
struct ls_expr
{
  struct expr *expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr *next;		/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr *pre_ldst_mems = NULL;
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx *modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx *canon_modify_mem_list;
static bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr *ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr *find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr *first_ls_expr (void);
static inline struct ls_expr *next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, 0, 0);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
	/* Do nothing.  */ ;
      else
	{
	  timevar_push (TV_PRE);
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	  timevar_pop (TV_PRE);
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
	{
	  timevar_push (TV_HOIST);
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	  timevar_pop (TV_HOIST);
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, 1, 0);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
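/* A small worked example (illustrative): for a block

     x = a + b;
     b = 7;

   the expression a + b is locally anticipatable (its ANTLOC bit is set)
   since its operands are unmodified before the computation, but it is not
   locally available (COMP clear) because b changes afterwards, and it is
   not transparent (TRANSP clear) because the block sets one of its
   operands.  */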
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;


/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}
/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
		   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
		      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;

	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	  antic_occr->deleted_p = 0;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer this occurrence to the currently recorded one.  We want
	   the last one in the block and the block is scanned from start
	   to end.  */
	avail_occr->insn = insn;
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);

	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;

	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	  avail_occr->deleted_p = 0;
	}
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
      cur_occr->deleted_p = 0;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers a constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
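/* Illustrative examples: (compare (const_int 2) (const_int 3)) and
   (compare (reg 70) (reg 70)) in a non-float mode both count as "constant"
   here even though CONSTANT_P would reject a COMPARE, while an ordinary
   (symbol_ref "x") is accepted by the final CONSTANT_P test.  */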
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
	 see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
	  && gcse_constant_p (XEXP (note, 0)))
	src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (dest))
	  /* GCSE commonly inserts instruction after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || ! MEM_P (XEXP (note, 0))))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn or if this is not the only SET in
	     this insn.  */
	  int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = (oprs_available_p (src, insn)
			 && ! JUMP_P (insn));

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
	}

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((REG_P (src)
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p (GET_MODE (dest))
		    && REGNO (src) != regno)
		   || gcse_constant_p (src))
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BB_END (BLOCK_FOR_INSN (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn, table);
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p (GET_MODE (src))
	  /* GCSE commonly inserts instruction after the insn.  We can't
	     do that easily for EH_REGION notes so disable GCSE on these
	     for now.  */
	  && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	  /* Is SET_DEST something we want to gcse?  */
	  && want_to_gcse_p (dest)
	  /* Don't CSE a nop.  */
	  && ! set_noop_p (pat)
	  /* Don't GCSE if it has attached REG_EQUIV note.
	     At this point only function parameters should have
	     REG_EQUIV notes and if the argument slot is used somewhere
	     explicitly, it means address of parameter has been taken,
	     so we should not extend the lifetime of the pseudo.  */
	  && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
	      || ! MEM_P (XEXP (note, 0))))
	{
	  /* Stores are never anticipatable.  */
	  int antic_p = 0;
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  It's also not
	     available if this is a branch, because we can't insert
	     a set after the branch.  */
	  int avail_p = oprs_available_p (dest, insn)
			&& ! JUMP_P (insn);

	  /* Record the memory expression (DEST) in the hash table.  */
	  insert_expr_in_table (dest, GET_MODE (dest), insn,
				antic_p, avail_p, table);
	}
    }
}
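/* Illustrative example for the flag_gcse_las case above (register numbers
   hypothetical): in

     *p = r5;
     ...            (nothing modifies *p or p in between)
     r9 = *p;

   recording the MEM *p as available in r5 lets the later load be replaced
   by a register copy r9 = r5.  */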
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
		struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  hash_scan_set (x, insn, table);
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn, table);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}
1924 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
1927 /* Flattened out table, so it's printed in proper order. */
1928 struct expr **flat_table;
1929 unsigned int *hash_val;
1932 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
1933 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
1935 for (i = 0; i < (int) table->size; i++)
1936 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1938 flat_table[expr->bitmap_index] = expr;
1939 hash_val[expr->bitmap_index] = i;
1942 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
1943 name, table->size, table->n_elems);
1945 for (i = 0; i < (int) table->n_elems; i++)
1946 if (flat_table[i] != 0)
1948 expr = flat_table[i];
1949 fprintf (file, "Index %d (hash value %d)\n ",
1950 expr->bitmap_index, hash_val[i]);
1951 print_rtl (file, expr->expr);
1952 fprintf (file, "\n");
1955 fprintf (file, "\n");
1961 /* Record register first/last/block set information for REGNO in INSN.
1963 first_set records the first place in the block where the register
1964 is set and is used to compute "anticipatability".
1966 last_set records the last place in the block where the register
1967 is set and is used to compute "availability".
1969 last_bb records the block for which first_set and last_set are
1970 valid, as a quick test to invalidate them.
1972 reg_set_in_block records whether the register is set in the block
1973 and is used to compute "transparency". */
1976 record_last_reg_set_info (rtx insn, int regno)
struct reg_avail_info *info = &reg_avail_info[regno];
1979 int cuid = INSN_CUID (insn);
1981 info->last_set = cuid;
1982 if (info->last_bb != current_bb)
1984 info->last_bb = current_bb;
1985 info->first_set = cuid;
1986 SET_BIT (reg_set_in_block[current_bb->index], regno);
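/* A minimal sketch (illustrative, not quoted from the sources) of how
   FIRST_SET and LAST_SET answer the two queries this pass needs.  Both
   assume INFO->last_bb has already been matched against the insn's
   block, as the function above guarantees:

     antic_p = info->first_set >= INSN_CUID (insn);   REGNO not set before INSN
     avail_p = info->last_set < INSN_CUID (insn);     REGNO not set after INSN  */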
1991 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1992 Note we store a pair of elements in the list, so they have to be
1993 taken off pairwise. */
1996 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
1999 rtx dest_addr, insn;
2002 while (GET_CODE (dest) == SUBREG
2003 || GET_CODE (dest) == ZERO_EXTRACT
2004 || GET_CODE (dest) == SIGN_EXTRACT
2005 || GET_CODE (dest) == STRICT_LOW_PART)
2006 dest = XEXP (dest, 0);
2008 /* If DEST is not a MEM, then it will not conflict with a load. Note
2009 that function calls are assumed to clobber memory, but are handled
2015 dest_addr = get_addr (XEXP (dest, 0));
2016 dest_addr = canon_rtx (dest_addr);
2017 insn = (rtx) v_insn;
2018 bb = BLOCK_NUM (insn);
2020 canon_modify_mem_list[bb] =
2021 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2022 canon_modify_mem_list[bb] =
2023 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2024 bitmap_set_bit (canon_modify_mem_list_set, bb);
2027 /* Record memory modification information for INSN. We do not actually care
2028 about the memory location(s) that are set, or even how they are set (consider
2029 a CALL_INSN). We merely need to record which insns modify memory. */
2032 record_last_mem_set_info (rtx insn)
2034 int bb = BLOCK_NUM (insn);
2036 /* load_killed_in_block_p will handle the case of calls clobbering
2038 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2039 bitmap_set_bit (modify_mem_list_set, bb);
/* Note that traversals of this loop (other than for freeing)
2044 will break after encountering a CALL_INSN. So, there's no
2045 need to insert a pair of items, as canon_list_insert does. */
2046 canon_modify_mem_list[bb] =
2047 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2048 bitmap_set_bit (canon_modify_mem_list_set, bb);
note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
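/* Illustrative layout (an assumption drawn from the code above, not a
   quote from the sources) of canon_modify_mem_list[bb] after scanning
   a call and then an ordinary store (set (mem:SI (reg A)) (reg B)):

     head -> (mem:SI (reg A)) -> canonical address of (reg A)
          -> CALL_INSN -> ...

   A call contributes a single INSN_LIST node, while a store contributes
   an EXPR_LIST pair <dest, dest_addr>; this is why consumers such as
   compute_transp check for a CALL_INSN first and otherwise take the
   list entries off two at a time.  */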
2054 /* Called from compute_hash_table via note_stores to handle one
2055 SET or CLOBBER in an insn. DATA is really the instruction in which
2056 the SET is taking place. */
2059 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2061 rtx last_set_insn = (rtx) data;
2063 if (GET_CODE (dest) == SUBREG)
2064 dest = SUBREG_REG (dest);
2067 record_last_reg_set_info (last_set_insn, REGNO (dest));
2068 else if (MEM_P (dest)
2069 /* Ignore pushes, they clobber nothing. */
2070 && ! push_operand (dest, GET_MODE (dest)))
2071 record_last_mem_set_info (last_set_insn);
2074 /* Top level function to create an expression or assignment hash table.
2076 Expression entries are placed in the hash table if
2077 - they are of the form (set (pseudo-reg) src),
2078 - src is something we want to perform GCSE on,
2079 - none of the operands are subsequently modified in the block
2081 Assignment entries are placed in the hash table if
2082 - they are of the form (set (pseudo-reg) src),
2083 - src is something we want to perform const/copy propagation on,
2084 - none of the operands or target are subsequently modified in the block
2086 Currently src must be a pseudo-reg or a const_int.
2088 TABLE is the table computed. */
2091 compute_hash_table_work (struct hash_table *table)
2095 /* While we compute the hash table we also compute a bit array of which
2096 registers are set in which blocks.
2097 ??? This isn't needed during const/copy propagation, but it's cheap to
2099 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
/* Re-cache any INSN_LIST nodes we have allocated.  */
2102 clear_modify_mem_tables ();
2103 /* Some working arrays used to track first and last set in each block. */
2104 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2106 for (i = 0; i < max_gcse_regno; ++i)
2107 reg_avail_info[i].last_bb = NULL;
2109 FOR_EACH_BB (current_bb)
2113 int in_libcall_block;
2115 /* First pass over the instructions records information used to
2116 determine when registers and memory are first and last set.
2117 ??? hard-reg reg_set_in_block computation
2118 could be moved to compute_sets since they currently don't change. */
2120 for (insn = BB_HEAD (current_bb);
2121 insn && insn != NEXT_INSN (BB_END (current_bb));
2122 insn = NEXT_INSN (insn))
2124 if (! INSN_P (insn))
2129 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2130 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2131 record_last_reg_set_info (insn, regno);
2136 note_stores (PATTERN (insn), record_last_set_info, insn);
2139 /* Insert implicit sets in the hash table. */
2141 && implicit_sets[current_bb->index] != NULL_RTX)
2142 hash_scan_set (implicit_sets[current_bb->index],
2143 BB_HEAD (current_bb), table);
2145 /* The next pass builds the hash table. */
2147 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2148 insn && insn != NEXT_INSN (BB_END (current_bb));
2149 insn = NEXT_INSN (insn))
2152 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2153 in_libcall_block = 1;
2154 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2155 in_libcall_block = 0;
2156 hash_scan_insn (insn, table, in_libcall_block);
2157 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2158 in_libcall_block = 0;
2162 free (reg_avail_info);
2163 reg_avail_info = NULL;
2166 /* Allocate space for the set/expr hash TABLE.
2167 N_INSNS is the number of instructions in the function.
2168 It is used to determine the number of buckets to use.
2169 SET_P determines whether set or expression table will
2173 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2177 table->size = n_insns / 4;
2178 if (table->size < 11)
2181 /* Attempt to maintain efficient use of hash table.
2182 Making it an odd number is simplest for now.
2183 ??? Later take some measurements. */
2185 n = table->size * sizeof (struct expr *);
2186 table->table = gmalloc (n);
2187 table->set_p = set_p;
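/* Sizing sketch (illustrative; the statements forcing the minimum and
   the oddness described in the comment above are assumptions consistent
   with that comment, not quoted code):

     table->size = n_insns / 4;
     if (table->size < 11)
       table->size = 11;
     table->size |= 1;        force an odd bucket count

   An odd bucket count keeps hash values better distributed when the
   hash function mixes in even strides.  */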
2190 /* Free things allocated by alloc_hash_table. */
2193 free_hash_table (struct hash_table *table)
2195 free (table->table);
/* Compute the hash TABLE, either for copy/const propagation or for
   the expression hash table.  */
2202 compute_hash_table (struct hash_table *table)
2204 /* Initialize count of number of entries in hash table. */
2206 memset (table->table, 0, table->size * sizeof (struct expr *));
2208 compute_hash_table_work (table);
2211 /* Expression tracking support. */
/* Look up REGNO in the set TABLE.  The result is a pointer to the
2214 table entry, or NULL if not found. */
2216 static struct expr *
2217 lookup_set (unsigned int regno, struct hash_table *table)
2219 unsigned int hash = hash_set (regno, table->size);
2222 expr = table->table[hash];
2224 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2225 expr = expr->next_same_hash;
2230 /* Return the next entry for REGNO in list EXPR. */
2232 static struct expr *
2233 next_set (unsigned int regno, struct expr *expr)
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2242 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2243 types may be mixed. */
2246 free_insn_expr_list_list (rtx *listp)
2250 for (list = *listp; list ; list = next)
2252 next = XEXP (list, 1);
2253 if (GET_CODE (list) == EXPR_LIST)
2254 free_EXPR_LIST_node (list);
2256 free_INSN_LIST_node (list);
2262 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2264 clear_modify_mem_tables (void)
2269 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2271 free_INSN_LIST_list (modify_mem_list + i);
2273 bitmap_clear (modify_mem_list_set);
2275 EXECUTE_IF_SET_IN_BITMAP (canon_modify_mem_list_set, 0, i, bi)
2277 free_insn_expr_list_list (canon_modify_mem_list + i);
2279 bitmap_clear (canon_modify_mem_list_set);
2282 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2285 free_modify_mem_tables (void)
2287 clear_modify_mem_tables ();
2288 free (modify_mem_list);
2289 free (canon_modify_mem_list);
2290 modify_mem_list = 0;
2291 canon_modify_mem_list = 0;
2294 /* Reset tables used to keep track of what's still available [since the
2295 start of the block]. */
2298 reset_opr_set_tables (void)
2300 /* Maintain a bitmap of which regs have been set since beginning of
2302 CLEAR_REG_SET (reg_set_bitmap);
2304 /* Also keep a record of the last instruction to modify memory.
2305 For now this is very trivial, we only record whether any memory
2306 location has been modified. */
2307 clear_modify_mem_tables ();
2310 /* Return nonzero if the operands of X are not set before INSN in
2311 INSN's basic block. */
2314 oprs_not_set_p (rtx x, rtx insn)
2323 code = GET_CODE (x);
2339 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2340 INSN_CUID (insn), x, 0))
2343 return oprs_not_set_p (XEXP (x, 0), insn);
2346 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2352 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2356 /* If we are about to do the last recursive call
2357 needed at this level, change it into iteration.
2358 This function is called enough to be worth it. */
2360 return oprs_not_set_p (XEXP (x, i), insn);
2362 if (! oprs_not_set_p (XEXP (x, i), insn))
2365 else if (fmt[i] == 'E')
2366 for (j = 0; j < XVECLEN (x, i); j++)
2367 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
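/* The tail-recursion-to-iteration idiom used above (and again in
   compute_transp and find_used_regs below) has this shape; a minimal
   stand-alone sketch with names invented for illustration:

     static int
     walk (rtx x)
     {
       int i;
       const char *fmt = GET_RTX_FORMAT (GET_CODE (x));

       ...handle the leaf codes of X first...

       for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
         if (fmt[i] == 'e')
           {
             if (i == 0)
               return walk (XEXP (x, 0));    last operand: a tail call
             if (! walk (XEXP (x, i)))
               return 0;
           }
       return 1;
     }

   The tail position saves stack on the deep, right-leaning RTL chains
   these walkers commonly see; void walkers such as compute_transp use
   a "goto repeat" for the same effect, since (as noted there) GCC of
   this era couldn't eliminate the tail call without a return value.  */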
2374 /* Mark things set by a CALL. */
2377 mark_call (rtx insn)
2379 if (! CONST_OR_PURE_CALL_P (insn))
2380 record_last_mem_set_info (insn);
2383 /* Mark things set by a SET. */
2386 mark_set (rtx pat, rtx insn)
2388 rtx dest = SET_DEST (pat);
2390 while (GET_CODE (dest) == SUBREG
2391 || GET_CODE (dest) == ZERO_EXTRACT
2392 || GET_CODE (dest) == SIGN_EXTRACT
2393 || GET_CODE (dest) == STRICT_LOW_PART)
2394 dest = XEXP (dest, 0);
2397 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2398 else if (MEM_P (dest))
2399 record_last_mem_set_info (insn);
2401 if (GET_CODE (SET_SRC (pat)) == CALL)
2405 /* Record things set by a CLOBBER. */
2408 mark_clobber (rtx pat, rtx insn)
2410 rtx clob = XEXP (pat, 0);
2412 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2413 clob = XEXP (clob, 0);
2416 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2418 record_last_mem_set_info (insn);
2421 /* Record things set by INSN.
2422 This data is used by oprs_not_set_p. */
2425 mark_oprs_set (rtx insn)
2427 rtx pat = PATTERN (insn);
2430 if (GET_CODE (pat) == SET)
2431 mark_set (pat, insn);
2432 else if (GET_CODE (pat) == PARALLEL)
2433 for (i = 0; i < XVECLEN (pat, 0); i++)
2435 rtx x = XVECEXP (pat, 0, i);
2437 if (GET_CODE (x) == SET)
2439 else if (GET_CODE (x) == CLOBBER)
2440 mark_clobber (x, insn);
2441 else if (GET_CODE (x) == CALL)
2445 else if (GET_CODE (pat) == CLOBBER)
2446 mark_clobber (pat, insn);
2447 else if (GET_CODE (pat) == CALL)
2452 /* Compute copy/constant propagation working variables. */
2454 /* Local properties of assignments. */
2455 static sbitmap *cprop_pavloc;
2456 static sbitmap *cprop_absaltered;
2458 /* Global properties of assignments (computed from the local properties). */
2459 static sbitmap *cprop_avin;
2460 static sbitmap *cprop_avout;
2462 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2463 basic blocks. N_SETS is the number of sets. */
2466 alloc_cprop_mem (int n_blocks, int n_sets)
2468 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2469 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2471 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2472 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2475 /* Free vars used by copy/const propagation. */
2478 free_cprop_mem (void)
2480 sbitmap_vector_free (cprop_pavloc);
2481 sbitmap_vector_free (cprop_absaltered);
2482 sbitmap_vector_free (cprop_avin);
2483 sbitmap_vector_free (cprop_avout);
2486 /* For each block, compute whether X is transparent. X is either an
2487 expression or an assignment [though we don't care which, for this context
2488 an assignment is treated as an expression]. For each block where an
2489 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
2493 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
2501 /* repeat is used to turn tail-recursion into iteration since GCC
2502 can't do it when there's no return value. */
2508 code = GET_CODE (x);
2514 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2517 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2518 SET_BIT (bmap[bb->index], indx);
2522 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2523 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
2528 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2531 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2532 RESET_BIT (bmap[bb->index], indx);
2536 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2537 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
2546 rtx list_entry = canon_modify_mem_list[bb->index];
2550 rtx dest, dest_addr;
2552 if (CALL_P (XEXP (list_entry, 0)))
2555 SET_BIT (bmap[bb->index], indx);
2557 RESET_BIT (bmap[bb->index], indx);
2560 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2561 Examine each hunk of memory that is modified. */
2563 dest = XEXP (list_entry, 0);
2564 list_entry = XEXP (list_entry, 1);
2565 dest_addr = XEXP (list_entry, 0);
2567 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2568 x, rtx_addr_varies_p))
2571 SET_BIT (bmap[bb->index], indx);
2573 RESET_BIT (bmap[bb->index], indx);
2576 list_entry = XEXP (list_entry, 1);
2599 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2603 /* If we are about to do the last recursive call
2604 needed at this level, change it into iteration.
2605 This function is called enough to be worth it. */
2612 compute_transp (XEXP (x, i), indx, bmap, set_p);
2614 else if (fmt[i] == 'E')
2615 for (j = 0; j < XVECLEN (x, i); j++)
2616 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
2620 /* Top level routine to do the dataflow analysis needed by copy/const
2624 compute_cprop_data (void)
2626 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2627 compute_available (cprop_pavloc, cprop_absaltered,
2628 cprop_avout, cprop_avin);
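/* The dataflow problem solved here, in equation form (an illustrative
   summary of compute_available, not quoted from it):

     AVOUT[b] = PAVLOC[b] | (AVIN[b] & ~ABSALTERED[b])
     AVIN[b]  = intersection of AVOUT[p] over all predecessors p of b,
                with AVIN[entry] = the empty set

   iterated to a fixed point.  A set reaching a use this way is exactly
   the "available at the start of the block" test made by
   find_avail_set below.  */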
2631 /* Copy/constant propagation. */
2633 /* Maximum number of register uses in an insn that we handle. */
2636 /* Table of uses found in an insn.
2637 Allocated statically to avoid alloc/free complexity and overhead. */
2638 static struct reg_use reg_use_table[MAX_USES];
2640 /* Index into `reg_use_table' while building it. */
2641 static int reg_use_count;
2643 /* Set up a list of register numbers used in INSN. The found uses are stored
2644 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2645 and contains the number of uses in the table upon exit.
2647 ??? If a register appears multiple times we will record it multiple times.
2648 This doesn't hurt anything but it will slow things down. */
2651 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2658 /* repeat is used to turn tail-recursion into iteration since GCC
2659 can't do it when there's no return value. */
2664 code = GET_CODE (x);
2667 if (reg_use_count == MAX_USES)
2670 reg_use_table[reg_use_count].reg_rtx = x;
2674 /* Recursively scan the operands of this expression. */
2676 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2680 /* If we are about to do the last recursive call
2681 needed at this level, change it into iteration.
2682 This function is called enough to be worth it. */
2689 find_used_regs (&XEXP (x, i), data);
2691 else if (fmt[i] == 'E')
2692 for (j = 0; j < XVECLEN (x, i); j++)
2693 find_used_regs (&XVECEXP (x, i, j), data);
2697 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
Returns nonzero if successful.  */
2701 try_replace_reg (rtx from, rtx to, rtx insn)
2703 rtx note = find_reg_equal_equiv_note (insn);
2706 rtx set = single_set (insn);
2708 validate_replace_src_group (from, to, insn);
2709 if (num_changes_pending () && apply_change_group ())
2712 /* Try to simplify SET_SRC if we have substituted a constant. */
2713 if (success && set && CONSTANT_P (to))
2715 src = simplify_rtx (SET_SRC (set));
2718 validate_change (insn, &SET_SRC (set), src, 0);
2721 /* If there is already a NOTE, update the expression in it with our
2724 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
2726 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
/* If the above failed and this is a single set, try to simplify the source of
2729 the set given our substitution. We could perhaps try this for multiple
2730 SETs, but it probably won't buy us anything. */
2731 src = simplify_replace_rtx (SET_SRC (set), from, to);
2733 if (!rtx_equal_p (src, SET_SRC (set))
2734 && validate_change (insn, &SET_SRC (set), src, 0))
2737 /* If we've failed to do replacement, have a single SET, don't already
2738 have a note, and have no special SET, add a REG_EQUAL note to not
2739 lose information. */
2740 if (!success && note == 0 && set != 0
2741 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
2742 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
2743 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
/* The REG_EQUAL note may get simplified into a register, which we do
   not allow; remove the note in that case.  This ought not to happen,
   because the code above ought to have synthesized a reg-reg move, but
   be on the safe side.  */
2750 if (note && REG_P (XEXP (note, 0)))
2751 remove_note (insn, note);
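/* Worked example (illustrative): propagating FROM = (reg 5) with
   TO = (const_int 4) into

     (set (reg 7) (plus:SI (reg 5) (const_int 1)))

   first rewrites the source to (plus:SI (const_int 4) (const_int 1));
   since TO is a constant, the simplify_rtx path above then folds the
   source to (const_int 5) before validate_change installs it.  */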
/* Find a set of register REGNO that is available on entry to INSN's
   block.  Returns NULL if no such set is found.  */
2759 static struct expr *
2760 find_avail_set (int regno, rtx insn)
2762 /* SET1 contains the last set found that can be returned to the caller for
2763 use in a substitution. */
2764 struct expr *set1 = 0;
2766 /* Loops are not possible here. To get a loop we would need two sets
2767 available at the start of the block containing INSN. i.e. we would
2768 need two sets like this available at the start of the block:
2770 (set (reg X) (reg Y))
2771 (set (reg Y) (reg X))
This cannot happen since the set of (reg Y) would have killed the
2774 set of (reg X) making it unavailable at the start of this block. */
2778 struct expr *set = lookup_set (regno, &set_hash_table);
2780 /* Find a set that is available at the start of the block
2781 which contains INSN. */
2784 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2786 set = next_set (regno, set);
2789 /* If no available set was found we've reached the end of the
2790 (possibly empty) copy chain. */
2794 gcc_assert (GET_CODE (set->expr) == SET);
2796 src = SET_SRC (set->expr);
2798 /* We know the set is available.
2799 Now check that SRC is ANTLOC (i.e. none of the source operands
2800 have changed since the start of the block).
2802 If the source operand changed, we may still use it for the next
2803 iteration of this loop, but we may not use it for substitutions. */
2805 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2808 /* If the source of the set is anything except a register, then
2809 we have reached the end of the copy chain. */
2813 /* Follow the copy chain, i.e. start another iteration of the loop
2814 and see if we have an available copy into SRC. */
2815 regno = REGNO (src);
2818 /* SET1 holds the last set that was available and anticipatable at
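/* Chain example (illustrative): if the sets

     (set (reg 3) (reg 2))
     (set (reg 2) (const_int 7))

   are both available on entry to INSN's block and unchanged within it,
   a query for reg 3 records the copy, follows the chain to reg 2, and
   ends up returning the constant set, so the caller substitutes 7
   directly for uses of reg 3.  */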
2823 /* Subroutine of cprop_insn that tries to propagate constants into
2824 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2825 it is the instruction that immediately precedes JUMP, and must be a
2826 single SET of a register. FROM is what we will try to replace,
2827 SRC is the constant we will try to substitute for it. Returns nonzero
2828 if a change was made. */
2831 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2833 rtx new, set_src, note_src;
2834 rtx set = pc_set (jump);
2835 rtx note = find_reg_equal_equiv_note (jump);
2839 note_src = XEXP (note, 0);
2840 if (GET_CODE (note_src) == EXPR_LIST)
2841 note_src = NULL_RTX;
2843 else note_src = NULL_RTX;
2845 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2846 set_src = note_src ? note_src : SET_SRC (set);
/* First substitute the SETCC condition into the JUMP instruction,
   then substitute the given values into this expanded JUMP.  */
2850 if (setcc != NULL_RTX
2851 && !modified_between_p (from, setcc, jump)
2852 && !modified_between_p (src, setcc, jump))
2855 rtx setcc_set = single_set (setcc);
2856 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2857 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2858 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2859 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2865 new = simplify_replace_rtx (set_src, from, src);
2867 /* If no simplification can be made, then try the next register. */
2868 if (rtx_equal_p (new, SET_SRC (set)))
/* If this is now a no-op, delete it; otherwise it must be a valid insn.  */
/* Ensure the value computed inside the jump insn is equivalent
   to the one computed by setcc.  */
2878 if (setcc && modified_in_p (new, setcc))
2880 if (! validate_change (jump, &SET_SRC (set), new, 0))
2882 /* When (some) constants are not valid in a comparison, and there
2883 are two registers to be replaced by constants before the entire
2884 comparison can be folded into a constant, we need to keep
2885 intermediate information in REG_EQUAL notes. For targets with
2886 separate compare insns, such notes are added by try_replace_reg.
2887 When we have a combined compare-and-branch instruction, however,
2888 we need to attach a note to the branch itself to make this
2889 optimization work. */
2891 if (!rtx_equal_p (new, note_src))
2892 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2896 /* Remove REG_EQUAL note after simplification. */
2898 remove_note (jump, note);
2900 /* If this has turned into an unconditional jump,
2901 then put a barrier after it so that the unreachable
2902 code will be deleted. */
2903 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
2904 emit_barrier_after (jump);
2908 /* Delete the cc0 setter. */
2909 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2910 delete_insn (setcc);
2913 run_jump_opt_after_gcse = 1;
2915 global_const_prop_count++;
2916 if (gcse_file != NULL)
2919 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2920 REGNO (from), INSN_UID (jump));
2921 print_rtl (gcse_file, src);
2922 fprintf (gcse_file, "\n");
2924 purge_dead_edges (bb);
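/* Worked example (illustrative): with SETCC

     (set (reg 80) (lt:SI (reg 70) (const_int 10)))

   and JUMP

     (set (pc) (if_then_else (ne (reg 80) (const_int 0))
                             (label_ref 23) (pc)))

   propagating FROM = (reg 70), SRC = (const_int 3) first expands the
   SETCC condition into the jump, giving
   (if_then_else (lt (const_int 3) (const_int 10)) ...), which
   simplify_replace_rtx folds to (label_ref 23); the jump becomes
   unconditional, and the barrier emitted above lets the dead arm be
   removed.  */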
2930 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
2934 /* Check for reg or cc0 setting instructions followed by
2935 conditional branch instructions first. */
2937 && (sset = single_set (insn)) != NULL
2939 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2941 rtx dest = SET_DEST (sset);
2942 if ((REG_P (dest) || CC0_P (dest))
2943 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2947 /* Handle normal insns next. */
2948 if (NONJUMP_INSN_P (insn)
2949 && try_replace_reg (from, to, insn))
2952 /* Try to propagate a CONST_INT into a conditional jump.
2953 We're pretty specific about what we will handle in this
2954 code, we can extend this as necessary over time.
2956 Right now the insn in question must look like
2957 (set (pc) (if_then_else ...)) */
2958 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2959 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2963 /* Perform constant and copy propagation on INSN.
2964 The result is nonzero if a change was made. */
2967 cprop_insn (rtx insn, int alter_jumps)
2969 struct reg_use *reg_used;
2977 note_uses (&PATTERN (insn), find_used_regs, NULL);
2979 note = find_reg_equal_equiv_note (insn);
2981 /* We may win even when propagating constants into notes. */
2983 find_used_regs (&XEXP (note, 0), NULL);
for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2986 reg_used++, reg_use_count--)
2988 unsigned int regno = REGNO (reg_used->reg_rtx);
2992 /* Ignore registers created by GCSE.
2993 We do this because ... */
2994 if (regno >= max_gcse_regno)
2997 /* If the register has already been set in this block, there's
2998 nothing we can do. */
2999 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
3002 /* Find an assignment that sets reg_used and is available
3003 at the start of the block. */
3004 set = find_avail_set (regno, insn);
3009 /* ??? We might be able to handle PARALLELs. Later. */
3010 gcc_assert (GET_CODE (pat) == SET);
3012 src = SET_SRC (pat);
3014 /* Constant propagation. */
3015 if (gcse_constant_p (src))
3017 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
3020 global_const_prop_count++;
3021 if (gcse_file != NULL)
3023 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
3024 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
3025 print_rtl (gcse_file, src);
3026 fprintf (gcse_file, "\n");
3028 if (INSN_DELETED_P (insn))
3032 else if (REG_P (src)
3033 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3034 && REGNO (src) != regno)
3036 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3039 global_copy_prop_count++;
3040 if (gcse_file != NULL)
3042 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
3043 regno, INSN_UID (insn));
3044 fprintf (gcse_file, " with reg %d\n", REGNO (src));
3047 /* The original insn setting reg_used may or may not now be
3048 deletable. We leave the deletion to flow. */
3049 /* FIXME: If it turns out that the insn isn't deletable,
3050 then we may have unnecessarily extended register lifetimes
3051 and made things worse. */
3059 /* Like find_used_regs, but avoid recording uses that appear in
3060 input-output contexts such as zero_extract or pre_dec. This
3061 restricts the cases we consider to those for which local cprop
3062 can legitimately make replacements. */
3065 local_cprop_find_used_regs (rtx *xptr, void *data)
3072 switch (GET_CODE (x))
3076 case STRICT_LOW_PART:
3085 /* Can only legitimately appear this early in the context of
3086 stack pushes for function arguments, but handle all of the
3087 codes nonetheless. */
3091 /* Setting a subreg of a register larger than word_mode leaves
3092 the non-written words unchanged. */
3093 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3101 find_used_regs (xptr, data);
3104 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3105 their REG_EQUAL notes need updating. */
3108 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
3110 rtx newreg = NULL, newcnst = NULL;
3112 /* Rule out USE instructions and ASM statements as we don't want to
3113 change the hard registers mentioned. */
3115 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3116 || (GET_CODE (PATTERN (insn)) != USE
3117 && asm_noperands (PATTERN (insn)) < 0)))
3119 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3120 struct elt_loc_list *l;
3124 for (l = val->locs; l; l = l->next)
3126 rtx this_rtx = l->loc;
3129 /* Don't CSE non-constant values out of libcall blocks. */
3130 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3133 if (gcse_constant_p (this_rtx))
3135 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
/* Don't copy propagate if it has an attached REG_EQUIV note.
   At this point only function parameters should have
   REG_EQUIV notes, and if the argument slot is used somewhere
   explicitly, it means the address of the parameter has been taken,
   so we should not extend the lifetime of the pseudo.  */
3141 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3142 || ! MEM_P (XEXP (note, 0))))
3145 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
/* If we find a case where we can't fix the retval REG_EQUAL notes to
   match the new register, we either have to abandon this replacement
   or fix delete_trivially_dead_insns to preserve the setting insn,
   or make it delete the REG_EQUAL note, and fix up all passes that
   require the REG_EQUAL note there.  */
3154 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3155 gcc_assert (adjusted);
3157 if (gcse_file != NULL)
3159 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3161 fprintf (gcse_file, "insn %d with constant ",
3163 print_rtl (gcse_file, newcnst);
3164 fprintf (gcse_file, "\n");
3166 local_const_prop_count++;
3169 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3171 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3172 if (gcse_file != NULL)
3175 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3176 REGNO (x), INSN_UID (insn));
3177 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
3179 local_copy_prop_count++;
3186 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3187 their REG_EQUAL notes need updating to reflect that OLDREG has been
3188 replaced with NEWVAL in INSN. Return true if all substitutions could
3191 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3195 while ((end = *libcall_sp++))
3197 rtx note = find_reg_equal_equiv_note (end);
3204 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3208 note = find_reg_equal_equiv_note (end);
3211 if (reg_mentioned_p (newval, XEXP (note, 0)))
3214 while ((end = *libcall_sp++));
3218 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3224 #define MAX_NESTED_LIBCALLS 9
3227 local_cprop_pass (int alter_jumps)
3230 struct reg_use *reg_used;
3231 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3232 bool changed = false;
3234 cselib_init (false);
3235 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3237 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3241 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3245 gcc_assert (libcall_sp != libcall_stack);
3246 *--libcall_sp = XEXP (note, 0);
3248 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3251 note = find_reg_equal_equiv_note (insn);
3255 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
3257 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3260 reg_used++, reg_use_count--)
3261 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3267 if (INSN_DELETED_P (insn))
3270 while (reg_use_count);
3272 cselib_process_insn (insn);
3275 /* Global analysis may get into infinite loops for unreachable blocks. */
3276 if (changed && alter_jumps)
3278 delete_unreachable_blocks ();
3279 free_reg_set_mem ();
3280 alloc_reg_set_mem (max_reg_num ());
3281 compute_sets (get_insns ());
3285 /* Forward propagate copies. This includes copies and constants. Return
3286 nonzero if a change was made. */
3289 cprop (int alter_jumps)
3295 /* Note we start at block 1. */
3296 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3298 if (gcse_file != NULL)
3299 fprintf (gcse_file, "\n");
3304 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3306 /* Reset tables used to keep track of what's still valid [since the
3307 start of the block]. */
3308 reset_opr_set_tables ();
3310 for (insn = BB_HEAD (bb);
3311 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3312 insn = NEXT_INSN (insn))
3315 changed |= cprop_insn (insn, alter_jumps);
3317 /* Keep track of everything modified by this insn. */
3318 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3319 call mark_oprs_set if we turned the insn into a NOTE. */
3320 if (! NOTE_P (insn))
3321 mark_oprs_set (insn);
3325 if (gcse_file != NULL)
3326 fprintf (gcse_file, "\n");
3331 /* Similar to get_condition, only the resulting condition must be
3332 valid at JUMP, instead of at EARLIEST.
3334 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3335 settle for the condition variable in the jump instruction being integral.
3336 We prefer to be able to record the value of a user variable, rather than
3337 the value of a temporary used in a condition. This could be solved by
recording the value of *every* register scanned by canonicalize_condition,
3339 but this would require some code reorganization. */
3342 fis_get_condition (rtx jump)
3344 return get_condition (jump, NULL, false, true);
3347 /* Check the comparison COND to see if we can safely form an implicit set from
3348 it. COND is either an EQ or NE comparison. */
3351 implicit_set_cond_p (rtx cond)
3353 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3354 rtx cst = XEXP (cond, 1);
3356 /* We can't perform this optimization if either operand might be or might
3357 contain a signed zero. */
3358 if (HONOR_SIGNED_ZEROS (mode))
3360 /* It is sufficient to check if CST is or contains a zero. We must
3361 handle float, complex, and vector. If any subpart is a zero, then
3362 the optimization can't be performed. */
3363 /* ??? The complex and vector checks are not implemented yet. We just
3364 always return zero for them. */
3365 if (GET_CODE (cst) == CONST_DOUBLE)
3368 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3369 if (REAL_VALUES_EQUAL (d, dconst0))
3376 return gcse_constant_p (cst);
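/* Example of the hazard (illustrative): after "if (x == 0.0)" the
   then-branch may still see x == -0.0, because -0.0 compares equal to
   0.0; recording the implicit set "x = 0.0" would silently change the
   sign of the zero.  Hence the HONOR_SIGNED_ZEROS check above rejects
   any constant that is or contains a zero.  */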
3379 /* Find the implicit sets of a function. An "implicit set" is a constraint
3380 on the value of a variable, implied by a conditional jump. For example,
3381 following "if (x == 2)", the then branch may be optimized as though the
3382 conditional performed an "explicit set", in this example, "x = 2". This
3383 function records the set patterns that are implicit at the start of each
3387 find_implicit_sets (void)
3389 basic_block bb, dest;
3395 /* Check for more than one successor. */
3396 if (EDGE_COUNT (bb->succs) > 1)
3398 cond = fis_get_condition (BB_END (bb));
3401 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3402 && REG_P (XEXP (cond, 0))
3403 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3404 && implicit_set_cond_p (cond))
3406 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3407 : FALLTHRU_EDGE (bb)->dest;
3409 if (dest && EDGE_COUNT (dest->preds) == 1
3410 && dest != EXIT_BLOCK_PTR)
3412 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3414 implicit_sets[dest->index] = new;
fprintf (gcse_file, "Implicit set of reg %d in ",
         REGNO (XEXP (cond, 0)));
fprintf (gcse_file, "basic block %d\n", dest->index);
3427 fprintf (gcse_file, "Found %d implicit sets\n", count);
3430 /* Perform one copy/constant propagation pass.
3431 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3432 propagation into conditional jumps. If BYPASS_JUMPS is true,
3433 perform conditional jump bypassing optimizations. */
3436 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
3440 global_const_prop_count = local_const_prop_count = 0;
3441 global_copy_prop_count = local_copy_prop_count = 0;
3443 local_cprop_pass (cprop_jumps);
3445 /* Determine implicit sets. */
3446 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
3447 find_implicit_sets ();
3449 alloc_hash_table (max_cuid, &set_hash_table, 1);
3450 compute_hash_table (&set_hash_table);
3452 /* Free implicit_sets before peak usage. */
3453 free (implicit_sets);
3454 implicit_sets = NULL;
3457 dump_hash_table (gcse_file, "SET", &set_hash_table);
3458 if (set_hash_table.n_elems > 0)
3460 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3461 compute_cprop_data ();
3462 changed = cprop (cprop_jumps);
3464 changed |= bypass_conditional_jumps ();
3468 free_hash_table (&set_hash_table);
3472 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
3473 current_function_name (), pass, bytes_used);
fprintf (gcse_file, "%d local const props, %d local copy props\n",
3475 local_const_prop_count, local_copy_prop_count);
3476 fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
3477 global_const_prop_count, global_copy_prop_count);
3479 /* Global analysis may get into infinite loops for unreachable blocks. */
3480 if (changed && cprop_jumps)
3481 delete_unreachable_blocks ();
3486 /* Bypass conditional jumps. */
3488 /* The value of last_basic_block at the beginning of the jump_bypass
3489 pass. The use of redirect_edge_and_branch_force may introduce new
3490 basic blocks, but the data flow analysis is only valid for basic
3491 block indices less than bypass_last_basic_block. */
3493 static int bypass_last_basic_block;
3495 /* Find a set of REGNO to a constant that is available at the end of basic
3496 block BB. Returns NULL if no such set is found. Based heavily upon
3499 static struct expr *
3500 find_bypass_set (int regno, int bb)
3502 struct expr *result = 0;
3507 struct expr *set = lookup_set (regno, &set_hash_table);
3511 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3513 set = next_set (regno, set);
3519 gcc_assert (GET_CODE (set->expr) == SET);
3521 src = SET_SRC (set->expr);
3522 if (gcse_constant_p (src))
3528 regno = REGNO (src);
3534 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3535 any of the instructions inserted on an edge. Jump bypassing places
3536 condition code setters on CFG edges using insert_insn_on_edge. This
3537 function is required to check that our data flow analysis is still
3538 valid prior to commit_edge_insertions. */
3541 reg_killed_on_edge (rtx reg, edge e)
3545 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3546 if (INSN_P (insn) && reg_set_p (reg, insn))
3552 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3553 basic block BB which has more than one predecessor. If not NULL, SETCC
3554 is the first instruction of BB, which is immediately followed by JUMP_INSN
3555 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3556 Returns nonzero if a change was made.
3558 During the jump bypassing pass, we may place copies of SETCC instructions
3559 on CFG edges. The following routine must be careful to pay attention to
3560 these inserted insns when performing its transformations. */
3563 bypass_block (basic_block bb, rtx setcc, rtx jump)
3568 int may_be_loop_header;
3572 insn = (setcc != NULL) ? setcc : jump;
3574 /* Determine set of register uses in INSN. */
3576 note_uses (&PATTERN (insn), find_used_regs, NULL);
3577 note = find_reg_equal_equiv_note (insn);
3579 find_used_regs (&XEXP (note, 0), NULL);
3581 may_be_loop_header = false;
3582 FOR_EACH_EDGE (e, ei, bb->preds)
3583 if (e->flags & EDGE_DFS_BACK)
3585 may_be_loop_header = true;
3590 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3594 if (e->flags & EDGE_COMPLEX)
3600 /* We can't redirect edges from new basic blocks. */
3601 if (e->src->index >= bypass_last_basic_block)
/* The irreducible loops created by redirecting edges that enter the
   loop from outside would decrease the effectiveness of some of the
   following optimizations, so prevent this.  */
3610 if (may_be_loop_header
3611 && !(e->flags & EDGE_DFS_BACK))
3617 for (i = 0; i < reg_use_count; i++)
struct reg_use *reg_used = &reg_use_table[i];
3620 unsigned int regno = REGNO (reg_used->reg_rtx);
3621 basic_block dest, old_dest;
3625 if (regno >= max_gcse_regno)
3628 set = find_bypass_set (regno, e->src->index);
3633 /* Check the data flow is valid after edge insertions. */
3634 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3637 src = SET_SRC (pc_set (jump));
3640 src = simplify_replace_rtx (src,
3641 SET_DEST (PATTERN (setcc)),
3642 SET_SRC (PATTERN (setcc)));
3644 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3645 SET_SRC (set->expr));
3647 /* Jump bypassing may have already placed instructions on
3648 edges of the CFG. We can't bypass an outgoing edge that
3649 has instructions associated with it, as these insns won't
3650 get executed if the incoming edge is redirected. */
3654 edest = FALLTHRU_EDGE (bb);
3655 dest = edest->insns.r ? NULL : edest->dest;
3657 else if (GET_CODE (new) == LABEL_REF)
3661 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3662 /* Don't bypass edges containing instructions. */
3663 FOR_EACH_EDGE (edest, ei2, bb->succs)
3664 if (edest->dest == dest && edest->insns.r)
/* Avoid unification of the edge with other edges from the original
   branch.  We would end up emitting the instruction on "both"
3677 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
3682 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3683 if (e2->dest == dest)
3693 && dest != EXIT_BLOCK_PTR)
3695 redirect_edge_and_branch_force (e, dest);
3697 /* Copy the register setter to the redirected edge.
3698 Don't copy CC0 setters, as CC0 is dead after jump. */
3701 rtx pat = PATTERN (setcc);
3702 if (!CC0_P (SET_DEST (pat)))
3703 insert_insn_on_edge (copy_insn (pat), e);
3706 if (gcse_file != NULL)
3708 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
3709 "in jump_insn %d equals constant ",
3710 regno, INSN_UID (jump));
3711 print_rtl (gcse_file, SET_SRC (set->expr));
3712 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
3713 e->src->index, old_dest->index, dest->index);
3726 /* Find basic blocks with more than one predecessor that only contain a
3727 single conditional jump. If the result of the comparison is known at
3728 compile-time from any incoming edge, redirect that edge to the
3729 appropriate target. Returns nonzero if a change was made.
3731 This function is now mis-named, because we also handle indirect jumps. */
3734 bypass_conditional_jumps (void)
3742 /* Note we start at block 1. */
3743 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3746 bypass_last_basic_block = last_basic_block;
3747 mark_dfs_back_edges ();
3750 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3751 EXIT_BLOCK_PTR, next_bb)
3753 /* Check for more than one predecessor. */
3754 if (EDGE_COUNT (bb->preds) > 1)
3757 for (insn = BB_HEAD (bb);
3758 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3759 insn = NEXT_INSN (insn))
3760 if (NONJUMP_INSN_P (insn))
3764 if (GET_CODE (PATTERN (insn)) != SET)
3767 dest = SET_DEST (PATTERN (insn));
3768 if (REG_P (dest) || CC0_P (dest))
3773 else if (JUMP_P (insn))
3775 if ((any_condjump_p (insn) || computed_jump_p (insn))
3776 && onlyjump_p (insn))
3777 changed |= bypass_block (bb, setcc, insn);
3780 else if (INSN_P (insn))
3785 /* If we bypassed any register setting insns, we inserted a
3786 copy on the redirected edge. These need to be committed. */
commit_edge_insertions ();
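/* Worked example (illustrative): suppose a block B with several
   predecessors contains only

     (set (reg 80) (eq:SI (reg 70) (const_int 0)))
     (set (pc) (if_then_else (ne (reg 80) (const_int 0))
                             (label_ref 42) (pc)))

   and that find_bypass_set proves (set (reg 70) (const_int 0)) is
   available at the end of one predecessor P.  Substituting the
   constant folds the jump source to (label_ref 42), so the edge
   P->B is redirected straight to label 42, with a copy of the reg 80
   setter placed on the redirected edge in case reg 80 is used below.  */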
3793 /* Compute PRE+LCM working variables. */
3795 /* Local properties of expressions. */
3796 /* Nonzero for expressions that are transparent in the block. */
3797 static sbitmap *transp;
3799 /* Nonzero for expressions that are transparent at the end of the block.
This is only zero for expressions killed by an abnormal critical
edge created by a call.  */
3802 static sbitmap *transpout;
3804 /* Nonzero for expressions that are computed (available) in the block. */
3805 static sbitmap *comp;
3807 /* Nonzero for expressions that are locally anticipatable in the block. */
3808 static sbitmap *antloc;
3810 /* Nonzero for expressions where this block is an optimal computation
3812 static sbitmap *pre_optimal;
3814 /* Nonzero for expressions which are redundant in a particular block. */
3815 static sbitmap *pre_redundant;
3817 /* Nonzero for expressions which should be inserted on a specific edge. */
3818 static sbitmap *pre_insert_map;
3820 /* Nonzero for expressions which should be deleted in a specific block. */
3821 static sbitmap *pre_delete_map;
3823 /* Contains the edge_list returned by pre_edge_lcm. */
3824 static struct edge_list *edge_list;
3826 /* Redundant insns. */
3827 static sbitmap pre_redundant_insns;
3829 /* Allocate vars used for PRE analysis. */
3832 alloc_pre_mem (int n_blocks, int n_exprs)
3834 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3835 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3836 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3839 pre_redundant = NULL;
3840 pre_insert_map = NULL;
3841 pre_delete_map = NULL;
3842 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3844 /* pre_insert and pre_delete are allocated later. */
3847 /* Free vars used for PRE analysis. */
3852 sbitmap_vector_free (transp);
3853 sbitmap_vector_free (comp);
3855 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3858 sbitmap_vector_free (pre_optimal);
3860 sbitmap_vector_free (pre_redundant);
3862 sbitmap_vector_free (pre_insert_map);
3864 sbitmap_vector_free (pre_delete_map);
3866 transp = comp = NULL;
3867 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3870 /* Top level routine to do the dataflow analysis needed by PRE. */
3873 compute_pre_data (void)
3875 sbitmap trapping_expr;
3879 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3880 sbitmap_vector_zero (ae_kill, last_basic_block);
3882 /* Collect expressions which might trap. */
3883 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3884 sbitmap_zero (trapping_expr);
3885 for (ui = 0; ui < expr_hash_table.size; ui++)
3888 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3889 if (may_trap_p (e->expr))
3890 SET_BIT (trapping_expr, e->bitmap_index);
/* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)  */
3903 /* If the current block is the destination of an abnormal edge, we
3904 kill all trapping expressions because we won't be able to properly
3905 place the instruction on the edge. So make them neither
3906 anticipatable nor transparent. This is fairly conservative. */
3907 FOR_EACH_EDGE (e, ei, bb->preds)
3908 if (e->flags & EDGE_ABNORMAL)
3910 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3911 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3915 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3916 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3919 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
3920 ae_kill, &pre_insert_map, &pre_delete_map);
3921 sbitmap_vector_free (antloc);
3923 sbitmap_vector_free (ae_kill);
3925 sbitmap_free (trapping_expr);
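/* In equation form (an illustrative summary of the inputs computed
   above, not quoted from the sources):

     TRANSP[b][e]    no operand of expression e is modified in block b
     COMP[b][e]      e is computed, and still available, at the end of b
     ANTLOC[b][e]    e is anticipatable at the start of b
     AE_KILL[b][e] = ~(TRANSP[b][e] | COMP[b][e])

   pre_edge_lcm turns these into PRE_INSERT_MAP, indexed by edge, and
   PRE_DELETE_MAP, indexed by block, which drive the insertion and
   deletion phases below.  */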
3930 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
3933 VISITED is a pointer to a working buffer for tracking which BB's have
3934 been visited. It is NULL for the top-level call.
3936 We treat reaching expressions that go through blocks containing the same
3937 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3938 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3939 2 as not reaching. The intent is to improve the probability of finding
3940 only one reaching expression and to reduce register lifetimes by picking
3941 the closest such expression. */
3944 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3949 FOR_EACH_EDGE (pred, ei, bb->preds)
3951 basic_block pred_bb = pred->src;
3953 if (pred->src == ENTRY_BLOCK_PTR
/* Has this predecessor already been visited?  */
3955 || visited[pred_bb->index])
3956 ;/* Nothing to do. */
3958 /* Does this predecessor generate this expression? */
3959 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3961 /* Is this the occurrence we're looking for?
3962 Note that there's only one generating occurrence per block
3963 so we just need to check the block number. */
3964 if (occr_bb == pred_bb)
3967 visited[pred_bb->index] = 1;
3969 /* Ignore this predecessor if it kills the expression. */
3970 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3971 visited[pred_bb->index] = 1;
3973 /* Neither gen nor kill. */
3976 visited[pred_bb->index] = 1;
3977 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3982 /* All paths have been checked. */
/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */
3990 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3993 char *visited = xcalloc (last_basic_block, 1);
3995 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4002 /* Given an expr, generate RTL which we can insert at the end of a BB,
4003 or on an edge. Set the block number of any insns generated to
4007 process_insert_insn (struct expr *expr)
4009 rtx reg = expr->reaching_reg;
4010 rtx exp = copy_rtx (expr->expr);
4015 /* If the expression is something that's an operand, like a constant,
4016 just copy it to a register. */
4017 if (general_operand (exp, GET_MODE (reg)))
4018 emit_move_insn (reg, exp);
4020 /* Otherwise, make a new insn to compute this expression and make sure the
4021 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4022 expression to make sure we don't have any sharing issues. */
4025 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
4027 if (insn_invalid_p (insn))
4038 /* Add EXPR to the end of basic block BB.
This is used by both PRE and code hoisting.
4042 For PRE, we want to verify that the expr is either transparent
4043 or locally anticipatable in the target block. This check makes
4044 no sense for code hoisting. */
4047 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
4049 rtx insn = BB_END (bb);
4051 rtx reg = expr->reaching_reg;
4052 int regno = REGNO (reg);
4055 pat = process_insert_insn (expr);
4056 gcc_assert (pat && INSN_P (pat));
4059 while (NEXT_INSN (pat_end) != NULL_RTX)
4060 pat_end = NEXT_INSN (pat_end);
/* If the last insn is a jump, insert EXPR in front [taking care to
   handle cc0, etc. properly].  Similarly we need to take care of
   trapping instructions in the presence of non-call exceptions.  */
4067 || (NONJUMP_INSN_P (insn)
4068 && (EDGE_COUNT (bb->succs) > 1
4069 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL)))
4074 /* It should always be the case that we can put these instructions
anywhere in the basic block when performing PRE optimizations.
4077 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
4078 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4079 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4081 /* If this is a jump table, then we can't insert stuff here. Since
4082 we know the previous real insn must be the tablejump, we insert
4083 the new instruction just before the tablejump. */
4084 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4085 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4086 insn = prev_real_insn (insn);
4089 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4090 if cc0 isn't set. */
4091 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4093 insn = XEXP (note, 0);
4096 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4097 if (maybe_cc0_setter
4098 && INSN_P (maybe_cc0_setter)
4099 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4100 insn = maybe_cc0_setter;
4103 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4104 new_insn = emit_insn_before_noloc (pat, insn);
4107 /* Likewise if the last insn is a call, as will happen in the presence
4108 of exception handling. */
4109 else if (CALL_P (insn)
4110 && (EDGE_COUNT (bb->succs) > 1 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
4112 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4113 we search backward and place the instructions before the first
4114 parameter is loaded. Do this for everyone for consistency and a
4115 presumption that we'll get better code elsewhere as well.
4117 It should always be the case that we can put these instructions
anywhere in the basic block when performing PRE optimizations.
4122 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4123 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4125 /* Since different machines initialize their parameter registers
4126 in different orders, assume nothing. Collect the set of all
4127 parameter registers. */
4128 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4130 /* If we found all the parameter loads, then we want to insert
4131 before the first parameter load.
4133 If we did not find all the parameter loads, then we might have
4134 stopped on the head of the block, which could be a CODE_LABEL.
4135 If we inserted before the CODE_LABEL, then we would be putting
4136 the insn in the wrong basic block. In that case, put the insn
4137 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4138 while (LABEL_P (insn)
4139 || NOTE_INSN_BASIC_BLOCK_P (insn))
4140 insn = NEXT_INSN (insn);
4142 new_insn = emit_insn_before_noloc (pat, insn);
4145 new_insn = emit_insn_after_noloc (pat, insn);
4151 add_label_notes (PATTERN (pat), new_insn);
4152 note_stores (PATTERN (pat), record_set_info, pat);
4156 pat = NEXT_INSN (pat);
4159 gcse_create_count++;
4163 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4164 bb->index, INSN_UID (new_insn));
4165 fprintf (gcse_file, "copying expression %d to reg %d\n",
4166 expr->bitmap_index, regno);
4170 /* Insert partially redundant expressions on edges in the CFG to make
4171 the expressions fully redundant. */
4174 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4176 int e, i, j, num_edges, set_size, did_insert = 0;
4179 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4180 if it reaches any of the deleted expressions. */
4182 set_size = pre_insert_map[0]->size;
4183 num_edges = NUM_EDGES (edge_list);
4184 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4185 sbitmap_vector_zero (inserted, num_edges);
4187 for (e = 0; e < num_edges; e++)
4190 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4192 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4194 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4196 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4197 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4199 struct expr *expr = index_map[j];
4202 /* Now look at each deleted occurrence of this expression. */
4203 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4205 if (! occr->deleted_p)
/* Insert this expression on this edge if it would
   reach the deleted occurrence in BB.  */
4210 if (!TEST_BIT (inserted[e], j))
4213 edge eg = INDEX_EDGE (edge_list, e);
4215 /* We can't insert anything on an abnormal and
4216 critical edge, so we insert the insn at the end of
4217 the previous block. There are several alternatives
detailed in Morgan's book P277 (sec 10.5) for
4219 handling this situation. This one is easiest for
4222 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4223 insert_insn_end_bb (index_map[j], bb, 0);
4226 insn = process_insert_insn (index_map[j]);
4227 insert_insn_on_edge (insn, eg);
4232 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4234 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4235 fprintf (gcse_file, "copy expression %d\n",
4236 expr->bitmap_index);
4239 update_ld_motion_stores (expr);
4240 SET_BIT (inserted[e], j);
4242 gcse_create_count++;
4249 sbitmap_vector_free (inserted);
4253 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4254 Given "old_reg <- expr" (INSN), instead of adding after it
4255 reaching_reg <- old_reg
4256 it's better to do the following:
4257 reaching_reg <- expr
4258 old_reg <- reaching_reg
4259 because this way copy propagation can discover additional PRE
4260 opportunities. But if this fails, we try the old way.
4261 When "expr" is a store, i.e.
4262 given "MEM <- old_reg", instead of adding after it
4263 reaching_reg <- old_reg
4264 it's better to add it before as follows:
4265 reaching_reg <- old_reg
4266 MEM <- reaching_reg. */
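/* For illustration only -- a sketch of the preferred rewrite, using
   hypothetical pseudo numbers (reg 100 is the old destination, reg 200
   is EXPR->REACHING_REG):

	(set (reg 100) (plus (reg 1) (reg 2)))

   becomes

	(set (reg 200) (plus (reg 1) (reg 2)))
	(set (reg 100) (reg 200))

   so that copy propagation can later replace uses of (reg 100) and
   expose further PRE opportunities.  */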
4269 pre_insert_copy_insn (struct expr *expr, rtx insn)
4271 rtx reg = expr->reaching_reg;
4272 int regno = REGNO (reg);
4273 int indx = expr->bitmap_index;
4274 rtx pat = PATTERN (insn);
4279 /* This block matches the logic in hash_scan_insn. */
4280 switch (GET_CODE (pat))
4287 /* Search through the parallel looking for the set whose
4288 source was the expression that we're interested in. */
4290 for (i = 0; i < XVECLEN (pat, 0); i++)
4292 rtx x = XVECEXP (pat, 0, i);
4293 if (GET_CODE (x) == SET
4294 && expr_equiv_p (SET_SRC (x), expr->expr))
4306 if (REG_P (SET_DEST (set)))
4308 old_reg = SET_DEST (set);
4309 /* Check if we can modify the set destination in the original insn. */
4310 if (validate_change (insn, &SET_DEST (set), reg, 0))
4312 new_insn = gen_move_insn (old_reg, reg);
4313 new_insn = emit_insn_after (new_insn, insn);
4315 /* Keep register set table up to date. */
4316 replace_one_set (REGNO (old_reg), insn, new_insn);
4317 record_one_set (regno, insn);
4321 new_insn = gen_move_insn (reg, old_reg);
4322 new_insn = emit_insn_after (new_insn, insn);
4324 /* Keep register set table up to date. */
4325 record_one_set (regno, new_insn);
4328 else /* This is possible only in case of a store to memory. */
4330 old_reg = SET_SRC (set);
4331 new_insn = gen_move_insn (reg, old_reg);
4333 /* Check if we can modify the set source in the original insn. */
4334 if (validate_change (insn, &SET_SRC (set), reg, 0))
4335 new_insn = emit_insn_before (new_insn, insn);
4337 new_insn = emit_insn_after (new_insn, insn);
4339 /* Keep register set table up to date. */
4340 record_one_set (regno, new_insn);
4343 gcse_create_count++;
4347 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4348 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4349 INSN_UID (insn), regno);
4352 /* Copy available expressions that reach the redundant expression
4353 to `reaching_reg'. */
4356 pre_insert_copies (void)
4358 unsigned int i, added_copy;
4363 /* For each available expression in the table, copy the result to
4364 `reaching_reg' if the expression reaches a deleted one.
4366 ??? The current algorithm is rather brute force.
4367 Need to do some profiling. */
4369 for (i = 0; i < expr_hash_table.size; i++)
4370 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4372 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4373 we don't want to insert a copy here because the expression may not
4374 really be redundant. So only insert an insn if the expression was
4375 deleted. This test also avoids further processing if the
4376 expression wasn't deleted anywhere. */
4377 if (expr->reaching_reg == NULL)
4380 /* Set when we add a copy for that expression. */
4383 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4385 if (! occr->deleted_p)
4388 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4390 rtx insn = avail->insn;
4392 /* No need to handle this one if handled already. */
4393 if (avail->copied_p)
4396 /* Don't handle this one if it's a redundant one. */
4397 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4400 /* Or if the expression doesn't reach the deleted one. */
4401 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4403 BLOCK_FOR_INSN (occr->insn)))
4408 /* Copy the result of avail to reaching_reg. */
4409 pre_insert_copy_insn (expr, insn);
4410 avail->copied_p = 1;
4415 update_ld_motion_stores (expr);
4419 /* Emit move from SRC to DEST noting the equivalence with expression computed in INSN. */
4422 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4425 rtx set = single_set (insn), set2;
4429 /* This should never fail since we're creating a reg->reg copy
4430 we've verified to be valid. */
4432 new = emit_insn_after (gen_move_insn (dest, src), insn);
4434 /* Note the equivalence for local CSE pass. */
4435 set2 = single_set (new);
4436 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4438 if ((note = find_reg_equal_equiv_note (insn)))
4439 eqv = XEXP (note, 0);
4441 eqv = SET_SRC (set);
4443 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
4448 /* Delete redundant computations.
4449 Deletion is done by changing the insn to copy the `reaching_reg' of
4450 the expression into the result of the SET. It is left to later passes
4451 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4453 Returns nonzero if a change is made. */
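/* An illustrative sketch with hypothetical pseudo numbers: a fully
   redundant

	(set (reg 100) (plus (reg 1) (reg 2)))

   is rewritten as the copy

	(set (reg 100) (reg 300))

   where reg 300 is the expression's reaching_reg; the passes named
   above then propagate or eliminate the copy.  */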
4464 for (i = 0; i < expr_hash_table.size; i++)
4465 for (expr = expr_hash_table.table[i];
4467 expr = expr->next_same_hash)
4469 int indx = expr->bitmap_index;
4471 /* We only need to search antic_occr since we require ANTLOC != 0. */
4474 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4476 rtx insn = occr->insn;
4478 basic_block bb = BLOCK_FOR_INSN (insn);
4480 /* We only delete insns that have a single_set. */
4481 if (TEST_BIT (pre_delete_map[bb->index], indx)
4482 && (set = single_set (insn)) != 0)
4484 /* Create a pseudo-reg to store the result of reaching
4485 expressions into. Get the mode for the new pseudo from
4486 the mode of the original destination pseudo. */
4487 if (expr->reaching_reg == NULL)
4489 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4491 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4493 occr->deleted_p = 1;
4494 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4501 "PRE: redundant insn %d (expression %d) in ",
4502 INSN_UID (insn), indx);
4503 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4504 bb->index, REGNO (expr->reaching_reg));
4513 /* Perform GCSE optimizations using PRE.
4514 This is called by one_pre_gcse_pass after all the dataflow analysis
4517 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4518 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4519 Compiler Design and Implementation.
4521 ??? A new pseudo reg is created to hold the reaching expression. The nice
4522 thing about the classical approach is that it would try to use an existing
4523 reg. If the register can't be adequately optimized [i.e. we introduce
4524 reload problems], one could add a pass here to propagate the new register through the block.
4527 ??? We don't handle single sets in PARALLELs because we're [currently] not
4528 able to copy the rest of the parallel when we insert copies to create full
4529 redundancies from partial redundancies. However, there's no reason why we
4530 can't handle PARALLELs in the cases where there are no partial redundancies. */
4537 int did_insert, changed;
4538 struct expr **index_map;
4541 /* Compute a mapping from expression number (`bitmap_index') to
4542 hash table entry. */
4544 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4545 for (i = 0; i < expr_hash_table.size; i++)
4546 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4547 index_map[expr->bitmap_index] = expr;
4549 /* Reset bitmap used to track which insns are redundant. */
4550 pre_redundant_insns = sbitmap_alloc (max_cuid);
4551 sbitmap_zero (pre_redundant_insns);
4553 /* Delete the redundant insns first so that
4554 - we know what register to use for the new insns and for the other
4555 ones with reaching expressions
4556 - we know which insns are redundant when we go to create copies */
4558 changed = pre_delete ();
4560 did_insert = pre_edge_insert (edge_list, index_map);
4562 /* In other places with reaching expressions, copy the expression to the
4563 specially allocated pseudo-reg that reaches the redundant expr. */
4564 pre_insert_copies ();
4567 commit_edge_insertions ();
4572 sbitmap_free (pre_redundant_insns);
4576 /* Top level routine to perform one PRE GCSE pass.
4578 Return nonzero if a change was made. */
4581 one_pre_gcse_pass (int pass)
4585 gcse_subst_count = 0;
4586 gcse_create_count = 0;
4588 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4589 add_noreturn_fake_exit_edges ();
4591 compute_ld_motion_mems ();
4593 compute_hash_table (&expr_hash_table);
4594 trim_ld_motion_mems ();
4596 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
4598 if (expr_hash_table.n_elems > 0)
4600 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4601 compute_pre_data ();
4602 changed |= pre_gcse ();
4603 free_edge_list (edge_list);
4608 remove_fake_exit_edges ();
4609 free_hash_table (&expr_hash_table);
4613 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4614 current_function_name (), pass, bytes_used);
4615 fprintf (gcse_file, "%d substs, %d insns created\n",
4616 gcse_subst_count, gcse_create_count);
4622 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4623 If notes are added to an insn which references a CODE_LABEL, the
4624 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4625 because the following loop optimization pass requires them. */
4627 /* ??? This is very similar to the loop.c add_label_notes function. We
4628 could probably share code here. */
4630 /* ??? If there was a jump optimization pass after gcse and before loop,
4631 then we would not need to do this here, because jump would add the
4632 necessary REG_LABEL notes. */
4635 add_label_notes (rtx x, rtx insn)
4637 enum rtx_code code = GET_CODE (x);
4641 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4643 /* This code used to ignore labels that referred to dispatch tables to
4644 avoid flow generating (slightly) worse code.
4646 We no longer ignore such label references (see LABEL_REF handling in
4647 mark_jump_label for additional information). */
4649 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
4651 if (LABEL_P (XEXP (x, 0)))
4652 LABEL_NUSES (XEXP (x, 0))++;
4656 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4659 add_label_notes (XEXP (x, i), insn);
4660 else if (fmt[i] == 'E')
4661 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4662 add_label_notes (XVECEXP (x, i, j), insn);
4666 /* Compute transparent outgoing information for each block.
4668 An expression is transparent to an edge unless it is killed by
4669 the edge itself. This can only happen with abnormal control flow,
4670 when the edge is traversed through a call. This happens with
4671 non-local labels and exceptions.
4673 This would not be necessary if we split the edge. While this is
4674 normally impossible for abnormal critical edges, with some effort
4675 it should be possible with exception handling, since we still have
4676 control over which handler should be invoked. But due to increased
4677 EH table sizes, this may not be worthwhile. */
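/* An illustrative case: a MEM expression computed before a call that
   can take an abnormal edge (a non-local goto or an exception) must
   not be treated as transparent on that edge, since the call may
   store to the location.  The constant pool references handled below
   are the one case we know to be safe.  */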
4680 compute_transpout (void)
4686 sbitmap_vector_ones (transpout, last_basic_block);
4690 /* Note that flow inserted a nop at the end of basic blocks that
4691 end in call instructions for reasons other than abnormal control flow. */
4693 if (! CALL_P (BB_END (bb)))
4696 for (i = 0; i < expr_hash_table.size; i++)
4697 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4698 if (MEM_P (expr->expr))
4700 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4701 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4704 /* ??? Optimally, we would use interprocedural alias
4705 analysis to determine if this mem is actually killed by this call. */
4707 RESET_BIT (transpout[bb->index], expr->bitmap_index);
4712 /* Code Hoisting variables and subroutines. */
4714 /* Very busy expressions. */
4715 static sbitmap *hoist_vbein;
4716 static sbitmap *hoist_vbeout;
4718 /* Hoistable expressions. */
4719 static sbitmap *hoist_exprs;
4721 /* ??? We could compute post dominators and run this algorithm in
4722 reverse to perform tail merging; doing so would probably be
4723 more effective than the tail merging code in jump.c.
4725 It's unclear if tail merging could be run in parallel with
4726 code hoisting. It would be nice. */
4728 /* Allocate vars used for code hoisting analysis. */
4731 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4733 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4734 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4735 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4737 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4738 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4739 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4740 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4743 /* Free vars used for code hoisting analysis. */
4746 free_code_hoist_mem (void)
4748 sbitmap_vector_free (antloc);
4749 sbitmap_vector_free (transp);
4750 sbitmap_vector_free (comp);
4752 sbitmap_vector_free (hoist_vbein);
4753 sbitmap_vector_free (hoist_vbeout);
4754 sbitmap_vector_free (hoist_exprs);
4755 sbitmap_vector_free (transpout);
4757 free_dominance_info (CDI_DOMINATORS);
4760 /* Compute the very busy expressions at entry/exit from each block.
4762 An expression is very busy if all paths from a given point
4763 compute the expression. */
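/* A sketch, in sbitmap terms, of the system solved below:

	hoist_vbein[bb]  = antloc[bb] | (hoist_vbeout[bb] & transp[bb])
	hoist_vbeout[bb] = intersection of hoist_vbein[s]
			   over all successors s of bb
			   (empty for the exit block)

   iterated until no bit changes.  */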
4766 compute_code_hoist_vbeinout (void)
4768 int changed, passes;
4771 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4772 sbitmap_vector_zero (hoist_vbein, last_basic_block);
4781 /* We scan the blocks in the reverse order to speed up the convergence. */
4783 FOR_EACH_BB_REVERSE (bb)
4785 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4786 hoist_vbeout[bb->index], transp[bb->index]);
4787 if (bb->next_bb != EXIT_BLOCK_PTR)
4788 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
4795 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
4798 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4801 compute_code_hoist_data (void)
4803 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4804 compute_transpout ();
4805 compute_code_hoist_vbeinout ();
4806 calculate_dominance_info (CDI_DOMINATORS);
4808 fprintf (gcse_file, "\n");
4811 /* Determine if the expression identified by EXPR_INDEX would
4812 reach BB unimpaired if it was placed at the end of EXPR_BB.
4814 It's unclear exactly what Muchnick meant by "unimpaired". It seems
4815 to me that the expression must either be computed or transparent in
4816 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4817 would allow the expression to be hoisted out of loops, even if
4818 the expression wasn't a loop invariant.
4820 Contrast this to reachability for PRE where an expression is
4821 considered reachable if *any* path reaches instead of *all* paths. */
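/* A hypothetical example of why *all* paths must qualify:

	for (;;)
	  {
	    if (p)
	      x = a + b;
	    else
	      a = f ();
	  }

   "a + b" is computed on only one path through the loop body and is
   not transparent on the other, so it must not be hoisted above the
   loop; any weaker definition of "unimpaired" would permit that.  */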
4825 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4829 int visited_allocated_locally = 0;
4832 if (visited == NULL)
4834 visited_allocated_locally = 1;
4835 visited = xcalloc (last_basic_block, 1);
4838 FOR_EACH_EDGE (pred, ei, bb->preds)
4840 basic_block pred_bb = pred->src;
4842 if (pred->src == ENTRY_BLOCK_PTR)
4844 else if (pred_bb == expr_bb)
4846 else if (visited[pred_bb->index])
4849 /* Does this predecessor generate this expression? */
4850 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4852 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4858 visited[pred_bb->index] = 1;
4859 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4864 if (visited_allocated_locally)
4867 return (pred == NULL);
4870 /* Actually perform code hoisting. */
4875 basic_block bb, dominated;
4877 unsigned int domby_len;
4879 struct expr **index_map;
4882 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4884 /* Compute a mapping from expression number (`bitmap_index') to
4885 hash table entry. */
4887 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4888 for (i = 0; i < expr_hash_table.size; i++)
4889 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4890 index_map[expr->bitmap_index] = expr;
4892 /* Walk over each basic block looking for potentially hoistable
4893 expressions; nothing gets hoisted from the entry block. */
4897 int insn_inserted_p;
4899 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
4900 /* Examine each expression that is very busy at the exit of this
4901 block. These are the potentially hoistable expressions. */
4902 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4906 if (TEST_BIT (hoist_vbeout[bb->index], i)
4907 && TEST_BIT (transpout[bb->index], i))
4909 /* We've found a potentially hoistable expression; now
4910 we look at every block BB dominates to see if it
4911 computes the expression. */
4912 for (j = 0; j < domby_len; j++)
4914 dominated = domby[j];
4915 /* Ignore self dominance. */
4916 if (bb == dominated)
4918 /* We've found a dominated block; now see if it computes
4919 the busy expression and whether or not moving that
4920 expression to the "beginning" of that block is safe. */
4921 if (!TEST_BIT (antloc[dominated->index], i))
4924 /* Note if the expression would reach the dominated block
4925 unimpaired if it was placed at the end of BB.
4927 Keep track of how many times this expression is hoistable
4928 from a dominated block into BB. */
4929 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4933 /* If we found more than one hoistable occurrence of this
4934 expression, then note it in the bitmap of expressions to
4935 hoist. It makes no sense to hoist things which are computed
4936 in only one BB, and doing so tends to pessimize register
4937 allocation. One could increase this value to try harder
4938 to avoid any possible code expansion due to register
4939 allocation issues; however experiments have shown that
4940 the vast majority of hoistable expressions are only movable
4941 from two successors, so raising this threshold is likely
4942 to nullify any benefit we get from code hoisting. */
4945 SET_BIT (hoist_exprs[bb->index], i);
4950 /* If we found nothing to hoist, then quit now. */
4957 /* Loop over all the hoistable expressions. */
4958 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4960 /* We want to insert the expression into BB only once, so
4961 note when we've inserted it. */
4962 insn_inserted_p = 0;
4964 /* These tests should be the same as the tests above. */
4965 if (TEST_BIT (hoist_vbeout[bb->index], i))
4967 /* We've found a potentially hoistable expression; now
4968 we look at every block BB dominates to see if it
4969 computes the expression. */
4970 for (j = 0; j < domby_len; j++)
4972 dominated = domby[j];
4973 /* Ignore self dominance. */
4974 if (bb == dominated)
4977 /* We've found a dominated block; now see if it computes
4978 the busy expression and whether or not moving that
4979 expression to the "beginning" of that block is safe. */
4980 if (!TEST_BIT (antloc[dominated->index], i))
4983 /* The expression is computed in the dominated block and
4984 it would be safe to compute it at the start of the
4985 dominated block. Now we have to determine if the
4986 expression would reach the dominated block if it was
4987 placed at the end of BB. */
4988 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4990 struct expr *expr = index_map[i];
4991 struct occr *occr = expr->antic_occr;
4995 /* Find the right occurrence of this expression. */
4996 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
5001 set = single_set (insn);
5004 /* Create a pseudo-reg to store the result of reaching
5005 expressions into. Get the mode for the new pseudo
5006 from the mode of the original destination pseudo. */
5007 if (expr->reaching_reg == NULL)
5009 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5011 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5013 occr->deleted_p = 1;
5014 if (!insn_inserted_p)
5016 insert_insn_end_bb (index_map[i], bb, 0);
5017 insn_inserted_p = 1;
5029 /* Top level routine to perform one code hoisting (aka unification) pass
5031 Return nonzero if a change was made. */
5034 one_code_hoisting_pass (void)
5038 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5039 compute_hash_table (&expr_hash_table);
5041 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
5043 if (expr_hash_table.n_elems > 0)
5045 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
5046 compute_code_hoist_data ();
5048 free_code_hoist_mem ();
5051 free_hash_table (&expr_hash_table);
5056 /* Here we provide the things required to do store motion towards
5057 the exit. In order for this to be effective, gcse also needed to
5058 be taught how to move a load when it is killed only by a store to itself.
5063 void foo(float scale)
5065 for (i=0; i<10; i++)
5069 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5070 the load out since it's live around the loop, and stored at the bottom of the loop.
5073 The 'Load Motion' referred to and implemented in this file is
5074 an enhancement to gcse which, when using edge-based LCM, recognizes
5075 this situation and allows gcse to move the load out of the loop.
5077 Once gcse has hoisted the load, store motion can then push this
5078 load towards the exit, and we end up with no loads or stores of 'i' in the loop. */
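/* For illustration, the combined effect is as if the source had been
   (a sketch, not literal compiler output):

	    void foo (float scale)
	    {
	      int t;
	      for (t = 0; t < 10; t++)
		a[t] *= scale;
	      i = t;
	    }

   The loop body now references only the register copy 't'; the single
   store back to 'i' has been sunk below the loop.  */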
5081 /* This will search the ldst list for a matching expression. If it
5082 doesn't find one, we create one and initialize it. */
5084 static struct ls_expr *
5087 int do_not_record_p = 0;
5088 struct ls_expr * ptr;
5091 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5092 NULL, /*have_reg_qty=*/false);
5094 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5095 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
5098 ptr = xmalloc (sizeof (struct ls_expr));
5100 ptr->next = pre_ldst_mems;
5103 ptr->pattern_regs = NULL_RTX;
5104 ptr->loads = NULL_RTX;
5105 ptr->stores = NULL_RTX;
5106 ptr->reaching_reg = NULL_RTX;
5109 ptr->hash_index = hash;
5110 pre_ldst_mems = ptr;
5115 /* Free up an individual ldst entry. */
5118 free_ldst_entry (struct ls_expr * ptr)
5120 free_INSN_LIST_list (& ptr->loads);
5121 free_INSN_LIST_list (& ptr->stores);
5126 /* Free up all memory associated with the ldst list. */
5129 free_ldst_mems (void)
5131 while (pre_ldst_mems)
5133 struct ls_expr * tmp = pre_ldst_mems;
5135 pre_ldst_mems = pre_ldst_mems->next;
5137 free_ldst_entry (tmp);
5140 pre_ldst_mems = NULL;
5143 /* Dump debugging info about the ldst list. */
5146 print_ldst_list (FILE * file)
5148 struct ls_expr * ptr;
5150 fprintf (file, "LDST list: \n");
5152 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5154 fprintf (file, " Pattern (%3d): ", ptr->index);
5156 print_rtl (file, ptr->pattern);
5158 fprintf (file, "\n Loads : ");
5161 print_rtl (file, ptr->loads);
5163 fprintf (file, "(nil)");
5165 fprintf (file, "\n Stores : ");
5168 print_rtl (file, ptr->stores);
5170 fprintf (file, "(nil)");
5172 fprintf (file, "\n\n");
5175 fprintf (file, "\n");
5178 /* Returns 1 if X is in the list of ldst only expressions. */
5180 static struct ls_expr *
5181 find_rtx_in_ldst (rtx x)
5183 struct ls_expr * ptr;
5185 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5186 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
5192 /* Assign each element of the list of mems a monotonically increasing value. */
5195 enumerate_ldsts (void)
5197 struct ls_expr * ptr;
5200 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5206 /* Return first item in the list. */
5208 static inline struct ls_expr *
5209 first_ls_expr (void)
5211 return pre_ldst_mems;
5214 /* Return the next item in the list after the specified one. */
5216 static inline struct ls_expr *
5217 next_ls_expr (struct ls_expr * ptr)
5222 /* Load Motion for loads which only kill themselves. */
5224 /* Return true if x is a simple MEM operation, with no registers or
5225 side effects. These are the types of loads we consider for the
5226 ld_motion list; otherwise we let the usual aliasing take care of it. */
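/* Illustrative examples: a fixed reference such as
   (mem:SI (symbol_ref ("i"))) qualifies, while a volatile MEM, a
   BLKmode MEM, a possibly trapping MEM under -fnon-call-exceptions,
   or a MEM mentioning the stack pointer is rejected by the tests
   below.  */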
5234 if (MEM_VOLATILE_P (x))
5237 if (GET_MODE (x) == BLKmode)
5240 /* If we are handling exceptions, we must be careful with memory references
5241 that may trap. If we are not, the behavior is undefined, so we may just continue. */
5243 if (flag_non_call_exceptions && may_trap_p (x))
5246 if (side_effects_p (x))
5249 /* Do not consider function arguments passed on stack. */
5250 if (reg_mentioned_p (stack_pointer_rtx, x))
5253 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
5259 /* Make sure there isn't a buried reference in this pattern anywhere.
5260 If there is, invalidate the entry for it since we're not capable
5261 of fixing it up just yet. We have to be sure we know about ALL
5262 loads since the aliasing code will allow all entries in the
5263 ld_motion list to not-alias itself. If we miss a load, we will get
5264 the wrong value since gcse might common it and we won't know to fix it up. */
5268 invalidate_any_buried_refs (rtx x)
5272 struct ls_expr * ptr;
5274 /* Invalidate it in the list. */
5275 if (MEM_P (x) && simple_mem (x))
5277 ptr = ldst_entry (x);
5281 /* Recursively process the insn. */
5282 fmt = GET_RTX_FORMAT (GET_CODE (x));
5284 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5287 invalidate_any_buried_refs (XEXP (x, i));
5288 else if (fmt[i] == 'E')
5289 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5290 invalidate_any_buried_refs (XVECEXP (x, i, j));
5294 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5295 being defined as MEM loads and stores to symbols, with no side effects
5296 and no registers in the expression. For a MEM destination, we also
5297 check that the insn is still valid if we replace the destination with a
5298 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5299 which don't match these criteria, they are invalidated and trimmed out later. */
5303 compute_ld_motion_mems (void)
5305 struct ls_expr * ptr;
5309 pre_ldst_mems = NULL;
5313 for (insn = BB_HEAD (bb);
5314 insn && insn != NEXT_INSN (BB_END (bb));
5315 insn = NEXT_INSN (insn))
5319 if (GET_CODE (PATTERN (insn)) == SET)
5321 rtx src = SET_SRC (PATTERN (insn));
5322 rtx dest = SET_DEST (PATTERN (insn));
5324 /* Check for a simple LOAD... */
5325 if (MEM_P (src) && simple_mem (src))
5327 ptr = ldst_entry (src);
5329 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5335 /* Make sure there isn't a buried load somewhere. */
5336 invalidate_any_buried_refs (src);
5339 /* Check for stores. Don't worry about aliased ones, they
5340 will block any movement we might do later. We only care
5341 about this exact pattern since those are the only
5342 circumstances in which we will ignore the aliasing info. */
5343 if (MEM_P (dest) && simple_mem (dest))
5345 ptr = ldst_entry (dest);
5348 && GET_CODE (src) != ASM_OPERANDS
5349 /* Check for REG manually since want_to_gcse_p
5350 returns 0 for all REGs. */
5351 && can_assign_to_reg_p (src))
5352 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5358 invalidate_any_buried_refs (PATTERN (insn));
5364 /* Remove any references that have been either invalidated or are not in the
5365 expression list for pre gcse. */
5368 trim_ld_motion_mems (void)
5370 struct ls_expr * * last = & pre_ldst_mems;
5371 struct ls_expr * ptr = pre_ldst_mems;
5377 /* Delete if entry has been made invalid. */
5380 /* Delete if we cannot find this mem in the expression list. */
5381 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5383 for (expr = expr_hash_table.table[hash];
5385 expr = expr->next_same_hash)
5386 if (expr_equiv_p (expr->expr, ptr->pattern))
5390 expr = (struct expr *) 0;
5394 /* Set the expression field if we are keeping it. */
5402 free_ldst_entry (ptr);
5407 /* Show the world what we've found. */
5408 if (gcse_file && pre_ldst_mems != NULL)
5409 print_ldst_list (gcse_file);
5412 /* This routine will take an expression which we are replacing with
5413 a reaching register, and update any stores that are needed if
5414 that expression is in the ld_motion list. Stores are updated by
5415 copying their SRC to the reaching register, and then storing
5416 the reaching register into the store location. This keeps the
5417 correct value in the reaching register for the loads. */
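/* A sketch of the rewrite performed below, using a hypothetical
   pseudo 200 for EXPR->REACHING_REG:

	(set (mem) (src))

   becomes

	(set (reg 200) (src))
	(set (mem) (reg 200))

   so the value that the hoisted load expects is kept live in reg 200.  */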
5420 update_ld_motion_stores (struct expr * expr)
5422 struct ls_expr * mem_ptr;
5424 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
5426 /* We can try to find just the REACHED stores, but it shouldn't
5427 matter if we set the reaching reg everywhere... some might be
5428 dead and should be eliminated later. */
5430 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5431 where reg is the reaching reg used in the load. We checked in
5432 compute_ld_motion_mems that we can replace (set mem expr) with
5433 (set reg expr) in that insn. */
5434 rtx list = mem_ptr->stores;
5436 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5438 rtx insn = XEXP (list, 0);
5439 rtx pat = PATTERN (insn);
5440 rtx src = SET_SRC (pat);
5441 rtx reg = expr->reaching_reg;
5444 /* If we've already copied it, continue. */
5445 if (expr->reaching_reg == src)
5450 fprintf (gcse_file, "PRE: store updated with reaching reg ");
5451 print_rtl (gcse_file, expr->reaching_reg);
5452 fprintf (gcse_file, ":\n ");
5453 print_inline_rtx (gcse_file, insn, 8);
5454 fprintf (gcse_file, "\n");
5457 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
5458 new = emit_insn_before (copy, insn);
5459 record_one_set (REGNO (reg), new);
5460 SET_SRC (pat) = reg;
5462 /* un-recognize this pattern since it's probably different now. */
5463 INSN_CODE (insn) = -1;
5464 gcse_create_count++;
5469 /* Store motion code. */
5471 #define ANTIC_STORE_LIST(x) ((x)->loads)
5472 #define AVAIL_STORE_LIST(x) ((x)->stores)
5473 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5475 /* This is used to communicate the target bitvector we want to use in the
5476 reg_set_info routine when called via the note_stores mechanism. */
5477 static int * regvec;
5479 /* And current insn, for the same routine. */
5480 static rtx compute_store_table_current_insn;
5482 /* Used in computing the reverse edge graph bit vectors. */
5483 static sbitmap * st_antloc;
5485 /* Global holding the number of store expressions we are dealing with. */
5486 static int num_stores;
5488 /* Checks to set if we need to mark a register set. Called from note_stores. */
5492 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5495 sbitmap bb_reg = data;
5497 if (GET_CODE (dest) == SUBREG)
5498 dest = SUBREG_REG (dest);
5502 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5504 SET_BIT (bb_reg, REGNO (dest));
5508 /* Clear any mark that says that this insn sets dest. Called from note_stores. */
5512 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5515 int *dead_vec = data;
5517 if (GET_CODE (dest) == SUBREG)
5518 dest = SUBREG_REG (dest);
5521 dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5522 dead_vec[REGNO (dest)] = 0;
5525 /* Return zero if some of the registers in list X are killed
5526 due to a set of registers in REGS_SET. */
5529 store_ops_ok (rtx x, int *regs_set)
5533 for (; x; x = XEXP (x, 1))
5536 if (regs_set[REGNO(reg)])
5543 /* Returns a list of registers mentioned in X. */
5545 extract_mentioned_regs (rtx x)
5547 return extract_mentioned_regs_helper (x, NULL_RTX);
5550 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate the used registers. */
5553 extract_mentioned_regs_helper (rtx x, rtx accum)
5559 /* Repeat is used to turn tail-recursion into iteration. */
5565 code = GET_CODE (x);
5569 return alloc_EXPR_LIST (0, x, accum);
5579 /* We do not run this function with arguments having side effects. */
5598 i = GET_RTX_LENGTH (code) - 1;
5599 fmt = GET_RTX_FORMAT (code);
5605 rtx tem = XEXP (x, i);
5607 /* If we are about to do the last recursive call
5608 needed at this level, change it into iteration. */
5615 accum = extract_mentioned_regs_helper (tem, accum);
5617 else if (fmt[i] == 'E')
5621 for (j = 0; j < XVECLEN (x, i); j++)
5622 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
5629 /* Determine whether INSN is a MEM store pattern that we will consider moving.
5630 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5631 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5632 including) the insn in this basic block. We must be passing through BB from
5633 head to end, as we are using this fact to speed things up.
5635 The results are stored this way:
5637 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5638 -- if the processed expression is not anticipatable, NULL_RTX is added
5639 there instead, so that we can use it as indicator that no further
5640 expression of this type may be anticipatable
5641 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5642 consequently, all of them but this head are dead and may be deleted.
5643 -- if the expression is not available, the insn due to which it fails to be
5644 available is stored in reaching_reg.
5646 Things are complicated a bit by the fact that there may already be stores
5647 to the same MEM from other blocks; also the caller must take care of the
5648 necessary cleanup of the temporary markers after the end of the basic block. */
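/* A small illustration of the encoding, assuming two stores to the
   same MEM in one block and nothing else that kills the pattern:

	A: mem = x;	(earlier in the block)
	B: mem = y;	(later in the block)

   A heads ANTIC_STORE_LIST as the first anticipatable store, while B
   becomes the head of AVAIL_STORE_LIST; the earlier AVAIL entry for A
   is dead and may be deleted.  */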
5652 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5654 struct ls_expr * ptr;
5656 int check_anticipatable, check_available;
5657 basic_block bb = BLOCK_FOR_INSN (insn);
5659 set = single_set (insn);
5663 dest = SET_DEST (set);
5665 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5666 || GET_MODE (dest) == BLKmode)
5669 if (side_effects_p (dest))
5672 /* If we are handling exceptions, we must be careful with memory references
5673 that may trap. If we are not, the behavior is undefined, so we may just continue. */
5675 if (flag_non_call_exceptions && may_trap_p (dest))
5678 /* Even if the destination cannot trap, the source may. In this case we'd
5679 need to handle updating the REG_EH_REGION note. */
5680 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5683 ptr = ldst_entry (dest);
5684 if (!ptr->pattern_regs)
5685 ptr->pattern_regs = extract_mentioned_regs (dest);
5687 /* Do not check for anticipatability if we either found one anticipatable
5688 store already, or tested for one and found out that it was killed. */
5689 check_anticipatable = 0;
5690 if (!ANTIC_STORE_LIST (ptr))
5691 check_anticipatable = 1;
5694 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
5696 && BLOCK_FOR_INSN (tmp) != bb)
5697 check_anticipatable = 1;
5699 if (check_anticipatable)
5701 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5705 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5706 ANTIC_STORE_LIST (ptr));
5709 /* It is not necessary to check whether store is available if we did
5710 it successfully before; if we failed before, do not bother to check
5711 until we reach the insn that caused us to fail. */
5712 check_available = 0;
5713 if (!AVAIL_STORE_LIST (ptr))
5714 check_available = 1;
5717 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5718 if (BLOCK_FOR_INSN (tmp) != bb)
5719 check_available = 1;
5721 if (check_available)
5723 /* Check that we have already reached the insn at which the check
5724 failed last time. */
5725 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5727 for (tmp = BB_END (bb);
5728 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5729 tmp = PREV_INSN (tmp))
5732 check_available = 0;
5735 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
5737 &LAST_AVAIL_CHECK_FAILURE (ptr));
5739 if (!check_available)
5740 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
5743 /* Find available and anticipatable stores. */
5746 compute_store_table (void)
5752 int *last_set_in, *already_set;
5753 struct ls_expr * ptr, **prev_next_ptr_ptr;
5755 max_gcse_regno = max_reg_num ();
5757 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
5759 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
5761 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
5762 already_set = xmalloc (sizeof (int) * max_gcse_regno);
5764 /* Find all the stores we care about. */
5767 /* First compute the registers set in this block. */
5768 regvec = last_set_in;
5770 for (insn = BB_HEAD (bb);
5771 insn != NEXT_INSN (BB_END (bb));
5772 insn = NEXT_INSN (insn))
5774 if (! INSN_P (insn))
5779 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5780 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5782 last_set_in[regno] = INSN_UID (insn);
5783 SET_BIT (reg_set_in_block[bb->index], regno);
5787 pat = PATTERN (insn);
5788 compute_store_table_current_insn = insn;
5789 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
5792 /* Now find the stores. */
5793 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5794 regvec = already_set;
5795 for (insn = BB_HEAD (bb);
5796 insn != NEXT_INSN (BB_END (bb));
5797 insn = NEXT_INSN (insn))
5799 if (! INSN_P (insn))
5804 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5805 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5806 already_set[regno] = 1;
5809 pat = PATTERN (insn);
5810 note_stores (pat, reg_set_info, NULL);
5812 /* Now that we've marked regs, look for stores. */
5813 find_moveable_store (insn, already_set, last_set_in);
5815 /* Unmark regs that are no longer set. */
5816 compute_store_table_current_insn = insn;
5817 note_stores (pat, reg_clear_last_set, last_set_in);
5820 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5821 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5822 && last_set_in[regno] == INSN_UID (insn))
5823 last_set_in[regno] = 0;
5827 #ifdef ENABLE_CHECKING
5828 /* last_set_in should now be all-zero. */
5829 for (regno = 0; regno < max_gcse_regno; regno++)
5830 gcc_assert (!last_set_in[regno]);
5833 /* Clear temporary marks. */
5834 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5836 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5837 if (ANTIC_STORE_LIST (ptr)
5838 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5839 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5843 /* Remove the stores that are not available anywhere, as there will
5844 be no opportunity to optimize them. */
5845 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5847 ptr = *prev_next_ptr_ptr)
5849 if (!AVAIL_STORE_LIST (ptr))
5851 *prev_next_ptr_ptr = ptr->next;
5852 free_ldst_entry (ptr);
5855 prev_next_ptr_ptr = &ptr->next;
5858 ret = enumerate_ldsts ();
5862 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
5863 print_ldst_list (gcse_file);
5871 /* Check to see if the load X is aliased with STORE_PATTERN.
5872 AFTER is true if we are checking the case when STORE_PATTERN occurs after the insn X. */
5876 load_kills_store (rtx x, rtx store_pattern, int after)
5879 return anti_dependence (x, store_pattern);
5881 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
5885 /* Go through the entire insn X, looking for any loads which might alias
5886 STORE_PATTERN. Return true if found.
5887 AFTER is true if we are checking the case when STORE_PATTERN occurs
5888 after the insn X. */
5891 find_loads (rtx x, rtx store_pattern, int after)
5900 if (GET_CODE (x) == SET)
5905 if (load_kills_store (x, store_pattern, after))
5909 /* Recursively process the insn. */
5910 fmt = GET_RTX_FORMAT (GET_CODE (x));
5912 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5915 ret |= find_loads (XEXP (x, i), store_pattern, after);
5916 else if (fmt[i] == 'E')
5917 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5918 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5923 /* Check if INSN kills the store pattern X (is aliased with it).
5924 AFTER is true if we are checking the case when store X occurs
5925 after the insn. Return true if it does. */
5928 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
5930 rtx reg, base, note;
5937 /* A normal or pure call might read from pattern,
5938 but a const call will not. */
5939 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
5942 /* But even a const call reads its parameters. Check whether the
5943 base of some of the registers used in the mem is the stack pointer. */
5944 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5946 base = find_base_term (XEXP (reg, 0));
5948 || (GET_CODE (base) == ADDRESS
5949 && GET_MODE (base) == Pmode
5950 && XEXP (base, 0) == stack_pointer_rtx))
5957 if (GET_CODE (PATTERN (insn)) == SET)
5959 rtx pat = PATTERN (insn);
5960 rtx dest = SET_DEST (pat);
5962 if (GET_CODE (dest) == SIGN_EXTRACT
5963 || GET_CODE (dest) == ZERO_EXTRACT)
5964 dest = XEXP (dest, 0);
5966 /* Check for memory stores to aliased objects. */
5968 && !expr_equiv_p (dest, x))
5972 if (output_dependence (dest, x))
5977 if (output_dependence (x, dest))
5981 if (find_loads (SET_SRC (pat), x, after))
5984 else if (find_loads (PATTERN (insn), x, after))
5987 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5988 location aliased with X, then this insn kills X. */
5989 note = find_reg_equal_equiv_note (insn);
5992 note = XEXP (note, 0);
5994 /* However, if the note represents a must alias rather than a may
5995 alias relationship, then it does not kill X. */
5996 if (expr_equiv_p (note, x))
5999 /* See if there are any aliased loads in the note. */
6000 return find_loads (note, x, after);
6003 /* Returns true if the expression X is loaded or clobbered on or after INSN
6004 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
6005 or after the insn. X_REGS is the list of registers mentioned in X. If the store
6006 is killed, return in FAIL_INSN the last insn in which it occurs. */
6009 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
6010 int *regs_set_after, rtx *fail_insn)
6012 rtx last = BB_END (bb), act;
6014 if (!store_ops_ok (x_regs, regs_set_after))
6016 /* We do not know where it will happen. */
6018 *fail_insn = NULL_RTX;
6022 /* Scan from the end, so that fail_insn is determined correctly. */
6023 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
6024 if (store_killed_in_insn (x, x_regs, act, false))
6034 /* Returns true if the expression X is loaded or clobbered on or before INSN
6035 within basic block BB. X_REGS is the list of registers mentioned in X.
6036 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
6038 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
6039 int *regs_set_before)
6041 rtx first = BB_HEAD (bb);
6043 if (!store_ops_ok (x_regs, regs_set_before))
6046 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
6047 if (store_killed_in_insn (x, x_regs, insn, true))
6053 /* Fill in available, anticipatable, transparent and kill vectors in
6054 STORE_DATA, based on lists of available and anticipatable stores. */
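/* A sketch of the vector roles, one bit per (store, block) pair:

	ae_gen[bb]	-- the stored value is available at the end of bb
	st_antloc[bb]	-- the store is anticipatable at the start of bb
	ae_kill[bb]	-- something in bb kills the stored value
	transp[bb]	-- bb neither loads nor clobbers the location

   These feed pre_edge_rev_lcm, which runs LCM on the reversed
   flowgraph to decide where each store sinks to.  */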
6056 build_store_vectors (void)
6059 int *regs_set_in_block;
6061 struct ls_expr * ptr;
6064 /* Build the gen_vector. This is any store in the table which is not killed
6065 by aliasing later in its block. */
6066 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
6067 sbitmap_vector_zero (ae_gen, last_basic_block);
6069 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
6070 sbitmap_vector_zero (st_antloc, last_basic_block);
6072 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6074 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6076 insn = XEXP (st, 0);
6077 bb = BLOCK_FOR_INSN (insn);
6079 /* If we've already seen an available expression in this block,
6080 we can delete this one (it occurs earlier in the block). We'll
6081 copy the SRC expression to an unused register in case there
6082 are any side effects. */
6083 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6085 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6087 fprintf (gcse_file, "Removing redundant store:\n");
6088 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6091 SET_BIT (ae_gen[bb->index], ptr->index);
6094 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6096 insn = XEXP (st, 0);
6097 bb = BLOCK_FOR_INSN (insn);
6098 SET_BIT (st_antloc[bb->index], ptr->index);
6102 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6103 sbitmap_vector_zero (ae_kill, last_basic_block);
6105 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6106 sbitmap_vector_zero (transp, last_basic_block);
6107 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
6111 for (regno = 0; regno < max_gcse_regno; regno++)
6112 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6114 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6116 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6117 bb, regs_set_in_block, NULL))
6119 /* It should not be necessary to consider the expression
6120 killed if it is both anticipatable and available. */
6121 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6122 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6123 SET_BIT (ae_kill[bb->index], ptr->index);
6126 SET_BIT (transp[bb->index], ptr->index);
6130 free (regs_set_in_block);
6134 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
6135 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
6136 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
6137 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
6141 /* Insert an instruction at the beginning of a basic block, and update
6142 the BB_HEAD if needed. */
6145 insert_insn_start_bb (rtx insn, basic_block bb)
6147 /* Insert at start of successor block. */
6148 rtx prev = PREV_INSN (BB_HEAD (bb));
6149 rtx before = BB_HEAD (bb);
6152 if (! LABEL_P (before)
6153 && (! NOTE_P (before)
6154 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6157 if (prev == BB_END (bb))
6159 before = NEXT_INSN (before);
6162 insn = emit_insn_after_noloc (insn, prev);
6166 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
6168 print_inline_rtx (gcse_file, insn, 6);
6169 fprintf (gcse_file, "\n");
6173 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6174 the memory reference, and E is the edge to insert it on. Returns nonzero
6175 if an edge insertion was performed. */
6178 insert_store (struct ls_expr * expr, edge e)
6185 /* We did all the deletes before this insert, so if we didn't delete a
6186 store, then we haven't set the reaching reg yet either. */
6187 if (expr->reaching_reg == NULL_RTX)
6190 if (e->flags & EDGE_FAKE)
6193 reg = expr->reaching_reg;
6194 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6196 /* If we are inserting this expression on ALL predecessor edges of a BB,
6197 insert it at the start of the BB, and reset the insert bits on the other
6198 edges so we don't try to insert it on the other edges. */
6200 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6201 if (!(tmp->flags & EDGE_FAKE))
6203 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6205 gcc_assert (index != EDGE_INDEX_NO_EDGE);
6206 if (! TEST_BIT (pre_insert_map[index], expr->index))
6210 /* If tmp is NULL, we found an insertion on every edge, blank the
6211 insertion vector for these edges, and insert at the start of the BB. */
6212 if (!tmp && bb != EXIT_BLOCK_PTR)
6214 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6216 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6217 RESET_BIT (pre_insert_map[index], expr->index);
6219 insert_insn_start_bb (insn, bb);
6223 /* We can't insert on this edge, so we'll insert at the head of the
6224 successor block. See Morgan, sec 10.5. */
6225 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
6227 insert_insn_start_bb (insn, bb);
6231 insert_insn_on_edge (insn, e);
6235 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6236 e->src->index, e->dest->index);
6237 print_inline_rtx (gcse_file, insn, 6);
6238 fprintf (gcse_file, "\n");
6244 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6245 memory location in SMEXPR set in basic block BB.
6247 This could be rather expensive. */
6250 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6252 edge_iterator *stack, ei;
6255 sbitmap visited = sbitmap_alloc (last_basic_block);
6256 rtx last, insn, note;
6257 rtx mem = smexpr->pattern;
6259 stack = xmalloc (sizeof (edge_iterator) * n_basic_blocks);
6261 ei = ei_start (bb->succs);
6263 sbitmap_zero (visited);
6265 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6273 sbitmap_free (visited);
6276 act = ei_edge (stack[--sp]);
6280 if (bb == EXIT_BLOCK_PTR
6281 || TEST_BIT (visited, bb->index))
6285 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6288 SET_BIT (visited, bb->index);
6290 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6292 for (last = ANTIC_STORE_LIST (smexpr);
6293 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6294 last = XEXP (last, 1))
6296 last = XEXP (last, 0);
6299 last = NEXT_INSN (BB_END (bb));
6301 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6304 note = find_reg_equal_equiv_note (insn);
6305 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6309 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6311 remove_note (insn, note);
6316 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6318 if (EDGE_COUNT (bb->succs) > 0)
6322 ei = ei_start (bb->succs);
6323 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6328 /* This routine will replace a store with a SET to a specified register. */
6331 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6333 rtx insn, mem, note, set, ptr, pair;
6335 mem = smexpr->pattern;
6336 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6337 insn = emit_insn_after (insn, del);
6342 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6343 print_inline_rtx (gcse_file, del, 6);
6344 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6345 print_inline_rtx (gcse_file, insn, 6);
6346 fprintf (gcse_file, "\n");
6349 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6350 if (XEXP (ptr, 0) == del)
6352 XEXP (ptr, 0) = insn;
6356 /* Move the notes from the deleted insn to its replacement, and patch
6357 up the LIBCALL notes. */
6358 REG_NOTES (insn) = REG_NOTES (del);
6360 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6363 pair = XEXP (note, 0);
6364 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6365 XEXP (note, 0) = insn;
6367 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6370 pair = XEXP (note, 0);
6371 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6372 XEXP (note, 0) = insn;
6377 /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
6378 they are no longer accurate if they are reached by this
6379 definition, so drop them. */
6380 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6383 set = single_set (insn);
6386 if (expr_equiv_p (SET_DEST (set), mem))
6388 note = find_reg_equal_equiv_note (insn);
6389 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6393 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6395 remove_note (insn, note);
6397 remove_reachable_equiv_notes (bb, smexpr);
6401 /* Delete a store, but copy the value that would have been stored into
6402 the reaching_reg for later storing. */
6405 delete_store (struct ls_expr * expr, basic_block bb)
6409 if (expr->reaching_reg == NULL_RTX)
6410 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
6412 reg = expr->reaching_reg;
6414 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6417 if (BLOCK_FOR_INSN (del) == bb)
6419 /* We know there is only one since we deleted redundant
6420 ones during the available computation. */
6421 replace_store_insn (reg, del, bb, expr);
6427 /* Free memory used by store motion. */
6430 free_store_memory (void)
6435 sbitmap_vector_free (ae_gen);
6437 sbitmap_vector_free (ae_kill);
6439 sbitmap_vector_free (transp);
6441 sbitmap_vector_free (st_antloc);
6443 sbitmap_vector_free (pre_insert_map);
6445 sbitmap_vector_free (pre_delete_map);
6446 if (reg_set_in_block)
6447 sbitmap_vector_free (reg_set_in_block);
6449 ae_gen = ae_kill = transp = st_antloc = NULL;
6450 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6453 /* Perform store motion. Much like gcse, except we move expressions the
6454 other way by looking at the flowgraph in reverse. */
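/* An outline of the phases below, for orientation:
   compute_store_table collects the candidate stores,
   build_store_vectors derives the dataflow vectors, pre_edge_rev_lcm
   computes pre_insert_map and pre_delete_map, delete_store rewrites
   each sinkable store through its reaching_reg, and insert_store
   emits the replacement stores on the chosen edges.  */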
6461 struct ls_expr * ptr;
6462 int update_flow = 0;
6466 fprintf (gcse_file, "before store motion\n");
6467 print_rtl (gcse_file, get_insns ());
6470 init_alias_analysis ();
6472 /* Find all the available and anticipatable stores. */
6473 num_stores = compute_store_table ();
6474 if (num_stores == 0)
6476 sbitmap_vector_free (reg_set_in_block);
6477 end_alias_analysis ();
6481 /* Now compute kill & transp vectors. */
6482 build_store_vectors ();
6483 add_noreturn_fake_exit_edges ();
6484 connect_infinite_loops_to_exit ();
6486 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
6487 st_antloc, ae_kill, &pre_insert_map,
6490 /* Now we want to insert the new stores which are going to be needed. */
6491 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6494 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6495 delete_store (ptr, bb);
6497 for (x = 0; x < NUM_EDGES (edge_list); x++)
6498 if (TEST_BIT (pre_insert_map[x], ptr->index))
6499 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6503 commit_edge_insertions ();
6505 free_store_memory ();
6506 free_edge_list (edge_list);
6507 remove_fake_exit_edges ();
6508 end_alias_analysis ();
6512 /* Entry point for jump bypassing optimization pass. */
6515 bypass_jumps (FILE *file)
6519 /* We do not construct an accurate cfg in functions which call
6520 setjmp, so just punt to be safe. */
6521 if (current_function_calls_setjmp)
6524 /* For calling dump_foo fns from gdb. */
6525 debug_stderr = stderr;
6528 /* Identify the basic block information for this function, including
6529 successors and predecessors. */
6530 max_gcse_regno = max_reg_num ();
6533 dump_flow_info (file);
6535 /* Return if there's nothing to do, or it is too expensive. */
6536 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
6539 gcc_obstack_init (&gcse_obstack);
6542 /* We need alias. */
6543 init_alias_analysis ();
6545 /* Record where pseudo-registers are set. This data is kept accurate
6546 during each pass. ??? We could also record hard-reg information here
6547 [since it's unchanging], however it is currently done during hash table computation.
6550 It may be tempting to compute MEM set information here too, but MEM sets
6551 will be subject to code motion one day and thus we need to compute
6552 information about memory sets when we build the hash tables. */
6554 alloc_reg_set_mem (max_gcse_regno);
6555 compute_sets (get_insns ());
6557 max_gcse_regno = max_reg_num ();
6558 alloc_gcse_mem (get_insns ());
6559 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, 1, 1);
6564 fprintf (file, "BYPASS of %s: %d basic blocks, ",
6565 current_function_name (), n_basic_blocks);
6566 fprintf (file, "%d bytes\n\n", bytes_used);
6569 obstack_free (&gcse_obstack, NULL);
6570 free_reg_set_mem ();
6572 /* We are finished with alias. */
6573 end_alias_analysis ();
6574 allocate_reg_info (max_reg_num (), FALSE, FALSE);
6579 /* Return true if the graph is too expensive to optimize. PASS is the
6580 optimization about to be performed. */
6583 is_too_expensive (const char *pass)
6585 /* Trying to perform global optimizations on flow graphs which have
6586 a high connectivity will take a long time and is unlikely to be
6587 particularly useful.
6589 In normal circumstances a cfg should have about twice as many
6590 edges as blocks. But we do not want to punish small functions
6591 which have a couple of switch statements. Rather than simply
6592 thresholding the number of blocks, we use something with a more
6593 graceful degradation. */
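/* A worked example of the threshold below: a function with 1000
   basic blocks is rejected only above 20000 + 4 * 1000 = 24000
   edges, far more than the roughly 2000 edges expected for a cfg of
   that size.  */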
6594 if (n_edges > 20000 + n_basic_blocks * 4)
6596 if (warn_disabled_optimization)
6597 warning ("%s: %d basic blocks and %d edges/basic block",
6598 pass, n_basic_blocks, n_edges / n_basic_blocks);
6603 /* If allocating memory for the cprop bitmap would take up too much
6604 storage it's better just to disable the optimization. */
6606 * SBITMAP_SET_SIZE (max_reg_num ())
6607 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6609 if (warn_disabled_optimization)
6610 warning ("%s: %d basic blocks and %d registers",
6611 pass, n_basic_blocks, max_reg_num ());
6619 #include "gt-gcse.h"