1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
24 - do rough calc of how many regs are needed in each block, and a rough
25 calc of how many regs are available in each class and use that to
26 throttle back the code in cases where RTX_COST is minimal.
27 - a store to the same address as a load does not kill the load if the
28 source of the store is also the destination of the load. Handling this
29 allows more load motion, particularly out of loops.
30 - ability to realloc sbitmap vectors would allow one initial computation
31 of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
36 /* References searched while implementing this.
   Compilers: Principles, Techniques, and Tools
   A.V. Aho, R. Sethi, J.D. Ullman
   Addison-Wesley, 1988
42 Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
46 A Portable Machine-Independent Global Optimizer - Design and Measurements
   F. Chow
   Stanford Ph.D. thesis, Dec. 1983
50 A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
54 A Solution to a Problem with Morel and Renvoise's
55 Global Optimization by Suppression of Partial Redundancies
56 K-H Drechsler, M.P. Stadel
57 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
59 Practical Adaptation of the Global Optimization
60 Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991
   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
66 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
67 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
71 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
73 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   T. Ball
76 ACM Letters on Programming Languages and Systems,
77 Vol. 2, Num. 1-4, Mar-Dec 1993
79 An Efficient Representation for Sparse Sets
80 Preston Briggs, Linda Torczon
81 ACM Letters on Programming Languages and Systems,
82 Vol. 2, Num. 1-4, Mar-Dec 1993
84 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
85 K-H Drechsler, M.P. Stadel
86 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
88 Partial Dead Code Elimination
89 J. Knoop, O. Ruthing, B. Steffen
90 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
92 Effective Partial Redundancy Elimination
93 P. Briggs, K.D. Cooper
94 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
96 The Program Structure Tree: Computing Control Regions in Linear Time
97 R. Johnson, D. Pearson, K. Pingali
98 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
100 Optimal Code Motion: Theory and Practice
101 J. Knoop, O. Ruthing, B. Steffen
102 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
104 The power of assignment motion
105 J. Knoop, O. Ruthing, B. Steffen
106 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
   Global code motion / global value numbering
   C. Click
110 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
112 Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996
   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996
   High Performance Compilers for Parallel Computing
   M. Wolfe
   Addison-Wesley, 1996
124 Advanced Compiler Design and Implementation
   S. Muchnick
   Morgan Kaufmann, 1997
   Building an Optimizing Compiler
   R. Morgan
   Digital Press, 1998
132 People wishing to speed up the code here should read:
133 Elimination Algorithms for Data Flow Analysis
134 B.G. Ryder, M.C. Paull
135 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
137 How to Analyze Large Programs Efficiently and Informatively
138 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
139 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
141 People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
152 #include "hard-reg-set.h"
155 #include "insn-config.h"
157 #include "basic-block.h"
159 #include "function.h"
165 #define obstack_chunk_alloc gmalloc
166 #define obstack_chunk_free free
/* Propagate flow information through back edges and thus enable PRE to
   move loop-invariant calculations out of loops.
171 Originally this tended to create worse overall code, but several
172 improvements during the development of PRE seem to have made following
173 back edges generally a win.
   Note that much of the loop-invariant code motion done here would normally
176 be done by loop.c, which has more heuristics for when to move invariants
177 out of loops. At some point we might need to move some of those
178 heuristics into gcse.c. */
179 #define FOLLOW_BACK_EDGES 1
181 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
182 are a superset of those done by GCSE.
184 We perform the following steps:
186 1) Compute basic block information.
188 2) Compute table of places where registers are set.
190 3) Perform copy/constant propagation.
192 4) Perform global cse.
194 5) Perform another pass of copy/constant propagation.
196 Two passes of copy/constant propagation are done because the first one
197 enables more GCSE and the second one helps to clean up the copies that
198 GCSE creates. This is needed more for PRE than for Classic because Classic
199 GCSE will try to use an existing register containing the common
200 subexpression rather than create a new one. This is harder to do for PRE
201 because of the code motion (which Classic GCSE doesn't do).
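   As an illustrative example [hypothetical pseudo-reg numbers]: if insn A is
   (set (reg 101) (reg 100)) and insn B is
   (set (reg 102) (plus (reg 101) (reg 103))), the first pass of copy
   propagation can rewrite B to use (reg 100), which may make B's source
   identical to a (plus (reg 100) (reg 103)) computed elsewhere and thus
   expose it to GCSE; GCSE in turn replaces redundant computations with
   copies from a new pseudo, and the second pass propagates those copies
   into their uses.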
203 Expressions we are interested in GCSE-ing are of the form
204 (set (pseudo-reg) (expression)).
205 Function want_to_gcse_p says what these are.
207 PRE handles moving invariant expressions out of loops (by treating them as
208 partially redundant).
210 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
211 assignment) based GVN (global value numbering). L. T. Simpson's paper
212 (Rice University) on value numbering is a useful reference for this.
214 **********************
216 We used to support multiple passes but there are diminishing returns in
217 doing so. The first pass usually makes 90% of the changes that are doable.
218 A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.
222 A study of spec92 using an unlimited number of passes:
223 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
224 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
225 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
   It was found doing copy propagation between each pass enables further
   substitutions.
230 PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
234 **********************
236 The steps for PRE are:
238 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
240 2) Perform the data flow analysis for PRE.
   3) Delete the redundant instructions.
244 4) Insert the required copies [if any] that make the partially
245 redundant instructions fully redundant.
247 5) For other reaching expressions, insert an instruction to copy the value
248 to a newly created pseudo that will reach the redundant instruction.
250 The deletion is done first so that when we do insertions we
251 know which pseudo reg to use.
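   As a sketch [hypothetical regs]: suppose (plus (reg 1) (reg 2)) is
   computed into (reg 100) in block B1 and again into (reg 101) in block B3,
   and B3 is also reachable from a block B2 that does not compute it.  The
   occurrence in B3 is partially redundant.  Deletion replaces it with
   (set (reg 101) (reg 102)) for a new pseudo (reg 102), insertion adds
   (set (reg 102) (plus (reg 1) (reg 2))) on the path through B2, and the
   copy step adds (set (reg 102) (reg 100)) after B1's computation, making
   the occurrence in B3 fully redundant.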
253 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
254 argue it is not. The number of iterations for the algorithm to converge
255 is typically 2-4 so I don't view it as that expensive (relatively speaking).
257 PRE GCSE depends heavily on the second CSE pass to clean up the copies
258 we create. To make an expression reach the place where it's redundant,
259 the result of the expression is copied to a new register, and the redundant
260 expression is deleted by replacing it with this new register. Classic GCSE
261 doesn't have this problem as much as it computes the reaching defs of
262 each register in each block and thus can try to use an existing register.
264 **********************
   A fair bit of simplicity is gained by creating small functions for simple
267 tasks, even when the function is only called in one place. This may
268 measurably slow things down [or may not] by creating more function call
269 overhead than is necessary. The source is laid out so that it's trivial
270 to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.
274 Help stamp out big monolithic functions! */
276 /* GCSE global vars. */
279 static FILE *gcse_file;
281 /* Note whether or not we should run jump optimization after gcse. We
282 want to do this for two cases.
284 * If we changed any jumps via cprop.
286 * If we added any labels via edge splitting. */
288 static int run_jump_opt_after_gcse;
290 /* Bitmaps are normally not included in debugging dumps.
291 However it's useful to be able to print them from GDB.
292 We could create special functions for this, but it's simpler to
293 just allow passing stderr to the dump_foo fns. Since stderr can
294 be a macro, we store a copy here. */
295 static FILE *debug_stderr;
297 /* An obstack for our working variables. */
298 static struct obstack gcse_obstack;
300 /* Non-zero for each mode that supports (set (reg) (reg)).
301 This is trivially true for integer and floating point values.
302 It may or may not be true for condition codes. */
303 static char can_copy_p[(int) NUM_MACHINE_MODES];
305 /* Non-zero if can_copy_p has been initialized. */
306 static int can_copy_init_p;
struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
318 /* Next entry with the same hash. */
319 struct expr *next_same_hash;
320 /* List of anticipatable occurrences in basic blocks in the function.
321 An "anticipatable occurrence" is one that is the first occurrence in the
322 basic block, the operands are not modified in the basic block prior
323 to the occurrence and the output is not used between the start of
324 the block and the occurrence. */
325 struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
327 An "available occurrence" is one that is the last occurrence in the
328 basic block and the operands are not modified by following statements in
329 the basic block [including this insn]. */
330 struct occr *avail_occr;
331 /* Non-null if the computation is PRE redundant.
332 The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
337 /* Occurrence of an expression.
338 There is one per basic block. If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */
struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
356 /* Expression and copy propagation hash tables.
357 Each hash table is an array of buckets.
358 ??? It is known that if it were an array of entries, structure elements
359 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
360 not clear whether in the final analysis a sufficient amount of memory would
361 be saved as the size of the available expression bitmaps would be larger
362 [one could build a mapping table without holes afterwards though].
363 Someday I'll perform the computation and figure it out. */
365 /* Total size of the expression hash table, in elements. */
366 static unsigned int expr_hash_table_size;
/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
370 static struct expr **expr_hash_table;
372 /* Total size of the copy propagation hash table, in elements. */
373 static unsigned int set_hash_table_size;
/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
377 static struct expr **set_hash_table;
379 /* Mapping of uids to cuids.
380 Only real insns get cuids. */
381 static int *uid_cuid;
/* Highest UID in UID_CUID.  */
static int max_uid;
386 /* Get the cuid of an insn. */
387 #ifdef ENABLE_CHECKING
388 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif
/* Number of cuids.  */
static int max_cuid;
396 /* Mapping of cuids to insns. */
397 static rtx *cuid_insn;
399 /* Get insn from cuid. */
400 #define CUID_INSN(CUID) (cuid_insn[CUID])
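/* For example [hypothetical UIDs]: if the real insns of a function carry
   UIDs 3, 7 and 12, with NOTEs interleaved, then uid_cuid maps 3->0, 7->1
   and 12->2, and cuid_insn[0..2] maps back to the corresponding insns,
   giving a dense index that is convenient for sizing bitmaps.  */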
402 /* Maximum register number in function prior to doing gcse + 1.
403 Registers created during this pass have regno >= max_gcse_regno.
404 This is named with "gcse" to not collide with global of same name. */
405 static unsigned int max_gcse_regno;
/* Maximum number of cse-able expressions found.  */
static int n_exprs;
/* Maximum number of assignments for copy propagation found.  */
static int n_sets;
413 /* Table of registers that are modified.
   For each register, each element is a list of places where the pseudo-reg
   is set.
418 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
419 requires knowledge of which blocks kill which regs [and thus could use
420 a bitmap instead of the lists `reg_set_table' uses].
   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
423 num-regs) [however perhaps it may be useful to keep the data as is]. One
424 advantage of recording things this way is that `reg_set_table' is fairly
425 sparse with respect to pseudo regs but for hard regs could be fairly dense
426 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
427 up functions like compute_transp since in the case of pseudo-regs we only
428 need to iterate over the number of times a pseudo-reg is set, not over the
429 number of basic blocks [clearly there is a bit of a slow down in the cases
430 where a pseudo is set more than once in a block, however it is believed
431 that the net effect is to speed things up]. This isn't done for hard-regs
432 because recording call-clobbered hard-regs in `reg_set_table' at each
433 function call can consume a fair bit of memory, and iterating over
434 hard-regs stored this way in compute_transp will be more expensive. */
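/* For example [hypothetical numbers]: a pseudo-reg 104 set by insns I1 and
   I7 yields a two-element chain at reg_set_table[104] with the most recent
   set at the head, while a pseudo that is never set has a null entry.
   Walking such a chain is what lets compute_transp iterate over the sets of
   a reg instead of over all basic blocks.  */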
typedef struct reg_set
{
438 /* The next setting of this register. */
439 struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;
444 static reg_set **reg_set_table;
446 /* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
449 static int reg_set_table_size;
451 /* Amount to grow `reg_set_table' by when it's full. */
452 #define REG_SET_TABLE_SLOP 100
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except themselves (i.e., loads and stores to a single location).
458 We can then allow movement of these MEM refs with a little special
459 allowance. (all stores copy the same value to the reaching reg used
460 for the loads). This means all values used to store into memory must have
461 no side effects so we can re-issue the setter value.
462 Store Motion uses this structure as an expression table to track stores
463 which look interesting, and might be moveable towards the exit block. */
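/* An illustrative example: a load (set (reg 100) (mem (symbol_ref "x")))
   and a store (set (mem (symbol_ref "x")) (reg 101)) to the same location
   would share one ls_expr entry, with the insns recorded on the loads and
   stores lists respectively; an entry whose location may also be clobbered
   through some other reference would be marked invalid.  */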
struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
468 rtx pattern; /* Pattern of this mem. */
469 rtx loads; /* INSN list of loads seen. */
470 rtx stores; /* INSN list of stores seen. */
471 struct ls_expr * next; /* Next in the list. */
472 int invalid; /* Invalid for some reason. */
473 int index; /* If it maps to a bitmap index. */
474 int hash_index; /* Index when in a hash table. */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
478 /* Head of the list of load/store memory refs. */
479 static struct ls_expr * pre_ldst_mems = NULL;
481 /* Bitmap containing one bit for each register in the program.
482 Used when performing GCSE to track which registers have been set since
483 the start of the basic block. */
484 static sbitmap reg_set_bitmap;
486 /* For each block, a bitmap of registers set in the block.
487 This is used by expr_killed_p and compute_transp.
488 It is computed during hash table computation and not by compute_sets
489 as it includes registers added since the last pass (or between cprop and
490 gcse) and it's currently not easy to realloc sbitmap vectors. */
491 static sbitmap *reg_set_in_block;
493 /* Array, indexed by basic block number for a list of insns which modify
494 memory within that block. */
495 static rtx * modify_mem_list;
497 /* This array parallels modify_mem_list, but is kept canonicalized. */
498 static rtx * canon_modify_mem_list;
499 /* Various variables for statistics gathering. */
501 /* Memory used in a pass.
502 This isn't intended to be absolutely precise. Its intent is only
503 to keep an eye on memory usage. */
504 static int bytes_used;
506 /* GCSE substitutions made. */
507 static int gcse_subst_count;
508 /* Number of copy instructions created. */
509 static int gcse_create_count;
510 /* Number of constants propagated. */
511 static int const_prop_count;
/* Number of copies propagated.  */
513 static int copy_prop_count;
515 /* These variables are used by classic GCSE.
516 Normally they'd be defined a bit later, but `rd_gen' needs to
517 be declared sooner. */
519 /* Each block has a bitmap of each type.
   The length of each block's bitmap is:
522 max_cuid - for reaching definitions
523 n_exprs - for available expressions
   Thus we view the bitmaps as two-dimensional arrays, i.e.,
526 rd_kill[block_num][cuid_num]
527 ae_kill[block_num][expr_num] */
529 /* For reaching defs */
530 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
532 /* for available exprs */
533 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
541 /* The first register to be handled in this pass. */
542 unsigned int min_reg;
543 /* One greater than the last register to be handled in this pass. */
544 unsigned int max_reg;
545 sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
549 static void compute_can_copy PARAMS ((void));
550 static char *gmalloc PARAMS ((unsigned int));
551 static char *grealloc PARAMS ((char *, unsigned int));
552 static char *gcse_alloc PARAMS ((unsigned long));
553 static void alloc_gcse_mem PARAMS ((rtx));
554 static void free_gcse_mem PARAMS ((void));
555 static void alloc_reg_set_mem PARAMS ((int));
556 static void free_reg_set_mem PARAMS ((void));
557 static int get_bitmap_width PARAMS ((int, int, int));
558 static void record_one_set PARAMS ((int, rtx));
559 static void record_set_info PARAMS ((rtx, rtx, void *));
560 static void compute_sets PARAMS ((rtx));
561 static void hash_scan_insn PARAMS ((rtx, int, int));
562 static void hash_scan_set PARAMS ((rtx, rtx, int));
563 static void hash_scan_clobber PARAMS ((rtx, rtx));
564 static void hash_scan_call PARAMS ((rtx, rtx));
565 static int want_to_gcse_p PARAMS ((rtx));
566 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
567 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
568 static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int));
571 static void insert_set_in_table PARAMS ((rtx, rtx));
572 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
573 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
574 static unsigned int hash_string_1 PARAMS ((const char *));
575 static unsigned int hash_set PARAMS ((int, int));
576 static int expr_equiv_p PARAMS ((rtx, rtx));
577 static void record_last_reg_set_info PARAMS ((rtx, int));
578 static void record_last_mem_set_info PARAMS ((rtx));
579 static void record_last_set_info PARAMS ((rtx, rtx, void *));
580 static void compute_hash_table PARAMS ((int));
581 static void alloc_set_hash_table PARAMS ((int));
582 static void free_set_hash_table PARAMS ((void));
583 static void compute_set_hash_table PARAMS ((void));
584 static void alloc_expr_hash_table PARAMS ((unsigned int));
585 static void free_expr_hash_table PARAMS ((void));
586 static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
				     int, int));
589 static struct expr *lookup_expr PARAMS ((rtx));
590 static struct expr *lookup_set PARAMS ((unsigned int, rtx));
591 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
592 static void reset_opr_set_tables PARAMS ((void));
593 static int oprs_not_set_p PARAMS ((rtx, rtx));
594 static void mark_call PARAMS ((rtx));
595 static void mark_set PARAMS ((rtx, rtx));
596 static void mark_clobber PARAMS ((rtx, rtx));
597 static void mark_oprs_set PARAMS ((rtx));
598 static void alloc_cprop_mem PARAMS ((int, int));
599 static void free_cprop_mem PARAMS ((void));
600 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
601 static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      int));
604 static void compute_cprop_data PARAMS ((void));
605 static void find_used_regs PARAMS ((rtx *, void *));
606 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
607 static struct expr *find_avail_set PARAMS ((int, rtx));
608 static int cprop_jump PARAMS ((rtx, rtx, rtx));
610 static int cprop_cc0_jump PARAMS ((rtx, struct reg_use *, rtx));
612 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
613 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
614 static void canon_list_insert PARAMS ((rtx, rtx, void *));
615 static int cprop_insn PARAMS ((rtx, int));
616 static int cprop PARAMS ((int));
617 static int one_cprop_pass PARAMS ((int, int));
618 static void alloc_pre_mem PARAMS ((int, int));
619 static void free_pre_mem PARAMS ((void));
620 static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
					    basic_block));
623 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
624 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
625 static void pre_insert_copies PARAMS ((void));
626 static int pre_delete PARAMS ((void));
627 static int pre_gcse PARAMS ((void));
628 static int one_pre_gcse_pass PARAMS ((int));
629 static void add_label_notes PARAMS ((rtx, rtx));
630 static void alloc_code_hoist_mem PARAMS ((int, int));
631 static void free_code_hoist_mem PARAMS ((void));
632 static void compute_code_hoist_vbeinout PARAMS ((void));
633 static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
					      char *));
636 static void hoist_code PARAMS ((void));
637 static int one_code_hoisting_pass PARAMS ((void));
638 static void alloc_rd_mem PARAMS ((int, int));
639 static void free_rd_mem PARAMS ((void));
640 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
641 static void compute_kill_rd PARAMS ((void));
642 static void compute_rd PARAMS ((void));
643 static void alloc_avail_expr_mem PARAMS ((int, int));
644 static void free_avail_expr_mem PARAMS ((void));
645 static void compute_ae_gen PARAMS ((void));
646 static int expr_killed_p PARAMS ((rtx, basic_block));
647 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					basic_block, int));
650 static rtx computing_insn PARAMS ((struct expr *, rtx));
651 static int def_reaches_here_p PARAMS ((rtx, rtx));
652 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
653 static int handle_avail_expr PARAMS ((rtx, struct expr *));
654 static int classic_gcse PARAMS ((void));
655 static int one_classic_gcse_pass PARAMS ((int));
656 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
657 static void delete_null_pointer_checks_1 PARAMS ((varray_type *, unsigned int *,
658 sbitmap *, sbitmap *,
659 struct null_pointer_info *));
660 static rtx process_insert_insn PARAMS ((struct expr *));
661 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
662 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
663 basic_block, int, char *));
664 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
665 basic_block, char *));
666 static struct ls_expr * ldst_entry PARAMS ((rtx));
667 static void free_ldst_entry PARAMS ((struct ls_expr *));
668 static void free_ldst_mems PARAMS ((void));
669 static void print_ldst_list PARAMS ((FILE *));
670 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
671 static int enumerate_ldsts PARAMS ((void));
672 static inline struct ls_expr * first_ls_expr PARAMS ((void));
673 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
674 static int simple_mem PARAMS ((rtx));
675 static void invalidate_any_buried_refs PARAMS ((rtx));
676 static void compute_ld_motion_mems PARAMS ((void));
677 static void trim_ld_motion_mems PARAMS ((void));
678 static void update_ld_motion_stores PARAMS ((struct expr *));
679 static void reg_set_info PARAMS ((rtx, rtx, void *));
680 static int store_ops_ok PARAMS ((rtx, basic_block));
681 static void find_moveable_store PARAMS ((rtx));
682 static int compute_store_table PARAMS ((void));
683 static int load_kills_store PARAMS ((rtx, rtx));
684 static int find_loads PARAMS ((rtx, rtx));
685 static int store_killed_in_insn PARAMS ((rtx, rtx));
686 static int store_killed_after PARAMS ((rtx, rtx, basic_block));
687 static int store_killed_before PARAMS ((rtx, rtx, basic_block));
688 static void build_store_vectors PARAMS ((void));
689 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
690 static int insert_store PARAMS ((struct ls_expr *, edge));
691 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *,
				  basic_block));
694 static void free_store_memory PARAMS ((void));
695 static void store_motion PARAMS ((void));
697 /* Entry point for global common subexpression elimination.
698 F is the first instruction in the function. */
706 /* Bytes used at start of pass. */
707 int initial_bytes_used;
708 /* Maximum number of bytes used by a pass. */
710 /* Point to release obstack data from for each pass. */
711 char *gcse_obstack_bottom;
713 /* Insertion of instructions on edges can create new basic blocks; we
714 need the original basic block count so that we can properly deallocate
715 arrays sized on the number of basic blocks originally in the cfg. */
717 /* We do not construct an accurate cfg in functions which call
718 setjmp, so just punt to be safe. */
  if (current_function_calls_setjmp)
    return 0;
722 /* Assume that we do not need to run jump optimizations after gcse. */
723 run_jump_opt_after_gcse = 0;
725 /* For calling dump_foo fns from gdb. */
726 debug_stderr = stderr;
729 /* Identify the basic block information for this function, including
730 successors and predecessors. */
731 max_gcse_regno = max_reg_num ();
734 dump_flow_info (file);
736 orig_bb_count = n_basic_blocks;
737 /* Return if there's nothing to do. */
  if (n_basic_blocks <= 1)
    return 0;
741 /* Trying to perform global optimizations on flow graphs which have
742 a high connectivity will take a long time and is unlikely to be
745 In normal circumstances a cfg should have about twice as many edges
746 as blocks. But we do not want to punish small functions which have
747 a couple switch statements. So we require a relatively large number
748 of basic blocks and the ratio of edges to blocks to be high. */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
751 if (warn_disabled_optimization)
752 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }
757 /* If allocating memory for the cprop bitmap would take up too much
758 storage it's better just to disable the optimization. */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
761 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
764 warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }
770 /* See what modes support reg/reg copy operations. */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }
777 gcc_obstack_init (&gcse_obstack);
781 init_alias_analysis ();
782 /* Record where pseudo-registers are set. This data is kept accurate
783 during each pass. ??? We could also record hard-reg information here
784 [since it's unchanging], however it is currently done during hash table
787 It may be tempting to compute MEM set information here too, but MEM sets
788 will be subject to code motion one day and thus we need to compute
789 information about memory sets when we build the hash tables. */
  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);
795 initial_bytes_used = bytes_used;
797 gcse_obstack_bottom = gcse_alloc (1);
799 while (changed && pass < MAX_GCSE_PASSES)
803 fprintf (file, "GCSE pass %d\n\n", pass + 1);
805 /* Initialize bytes_used to the space for the pred/succ lists,
806 and the reg_set_table data. */
807 bytes_used = initial_bytes_used;
809 /* Each pass may create new registers, so recalculate each time. */
810 max_gcse_regno = max_reg_num ();
      /* Don't allow constant propagation to modify jumps
	 this pass.  */
816 changed = one_cprop_pass (pass + 1, 0);
      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	changed |= one_pre_gcse_pass (pass + 1);
823 /* We may have just created new basic blocks. Release and
	 recompute various things which are sized on the number of
	 basic blocks.  */
830 for (i = 0; i < orig_bb_count; i++)
832 if (modify_mem_list[i])
833 free_INSN_LIST_list (modify_mem_list + i);
834 if (canon_modify_mem_list[i])
835 free_INSN_LIST_list (canon_modify_mem_list + i);
      modify_mem_list
	= (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
839 canon_modify_mem_list
840 = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
841 memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
842 memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
843 orig_bb_count = n_basic_blocks;
846 alloc_reg_set_mem (max_reg_num ());
848 run_jump_opt_after_gcse = 1;
851 if (max_pass_bytes < bytes_used)
852 max_pass_bytes = bytes_used;
854 /* Free up memory, then reallocate for code hoisting. We can
855 not re-use the existing allocated memory because the tables
856 will not have info for the insns or registers created by
857 partial redundancy elimination. */
  /* It does not make sense to run code hoisting unless we are optimizing
     for code size -- it rarely makes programs faster, and can make
862 them bigger if we did partial redundancy elimination (when optimizing
863 for space, we use a classic gcse algorithm instead of partial
864 redundancy algorithms). */
867 max_gcse_regno = max_reg_num ();
869 changed |= one_code_hoisting_pass ();
872 if (max_pass_bytes < bytes_used)
873 max_pass_bytes = bytes_used;
878 fprintf (file, "\n");
882 obstack_free (&gcse_obstack, gcse_obstack_bottom);
886 /* Do one last pass of copy propagation, including cprop into
887 conditional jumps. */
889 max_gcse_regno = max_reg_num ();
891 /* This time, go ahead and allow cprop to alter jumps. */
892 one_cprop_pass (pass + 1, 1);
897 fprintf (file, "GCSE of %s: %d basic blocks, ",
898 current_function_name, n_basic_blocks);
899 fprintf (file, "%d pass%s, %d bytes\n\n",
900 pass, pass > 1 ? "es" : "", max_pass_bytes);
903 obstack_free (&gcse_obstack, NULL);
905 /* We are finished with alias. */
906 end_alias_analysis ();
907 allocate_reg_info (max_reg_num (), FALSE, FALSE);
909 if (!optimize_size && flag_gcse_sm)
    store_motion ();
912 return run_jump_opt_after_gcse;
915 /* Misc. utilities. */
917 /* Compute which modes support reg/reg copy operations. */
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
926 memset (can_copy_p, 0, NUM_MACHINE_MODES);
929 for (i = 0; i < NUM_MACHINE_MODES; i++)
930 if (GET_MODE_CLASS (i) == MODE_CC)
#ifdef AVOID_CCMODE_COPIES
	can_copy_p[i] = 0;
#else
935 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
936 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy_p[i] = 1;
#endif
947 /* Cover function to xmalloc to record bytes allocated. */
  bytes_used += size;
  return xmalloc (size);
957 /* Cover function to xrealloc.
958 We don't record the additional size since we don't know it.
959 It won't affect memory usage stats much anyway. */
966 return xrealloc (ptr, size);
969 /* Cover function to obstack_alloc.
970 We don't need to record the bytes allocated here since
971 obstack_chunk_alloc is set to gmalloc. */
977 return (char *) obstack_alloc (&gcse_obstack, size);
980 /* Allocate memory for the cuid mapping array,
981 and reg/memory set tracking tables.
983 This is called at the start of each pass. */
992 /* Find the largest UID and create a mapping from UIDs to CUIDs.
993 CUIDs are like UIDs except they increase monotonically, have no gaps,
994 and only apply to real insns. */
996 max_uid = get_max_uid ();
997 n = (max_uid + 1) * sizeof (int);
998 uid_cuid = (int *) gmalloc (n);
999 memset ((char *) uid_cuid, 0, n);
1000 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
1011 n = (max_cuid + 1) * sizeof (rtx);
1012 cuid_insn = (rtx *) gmalloc (n);
1013 memset ((char *) cuid_insn, 0, n);
1014 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;
1018 /* Allocate vars to track sets of regs. */
1019 reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);
1021 /* Allocate vars to track sets of regs, memory per block. */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
1026 modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
1027 canon_modify_mem_list = (rtx *) gmalloc (n_basic_blocks * sizeof (rtx *));
1028 memset ((char *) modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
1029 memset ((char *) canon_modify_mem_list, 0, n_basic_blocks * sizeof (rtx *));
1032 /* Free memory allocated by alloc_gcse_mem. */
1040 free (reg_set_bitmap);
1042 sbitmap_vector_free (reg_set_in_block);
  /* Re-cache any INSN_LIST nodes we have allocated.  */
1047 for (i = 0; i < n_basic_blocks; i++)
1049 if (modify_mem_list[i])
1050 free_INSN_LIST_list (modify_mem_list + i);
1051 if (canon_modify_mem_list[i])
1052 free_INSN_LIST_list (canon_modify_mem_list + i);
1055 free (modify_mem_list);
1056 free (canon_modify_mem_list);
1057 modify_mem_list = 0;
1058 canon_modify_mem_list = 0;
1062 /* Many of the global optimization algorithms work by solving dataflow
1063 equations for various expressions. Initially, some local value is
1064 computed for each expression in each block. Then, the values across the
1065 various blocks are combined (by following flow graph edges) to arrive at
1066 global values. Conceptually, each set of equations is independent. We
1067 may therefore solve all the equations in parallel, solve them one at a
1068 time, or pick any intermediate approach.
1070 When you're going to need N two-dimensional bitmaps, each X (say, the
1071 number of blocks) by Y (say, the number of expressions), call this
1072 function. It's not important what X and Y represent; only that Y
1073 correspond to the things that can be done in parallel. This function will
1074 return an appropriate chunking factor C; you should solve C sets of
1075 equations in parallel. By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */
static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
1085 /* It's not really worth figuring out *exactly* how much memory will
1086 be used by a particular choice. The important thing is to get
1087 something approximately right. */
1088 size_t max_bitmap_memory = 10 * 1024 * 1024;
  /* The number of bytes we'd use for a single column of minimum
     width.  */
1092 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;
  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
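/* A worked example [illustrative figures, assuming an 8-byte
   SBITMAP_ELT_TYPE, so SBITMAP_ELT_BITS == 64]: with n = 2 bitmaps and
   x = 1000 blocks, column_size is 16000 bytes.  If Y is too large to solve
   everything at once within the 10MB cap, the chunk returned is
   64 * ceil (10485760 / 16000) = 64 * 656 = 41984 equations per pass.  */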
1105 /* Compute the local properties of each recorded expression.
   Local properties are those that are defined by the block, irrespective of
   other blocks.
   An expression is transparent in a block if its operands are not modified
   in the block.
1113 An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation were moved to the end of the block.
1117 An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation were moved to the beginning of the block.
1121 We call this routine for cprop, pre and code hoisting. They all compute
1122 basically the same information and thus can easily share this code.
1124 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1125 properties. If NULL, then it is not necessary to compute or record that
1126 particular property.
1128 SETP controls which hash table to look at. If zero, this routine looks at
1129 the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
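/* An illustrative example: in a block containing

	(set (reg 100) (plus (reg 1) (reg 2)))
	(set (reg 1) (const_int 0))

   the expression (plus (reg 1) (reg 2)) is locally anticipatable [it is the
   first occurrence and its operands are unmodified before it] but neither
   transparent nor computed, since (reg 1) is modified later in the block.  */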
1134 compute_local_properties (transp, comp, antloc, setp)
1140 unsigned int i, hash_table_size;
1141 struct expr **hash_table;
1143 /* Initialize any bitmaps that were passed in. */
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);
1157 /* We use the same code for cprop, pre and hoisting. For cprop
1158 we care about the set hash table, for pre and hoisting we
1159 care about the expr hash table. */
1160 hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
1161 hash_table = setp ? set_hash_table : expr_hash_table;
1163 for (i = 0; i < hash_table_size; i++)
1167 for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
1169 int indx = expr->bitmap_index;
1172 /* The expression is transparent in this block if it is not killed.
1173 We start by assuming all are transparent [none are killed], and
1174 then reset the bits for those that are. */
1176 compute_transp (expr->expr, indx, transp, setp);
1178 /* The occurrences recorded in antic_occr are exactly those that
1179 we want to set to non-zero in ANTLOC. */
1181 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1183 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
	      /* While we're scanning the table, this is a good place to
		 initialize this flag.  */
1187 occr->deleted_p = 0;
1190 /* The occurrences recorded in avail_occr are exactly those that
1191 we want to set to non-zero in COMP. */
1193 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1195 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
	      /* While we're scanning the table, this is a good place to
		 initialize this flag.  */
	      occr->copied_p = 0;
	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
1204 expr->reaching_reg = 0;
1209 /* Register set information.
1211 `reg_set_table' records where each register is set or otherwise
1214 static struct obstack reg_set_obstack;
1217 alloc_reg_set_mem (n_regs)
1222 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1223 n = reg_set_table_size * sizeof (struct reg_set *);
1224 reg_set_table = (struct reg_set **) gmalloc (n);
1225 memset ((char *) reg_set_table, 0, n);
  gcc_obstack_init (&reg_set_obstack);
1233 free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
1237 /* Record REGNO in the reg_set table. */
1240 record_one_set (regno, insn)
1244 /* Allocate a new reg_set element and link it onto the list. */
1245 struct reg_set *new_reg_info;
1247 /* If the table isn't big enough, enlarge it. */
1248 if (regno >= reg_set_table_size)
1250 int new_size = regno + REG_SET_TABLE_SLOP;
1253 = (struct reg_set **) grealloc ((char *) reg_set_table,
1254 new_size * sizeof (struct reg_set *));
1255 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1256 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1257 reg_set_table_size = new_size;
  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1261 sizeof (struct reg_set));
1262 bytes_used += sizeof (struct reg_set);
1263 new_reg_info->insn = insn;
1264 new_reg_info->next = reg_set_table[regno];
1265 reg_set_table[regno] = new_reg_info;
1268 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1269 an insn. The DATA is really the instruction in which the SET is
1273 record_set_info (dest, setter, data)
1274 rtx dest, setter ATTRIBUTE_UNUSED;
1277 rtx record_set_insn = (rtx) data;
1279 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1280 record_one_set (REGNO (dest), record_set_insn);
1283 /* Scan the function and record each set of each pseudo-register.
1285 This is called once, at the start of the gcse pass. See the comments for
   `reg_set_table' for further documentation.  */
1294 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1296 note_stores (PATTERN (insn), record_set_info, insn);
1299 /* Hash table support. */
1301 /* For each register, the cuid of the first/last insn in the block to set it,
1302 or -1 if not set. */
1303 #define NEVER_SET -1
1304 static int *reg_first_set;
1305 static int *reg_last_set;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */
1315 static rtx test_insn = 0;
1316 int num_clobbers = 0;
1319 switch (GET_CODE (x))
1332 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1333 if (general_operand (x, GET_MODE (x)))
1335 else if (GET_MODE (x) == VOIDmode)
1338 /* Otherwise, check if we can make a valid insn from it. First initialize
1339 our test insn if we haven't already. */
1343 = make_insn_raw (gen_rtx_SET (VOIDmode,
1344 gen_rtx_REG (word_mode,
1345 FIRST_PSEUDO_REGISTER * 2),
1347 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1348 ggc_add_rtx_root (&test_insn, 1);
  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
1353 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1354 SET_SRC (PATTERN (test_insn)) = x;
1355 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1356 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1359 /* Return non-zero if the operands of expression X are unchanged from the
1360 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1361 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1364 oprs_unchanged_p (x, insn, avail_p)
1375 code = GET_CODE (x);
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));
    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn), INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1416 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1420 /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
1424 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
1429 else if (fmt[i] == 'E')
1430 for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
1438 /* Used for communication between mems_conflict_for_gcse_p and
1439 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1440 conflict between two memory references. */
1441 static int gcse_mems_conflict_p;
1443 /* Used for communication between mems_conflict_for_gcse_p and
1444 load_killed_in_block_p. A memory reference for a load instruction,
1445 mems_conflict_for_gcse_p will see if a memory store conflicts with
1446 this memory load. */
1447 static rtx gcse_mem_operand;
1449 /* DEST is the output of an instruction. If it is a memory reference, and
1450 possibly conflicts with the load found in gcse_mem_operand, then set
1451 gcse_mems_conflict_p to a nonzero value. */
1454 mems_conflict_for_gcse_p (dest, setter, data)
1455 rtx dest, setter ATTRIBUTE_UNUSED;
1456 void *data ATTRIBUTE_UNUSED;
1458 while (GET_CODE (dest) == SUBREG
1459 || GET_CODE (dest) == ZERO_EXTRACT
1460 || GET_CODE (dest) == SIGN_EXTRACT
1461 || GET_CODE (dest) == STRICT_LOW_PART)
1462 dest = XEXP (dest, 0);
1464 /* If DEST is not a MEM, then it will not conflict with the load. Note
1465 that function calls are assumed to clobber memory, but are handled
  if (GET_CODE (dest) != MEM)
    return;
1470 /* If we are setting a MEM in our list of specially recognized MEMs,
1471 don't mark as killed this time. */
1473 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1475 if (!find_rtx_in_ldst (dest))
1476 gcse_mems_conflict_p = 1;
  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
1482 gcse_mems_conflict_p = 1;
1485 /* Return nonzero if the expression in X (a memory reference) is killed
1486 in block BB before or after the insn with the CUID in UID_LIMIT.
1487 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */
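/* For example, to ask whether a load from X survives to the end of BB,
   pass the load's cuid with AVAIL_P nonzero; any recorded memory-modifying
   insn after that cuid which may conflict with X kills it.  */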
1494 load_killed_in_block_p (bb, uid_limit, x, avail_p)
  rtx list_entry = modify_mem_list[bb->index];
  rtx setter;

  while (list_entry)
    {
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}
1514 setter = XEXP (list_entry, 0);
1516 /* If SETTER is a call everything is clobbered. Note that calls
1517 to pure functions are never put on the list, so we need not
1518 worry about them. */
1519 if (GET_CODE (setter) == CALL_INSN)
1522 /* SETTER must be an INSN of some kind that sets memory. Call
1523 note_stores to examine each hunk of memory that is modified.
1525 The note_stores interface is pretty limited, so we have to
1526 communicate via global variables. Yuk. */
1527 gcse_mem_operand = x;
1528 gcse_mems_conflict_p = 0;
1529 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }

  return 0;
}
1537 /* Return non-zero if the operands of expression X are unchanged from
1538 the start of INSN's basic block up to but not including INSN. */
1541 oprs_anticipatable_p (x, insn)
1544 return oprs_unchanged_p (x, insn, 0);
1547 /* Return non-zero if the operands of expression X are unchanged from
1548 INSN to the end of INSN's basic block. */
1551 oprs_available_p (x, insn)
1554 return oprs_unchanged_p (x, insn, 1);
1557 /* Hash expression X.
1559 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1560 indicating if a volatile operand is found or if the expression contains
1561 something we don't want to insert in the table.
1563 ??? One might want to merge this with canon_hash. Later. */
1566 hash_expr (x, mode, do_not_record_p, hash_table_size)
1568 enum machine_mode mode;
1569 int *do_not_record_p;
1570 int hash_table_size;
1574 *do_not_record_p = 0;
1576 hash = hash_expr_1 (x, mode, do_not_record_p);
1577 return hash % hash_table_size;
1580 /* Hash a string. Just add its bytes up. */
1582 static inline unsigned
1587 const unsigned char *p = (const unsigned char *)ps;
1596 /* Subroutine of hash_expr to do the actual work. */
1599 hash_expr_1 (x, mode, do_not_record_p)
1601 enum machine_mode mode;
1602 int *do_not_record_p;
1609 /* Used to turn recursion into iteration. We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */
1617 code = GET_CODE (x);
1621 hash += ((unsigned int) REG << 7) + REGNO (x);
1625 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1626 + (unsigned int) INTVAL (x));
1630 /* This is like the general case, except that it only counts
1631 the integers representing the constant. */
1632 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1633 if (GET_MODE (x) != VOIDmode)
1634 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1635 hash += (unsigned int) XWINT (x, i);
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1638 + (unsigned int) CONST_DOUBLE_HIGH (x));
1641 /* Assume there is only one rtx object for any given label. */
1643 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1644 differences and differences between each stage's debugging dumps. */
1645 hash += (((unsigned int) LABEL_REF << 7)
1646 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1651 /* Don't hash on the symbol's address to avoid bootstrap differences.
1652 Different hash values may cause expressions to be recorded in
1653 different orders and thus different registers to be used in the
1654 final assembler. This also avoids differences in the dump files
1655 between various stages. */
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);
	unsigned int h = 0;

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */
1662 hash += ((unsigned int) SYMBOL_REF << 7) + h;
    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
1673 hash += (unsigned int) MEM;
1674 hash += MEM_ALIAS_SET (x);
1685 case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
1697 /* We don't want to take the filename and line into account. */
1698 hash += (unsigned) code + (unsigned) GET_MODE (x)
1699 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1700 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1701 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1703 if (ASM_OPERANDS_INPUT_LENGTH (x))
1705 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1707 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1708 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
		       + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
					(x, i)));
1714 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1715 x = ASM_OPERANDS_INPUT (x, 0);
1716 mode = GET_MODE (x);
1726 hash += (unsigned) code + (unsigned) GET_MODE (x);
1727 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1731 /* If we are about to do the last recursive call
1732 needed at this level, change it into iteration.
1733 This function is called enough to be worth it. */
1740 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
1745 else if (fmt[i] == 'E')
1746 for (j = 0; j < XVECLEN (x, i); j++)
1748 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
1753 else if (fmt[i] == 's')
1754 hash += hash_string_1 (XSTR (x, i));
1755 else if (fmt[i] == 'i')
1756 hash += (unsigned int) XINT (x, i);
1764 /* Hash a set of register REGNO.
1766 Sets are hashed on the register that is set. This simplifies the PRE copy
1769 ??? May need to make things more elaborate. Later, as necessary. */
static unsigned int
hash_set (regno, hash_table_size)
     int regno;
1774 int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
1782 /* Return non-zero if exp1 is equivalent to exp2.
1783 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1790 register enum rtx_code code;
1791 register const char *fmt;
  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return x == y;
1799 code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;
1803 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;
1814 return INTVAL (x) == INTVAL (y);
1817 return XEXP (x, 0) == XEXP (y, 0);
1820 return XSTR (x, 0) == XSTR (y, 0);
1823 return REGNO (x) == REGNO (y);
1826 /* Can't merge two expressions in different alias sets, since we can
1827 decide that the expression is transparent in a block when it isn't,
1828 due to it being set with the different alias set. */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;
1833 /* For commutative operations, check both orders. */
1841 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1842 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1843 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1844 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1847 /* We don't use the generic code below because we want to
1848 disregard filename and line numbers. */
1850 /* A volatile asm isn't equivalent to any other. */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;
1854 if (GET_MODE (x) != GET_MODE (y)
1855 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1856 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1857 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1858 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;
1862 if (ASM_OPERANDS_INPUT_LENGTH (x))
1864 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1865 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1866 ASM_OPERANDS_INPUT (y, i))
1867 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1868 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1878 /* Compare the elements. If any pair of corresponding elements
1879 fail to match, return 0 for the whole thing. */
1881 fmt = GET_RTX_FORMAT (code);
1882 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
1925 /* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.
1929 MODE is the mode of the value X is being stored into.
1930 It is only used if X is a CONST_INT.
1932 ANTIC_P is non-zero if X is an anticipatable expression.
1933 AVAIL_P is non-zero if X is an available expression. */
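/* For example, if a block computes (plus (reg 1) (reg 2)) twice, the first
   occurrence may end up on the antic_occr list and the last on the
   avail_occr list; a single occurrence whose operands are unmodified on
   both sides of it within its block is recorded on both lists.  */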
1936 insert_expr_in_table (x, mode, insn, antic_p, avail_p)
1938 enum machine_mode mode;
1940 int antic_p, avail_p;
  int found, do_not_record_p;
  unsigned int hash;
1944 struct expr *cur_expr, *last_expr = NULL;
1945 struct occr *antic_occr, *avail_occr;
1946 struct occr *last_occr = NULL;
1948 hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);
1950 /* Do not insert expression in table if it contains volatile operands,
1951 or if hash_expr determines the expression is something we don't want
1952 to or can't handle. */
  if (do_not_record_p)
    return;
1956 cur_expr = expr_hash_table[hash];
1959 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
1963 last_expr = cur_expr;
1964 cur_expr = cur_expr->next_same_hash;
1969 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1970 bytes_used += sizeof (struct expr);
1971 if (expr_hash_table[hash] == NULL)
1972 /* This is the first pattern that hashed to this index. */
1973 expr_hash_table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
1976 last_expr->next_same_hash = cur_expr;
      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
1980 cur_expr->bitmap_index = n_exprs++;
1981 cur_expr->next_same_hash = NULL;
1982 cur_expr->antic_occr = NULL;
1983 cur_expr->avail_occr = NULL;
1986 /* Now record the occurrence(s). */
1989 antic_occr = cur_expr->antic_occr;
1991 /* Search for another occurrence in the same basic block. */
1992 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
1996 last_occr = antic_occr;
1997 antic_occr = antic_occr->next;
2001 /* Found another instance of the expression in the same basic block.
2002 Prefer the currently recorded one. We want the first one in the
2003 block and the block is scanned from start to end. */
2004 ; /* nothing to do */
2007 /* First occurrence of this expression in this basic block. */
2008 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2009 bytes_used += sizeof (struct occr);
2010 /* First occurrence of this expression in any block? */
2011 if (cur_expr->antic_occr == NULL)
2012 cur_expr->antic_occr = antic_occr;
2014 last_occr->next = antic_occr;
2016 antic_occr->insn = insn;
2017 antic_occr->next = NULL;
2023 avail_occr = cur_expr->avail_occr;
2025 /* Search for another occurrence in the same basic block. */
2026 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2028 /* If an occurrence isn't found, save a pointer to the end of the list. */
2030 last_occr = avail_occr;
2031 avail_occr = avail_occr->next;
2035 /* Found another instance of the expression in the same basic block.
2036 Prefer this occurrence to the currently recorded one. We want
2037 the last one in the block and the block is scanned from start to end. */
2039 avail_occr->insn = insn;
2042 /* First occurrence of this expression in this basic block. */
2043 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2044 bytes_used += sizeof (struct occr);
2046 /* First occurrence of this expression in any block? */
2047 if (cur_expr->avail_occr == NULL)
2048 cur_expr->avail_occr = avail_occr;
2050 last_occr->next = avail_occr;
2052 avail_occr->insn = insn;
2053 avail_occr->next = NULL;
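/* Illustrative sketch (hypothetical insns, not from the source): for a
   block containing

     insn 10: (set (reg 80) (plus (reg 70) (reg 71)))
     insn 12: (set (reg 81) (plus (reg 70) (reg 71)))

   with no intervening set of reg 70 or reg 71, both insns arrive here
   with ANTIC_P and AVAIL_P nonzero; the code above keeps insn 10 as the
   block's antic_occr (first occurrence wins) and ends with insn 12 as
   its avail_occr (last occurrence wins). */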
2058 /* Insert pattern X in INSN in the hash table.
2059 X is a SET of a reg to either another reg or a constant.
2060 If it is already present, record it as the last occurrence in INSN's basic block.
2064 insert_set_in_table (x, insn)
2070 struct expr *cur_expr, *last_expr = NULL;
2071 struct occr *cur_occr, *last_occr = NULL;
2073 if (GET_CODE (x) != SET
2074 || GET_CODE (SET_DEST (x)) != REG)
2077 hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);
2079 cur_expr = set_hash_table[hash];
2082 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2084 /* If the expression isn't found, save a pointer to the end of the list. */
2086 last_expr = cur_expr;
2087 cur_expr = cur_expr->next_same_hash;
2092 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2093 bytes_used += sizeof (struct expr);
2094 if (set_hash_table[hash] == NULL)
2095 /* This is the first pattern that hashed to this index. */
2096 set_hash_table[hash] = cur_expr;
2098 /* Add EXPR to end of this hash chain. */
2099 last_expr->next_same_hash = cur_expr;
2101 /* Set the fields of the expr element.
2102 We must copy X because it can be modified when copy propagation is
2103 performed on its operands. */
2104 cur_expr->expr = copy_rtx (x);
2105 cur_expr->bitmap_index = n_sets++;
2106 cur_expr->next_same_hash = NULL;
2107 cur_expr->antic_occr = NULL;
2108 cur_expr->avail_occr = NULL;
2111 /* Now record the occurrence. */
2112 cur_occr = cur_expr->avail_occr;
2114 /* Search for another occurrence in the same basic block. */
2115 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2117 /* If an occurrence isn't found, save a pointer to the end of the list. */
2119 last_occr = cur_occr;
2120 cur_occr = cur_occr->next;
2124 /* Found another instance of the expression in the same basic block.
2125 Prefer this occurrence to the currently recorded one. We want the
2126 last one in the block and the block is scanned from start to end. */
2127 cur_occr->insn = insn;
2130 /* First occurrence of this expression in this basic block. */
2131 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2132 bytes_used += sizeof (struct occr);
2134 /* First occurrence of this expression in any block? */
2135 if (cur_expr->avail_occr == NULL)
2136 cur_expr->avail_occr = cur_occr;
2138 last_occr->next = cur_occr;
2140 cur_occr->insn = insn;
2141 cur_occr->next = NULL;
2145 /* Scan pattern PAT of INSN and add an entry to the hash table. If SET_P is
2146 non-zero, this is for the assignment hash table, otherwise it is for the
2147 expression hash table. */
2150 hash_scan_set (pat, insn, set_p)
2154 rtx src = SET_SRC (pat);
2155 rtx dest = SET_DEST (pat);
2158 if (GET_CODE (src) == CALL)
2159 hash_scan_call (src, insn);
2161 else if (GET_CODE (dest) == REG)
2163 unsigned int regno = REGNO (dest);
2166 /* If this is a single set and we are doing constant propagation,
2167 see if a REG_NOTE shows it to be equivalent to a constant. */
2168 if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2169 && CONSTANT_P (XEXP (note, 0)))
2170 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2172 /* Only record sets of pseudo-regs in the hash table. */
2174 && regno >= FIRST_PSEUDO_REGISTER
2175 /* Don't GCSE something if we can't do a reg/reg copy. */
2176 && can_copy_p [GET_MODE (dest)]
2177 /* Is SET_SRC something we want to gcse? */
2178 && want_to_gcse_p (src)
2179 /* Don't CSE a nop. */
2180 && ! set_noop_p (pat)
2181 /* Don't GCSE if it has an attached REG_EQUIV note.
2182 At this point only function parameters should have
2183 REG_EQUIV notes, and if the argument slot is used somewhere
2184 explicitly, it means the address of the parameter has been taken,
2185 so we should not extend the lifetime of the pseudo. */
2186 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2187 || GET_CODE (XEXP (note, 0)) != MEM))
2189 /* An expression is not anticipatable if its operands are
2190 modified before this insn or if this is not the only SET in this insn. */
2192 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2193 /* An expression is not available if its operands are
2194 subsequently modified, including this insn. */
2195 int avail_p = oprs_available_p (src, insn);
2197 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
2200 /* Record sets for constant/copy propagation. */
2202 && regno >= FIRST_PSEUDO_REGISTER
2203 && ((GET_CODE (src) == REG
2204 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2205 && can_copy_p [GET_MODE (dest)]
2206 && REGNO (src) != regno)
2207 || GET_CODE (src) == CONST_INT
2208 || GET_CODE (src) == SYMBOL_REF
2209 || GET_CODE (src) == CONST_DOUBLE)
2210 /* A copy is not available if its src or dest is subsequently
2211 modified. Here we want to search from INSN+1 on, but
2212 oprs_available_p searches from INSN on. */
2213 && (insn == BLOCK_END (BLOCK_NUM (insn))
2214 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2215 && oprs_available_p (pat, tmp))))
2216 insert_set_in_table (pat, insn);
2221 hash_scan_clobber (x, insn)
2222 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2224 /* Currently nothing to do. */
2228 hash_scan_call (x, insn)
2229 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2231 /* Currently nothing to do. */
2234 /* Process INSN and add hash table entries as appropriate.
2236 Only available expressions that set a single pseudo-reg are recorded.
2238 Single sets in a PARALLEL could be handled, but it's an extra complication
2239 that isn't dealt with right now. The trick is handling the CLOBBERs that
2240 are also in the PARALLEL. Later.
2242 If SET_P is non-zero, this is for the assignment hash table,
2243 otherwise it is for the expression hash table.
2244 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2245 not record any expressions. */
2248 hash_scan_insn (insn, set_p, in_libcall_block)
2251 int in_libcall_block;
2253 rtx pat = PATTERN (insn);
2256 if (in_libcall_block)
2259 /* Pick out the sets of INSN and for other forms of instructions record
2260 what's been modified. */
2262 if (GET_CODE (pat) == SET)
2263 hash_scan_set (pat, insn, set_p);
2264 else if (GET_CODE (pat) == PARALLEL)
2265 for (i = 0; i < XVECLEN (pat, 0); i++)
2267 rtx x = XVECEXP (pat, 0, i);
2269 if (GET_CODE (x) == SET)
2270 hash_scan_set (x, insn, set_p);
2271 else if (GET_CODE (x) == CLOBBER)
2272 hash_scan_clobber (x, insn);
2273 else if (GET_CODE (x) == CALL)
2274 hash_scan_call (x, insn);
2277 else if (GET_CODE (pat) == CLOBBER)
2278 hash_scan_clobber (pat, insn);
2279 else if (GET_CODE (pat) == CALL)
2280 hash_scan_call (pat, insn);
2284 dump_hash_table (file, name, table, table_size, total_size)
2287 struct expr **table;
2288 int table_size, total_size;
2291 /* Flattened out table, so it's printed in proper order. */
2292 struct expr **flat_table;
2293 unsigned int *hash_val;
2297 = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
2298 hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));
2300 for (i = 0; i < table_size; i++)
2301 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
2303 flat_table[expr->bitmap_index] = expr;
2304 hash_val[expr->bitmap_index] = i;
2307 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2308 name, table_size, total_size);
2310 for (i = 0; i < total_size; i++)
2311 if (flat_table[i] != 0)
2313 expr = flat_table[i];
2314 fprintf (file, "Index %d (hash value %d)\n ",
2315 expr->bitmap_index, hash_val[i]);
2316 print_rtl (file, expr->expr);
2317 fprintf (file, "\n");
2320 fprintf (file, "\n");
2326 /* Record register first/last/block set information for REGNO in INSN.
2328 reg_first_set records the first place in the block where the register
2329 is set and is used to compute "anticipatability".
2331 reg_last_set records the last place in the block where the register
2332 is set and is used to compute "availability".
2334 reg_set_in_block records whether the register is set in the block
2335 and is used to compute "transparency". */
2338 record_last_reg_set_info (insn, regno)
2342 if (reg_first_set[regno] == NEVER_SET)
2343 reg_first_set[regno] = INSN_CUID (insn);
2345 reg_last_set[regno] = INSN_CUID (insn);
2346 SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
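/* For example (hypothetical insns): if a block contains

     insn 7: (set (reg 75) ...)
     insn 9: (set (reg 75) ...)

   then after scanning it reg_first_set[75] == INSN_CUID (insn 7),
   reg_last_set[75] == INSN_CUID (insn 9), and bit 75 is set in the
   block's reg_set_in_block bitmap. */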
2350 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2351 Note we store a pair of elements in the list, so they have to be
2352 taken off pairwise. */
2355 canon_list_insert (dest, unused1, v_insn)
2356 rtx dest ATTRIBUTE_UNUSED;
2357 rtx unused1 ATTRIBUTE_UNUSED;
2360 rtx dest_addr, insn;
2362 while (GET_CODE (dest) == SUBREG
2363 || GET_CODE (dest) == ZERO_EXTRACT
2364 || GET_CODE (dest) == SIGN_EXTRACT
2365 || GET_CODE (dest) == STRICT_LOW_PART)
2366 dest = XEXP (dest, 0);
2368 /* If DEST is not a MEM, then it will not conflict with a load. Note
2369 that function calls are assumed to clobber memory, but are handled elsewhere. */
2372 if (GET_CODE (dest) != MEM)
2375 dest_addr = get_addr (XEXP (dest, 0));
2376 dest_addr = canon_rtx (dest_addr);
2377 insn = (rtx) v_insn;
2379 canon_modify_mem_list[BLOCK_NUM (insn)] =
2380 alloc_INSN_LIST (dest_addr, canon_modify_mem_list[BLOCK_NUM (insn)]);
2381 canon_modify_mem_list[BLOCK_NUM (insn)] =
2382 alloc_INSN_LIST (dest, canon_modify_mem_list[BLOCK_NUM (insn)]);
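/* Cross-reference: compute_transp below pops these entries two at a
   time, first the canonical MEM and then its canonicalized address,
   matching the order established by the two alloc_INSN_LIST calls
   above (the MEM, pushed last, sits at the head). */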
2385 /* Record memory modification information for INSN. We do not actually care
2386 about the memory location(s) that are set, or even how they are set (consider
2387 a CALL_INSN). We merely need to record which insns modify memory. */
2390 record_last_mem_set_info (insn)
2393 /* load_killed_in_block_p will handle the case of calls clobbering everything. */
2395 modify_mem_list[BLOCK_NUM (insn)] =
2396 alloc_INSN_LIST (insn, modify_mem_list[BLOCK_NUM (insn)]);
2398 if (GET_CODE (insn) == CALL_INSN)
2400 /* Note that traversals of this list (other than for freeing)
2401 will break after encountering a CALL_INSN. So, there's no
2402 need to insert a pair of items, as canon_list_insert does. */
2403 canon_modify_mem_list[BLOCK_NUM (insn)] =
2404 alloc_INSN_LIST (insn, canon_modify_mem_list[BLOCK_NUM (insn)]);
2407 note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
2410 /* Called from compute_hash_table via note_stores to handle one
2411 SET or CLOBBER in an insn. DATA is really the instruction in which
2412 the SET is taking place. */
2415 record_last_set_info (dest, setter, data)
2416 rtx dest, setter ATTRIBUTE_UNUSED;
2419 rtx last_set_insn = (rtx) data;
2421 if (GET_CODE (dest) == SUBREG)
2422 dest = SUBREG_REG (dest);
2424 if (GET_CODE (dest) == REG)
2425 record_last_reg_set_info (last_set_insn, REGNO (dest));
2426 else if (GET_CODE (dest) == MEM
2427 /* Ignore pushes, they clobber nothing. */
2428 && ! push_operand (dest, GET_MODE (dest)))
2429 record_last_mem_set_info (last_set_insn);
2432 /* Top level function to create an expression or assignment hash table.
2434 Expression entries are placed in the hash table if
2435 - they are of the form (set (pseudo-reg) src),
2436 - src is something we want to perform GCSE on,
2437 - none of the operands are subsequently modified in the block
2439 Assignment entries are placed in the hash table if
2440 - they are of the form (set (pseudo-reg) src),
2441 - src is something we want to perform const/copy propagation on,
2442 - none of the operands or target are subsequently modified in the block
2444 Currently src must be a pseudo-reg or a const_int.
2446 F is the first insn.
2447 SET_P is non-zero for computing the assignment hash table. */
2450 compute_hash_table (set_p)
2455 /* While we compute the hash table we also compute a bit array of which
2456 registers are set in which blocks.
2457 ??? This isn't needed during const/copy propagation, but it's cheap to compute. Later. */
2459 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
2461 /* Re-cache any INSN_LIST nodes we have allocated. */
2464 for (i = 0; i < n_basic_blocks; i++)
2466 if (modify_mem_list[i])
2467 free_INSN_LIST_list (modify_mem_list + i);
2468 if (canon_modify_mem_list[i])
2469 free_INSN_LIST_list (canon_modify_mem_list + i);
2472 /* Some working arrays used to track first and last set in each block. */
2473 /* ??? One could use alloca here, but at some size a threshold is crossed
2474 beyond which one should use malloc. Are we at that threshold here? */
2475 reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2476 reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2478 for (bb = 0; bb < n_basic_blocks; bb++)
2482 int in_libcall_block;
2485 /* First pass over the instructions records information used to
2486 determine when registers and memory are first and last set.
2487 ??? hard-reg reg_set_in_block computation
2488 could be moved to compute_sets since they currently don't change. */
2490 for (i = 0; i < max_gcse_regno; i++)
2491 reg_first_set[i] = reg_last_set[i] = NEVER_SET;
2494 for (insn = BLOCK_HEAD (bb);
2495 insn && insn != NEXT_INSN (BLOCK_END (bb));
2496 insn = NEXT_INSN (insn))
2498 #ifdef NON_SAVING_SETJMP
2499 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
2500 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2502 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2503 record_last_reg_set_info (insn, regno);
2508 if (! INSN_P (insn))
2511 if (GET_CODE (insn) == CALL_INSN)
2513 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2514 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2515 record_last_reg_set_info (insn, regno);
2517 if (! CONST_CALL_P (insn))
2518 record_last_mem_set_info (insn);
2521 note_stores (PATTERN (insn), record_last_set_info, insn);
2524 /* The next pass builds the hash table. */
2526 for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
2527 insn && insn != NEXT_INSN (BLOCK_END (bb));
2528 insn = NEXT_INSN (insn))
2531 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2532 in_libcall_block = 1;
2533 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
2534 in_libcall_block = 0;
2535 hash_scan_insn (insn, set_p, in_libcall_block);
2539 free (reg_first_set);
2540 free (reg_last_set);
2542 /* Catch bugs early. */
2543 reg_first_set = reg_last_set = 0;
2546 /* Allocate space for the set hash table.
2547 N_INSNS is the number of instructions in the function.
2548 It is used to determine the number of buckets to use. */
2551 alloc_set_hash_table (n_insns)
2556 set_hash_table_size = n_insns / 4;
2557 if (set_hash_table_size < 11)
2558 set_hash_table_size = 11;
2560 /* Attempt to maintain efficient use of hash table.
2561 Making it an odd number is simplest for now.
2562 ??? Later take some measurements. */
2563 set_hash_table_size |= 1;
2564 n = set_hash_table_size * sizeof (struct expr *);
2565 set_hash_table = (struct expr **) gmalloc (n);
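/* Worked example of the sizing arithmetic above (illustrative only):
   a 1000-insn function gets 1000 / 4 = 250 buckets, which the |= 1
   bumps to 251; a 40-insn function gets 10, which the minimum check
   raises to 11. */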
2568 /* Free things allocated by alloc_set_hash_table. */
2571 free_set_hash_table ()
2573 free (set_hash_table);
2576 /* Compute the hash table for doing copy/const propagation. */
2579 compute_set_hash_table ()
2581 /* Initialize count of number of entries in hash table. */
2583 memset ((char *) set_hash_table, 0,
2584 set_hash_table_size * sizeof (struct expr *));
2586 compute_hash_table (1);
2589 /* Allocate space for the expression hash table.
2590 N_INSNS is the number of instructions in the function.
2591 It is used to determine the number of buckets to use. */
2594 alloc_expr_hash_table (n_insns)
2595 unsigned int n_insns;
2599 expr_hash_table_size = n_insns / 2;
2600 /* Make sure the amount is usable. */
2601 if (expr_hash_table_size < 11)
2602 expr_hash_table_size = 11;
2604 /* Attempt to maintain efficient use of hash table.
2605 Making it an odd number is simplest for now.
2606 ??? Later take some measurements. */
2607 expr_hash_table_size |= 1;
2608 n = expr_hash_table_size * sizeof (struct expr *);
2609 expr_hash_table = (struct expr **) gmalloc (n);
2612 /* Free things allocated by alloc_expr_hash_table. */
2615 free_expr_hash_table ()
2617 free (expr_hash_table);
2620 /* Compute the hash table for doing GCSE. */
2623 compute_expr_hash_table ()
2625 /* Initialize count of number of entries in hash table. */
2627 memset ((char *) expr_hash_table, 0,
2628 expr_hash_table_size * sizeof (struct expr *));
2630 compute_hash_table (0);
2633 /* Expression tracking support. */
2635 /* Lookup pattern PAT in the expression table.
2636 The result is a pointer to the table entry, or NULL if not found. */
2638 static struct expr *
2642 int do_not_record_p;
2643 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2644 expr_hash_table_size);
2647 if (do_not_record_p)
2650 expr = expr_hash_table[hash];
2652 while (expr && ! expr_equiv_p (expr->expr, pat))
2653 expr = expr->next_same_hash;
2658 /* Lookup REGNO in the set table. If PAT is non-NULL look for the entry that
2659 matches it, otherwise return the first entry for REGNO. The result is a
2660 pointer to the table entry, or NULL if not found. */
2662 static struct expr *
2663 lookup_set (regno, pat)
2667 unsigned int hash = hash_set (regno, set_hash_table_size);
2670 expr = set_hash_table[hash];
2674 while (expr && ! expr_equiv_p (expr->expr, pat))
2675 expr = expr->next_same_hash;
2679 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2680 expr = expr->next_same_hash;
2686 /* Return the next entry for REGNO in list EXPR. */
2688 static struct expr *
2689 next_set (regno, expr)
2694 expr = expr->next_same_hash;
2695 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2700 /* Reset tables used to keep track of what's still available [since the
2701 start of the block]. */
2704 reset_opr_set_tables ()
2706 /* Maintain a bitmap of which regs have been set since beginning of the block. */
2708 sbitmap_zero (reg_set_bitmap);
2710 /* Also keep a record of the last instruction to modify memory.
2711 For now this is very trivial: we only record whether any memory
2712 location has been modified. */
2716 /* Re-cache any INSN_LIST nodes we have allocated. */
2717 for (i = 0; i < n_basic_blocks; i++)
2719 if (modify_mem_list[i])
2720 free_INSN_LIST_list (modify_mem_list + i);
2721 if (canon_modify_mem_list[i])
2722 free_INSN_LIST_list (canon_modify_mem_list + i);
2727 /* Return non-zero if the operands of X are not set before INSN in
2728 INSN's basic block. */
2731 oprs_not_set_p (x, insn)
2741 code = GET_CODE (x);
2756 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2757 INSN_CUID (insn), x, 0))
2760 return oprs_not_set_p (XEXP (x, 0), insn);
2763 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2769 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2773 /* If we are about to do the last recursive call
2774 needed at this level, change it into iteration.
2775 This function is called enough to be worth it. */
2777 return oprs_not_set_p (XEXP (x, i), insn);
2779 if (! oprs_not_set_p (XEXP (x, i), insn))
2782 else if (fmt[i] == 'E')
2783 for (j = 0; j < XVECLEN (x, i); j++)
2784 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2791 /* Mark things set by a CALL. */
2797 if (! CONST_CALL_P (insn))
2798 record_last_mem_set_info (insn);
2801 /* Mark things set by a SET. */
2804 mark_set (pat, insn)
2807 rtx dest = SET_DEST (pat);
2809 while (GET_CODE (dest) == SUBREG
2810 || GET_CODE (dest) == ZERO_EXTRACT
2811 || GET_CODE (dest) == SIGN_EXTRACT
2812 || GET_CODE (dest) == STRICT_LOW_PART)
2813 dest = XEXP (dest, 0);
2815 if (GET_CODE (dest) == REG)
2816 SET_BIT (reg_set_bitmap, REGNO (dest));
2817 else if (GET_CODE (dest) == MEM)
2818 record_last_mem_set_info (insn);
2820 if (GET_CODE (SET_SRC (pat)) == CALL)
2824 /* Record things set by a CLOBBER. */
2827 mark_clobber (pat, insn)
2830 rtx clob = XEXP (pat, 0);
2832 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2833 clob = XEXP (clob, 0);
2835 if (GET_CODE (clob) == REG)
2836 SET_BIT (reg_set_bitmap, REGNO (clob));
2838 record_last_mem_set_info (insn);
2841 /* Record things set by INSN.
2842 This data is used by oprs_not_set_p. */
2845 mark_oprs_set (insn)
2848 rtx pat = PATTERN (insn);
2851 if (GET_CODE (pat) == SET)
2852 mark_set (pat, insn);
2853 else if (GET_CODE (pat) == PARALLEL)
2854 for (i = 0; i < XVECLEN (pat, 0); i++)
2856 rtx x = XVECEXP (pat, 0, i);
2858 if (GET_CODE (x) == SET)
2860 else if (GET_CODE (x) == CLOBBER)
2861 mark_clobber (x, insn);
2862 else if (GET_CODE (x) == CALL)
2866 else if (GET_CODE (pat) == CLOBBER)
2867 mark_clobber (pat, insn);
2868 else if (GET_CODE (pat) == CALL)
2873 /* Classic GCSE reaching definition support. */
2875 /* Allocate reaching def variables. */
2878 alloc_rd_mem (n_blocks, n_insns)
2879 int n_blocks, n_insns;
2881 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2882 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2884 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2885 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2887 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2888 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2890 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2891 sbitmap_vector_zero (rd_out, n_basic_blocks);
2894 /* Free reaching def variables. */
2899 sbitmap_vector_free (rd_kill);
2900 sbitmap_vector_free (rd_gen);
2901 sbitmap_vector_free (reaching_defs);
2902 sbitmap_vector_free (rd_out);
2905 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2908 handle_rd_kill_set (insn, regno, bb)
2913 struct reg_set *this_reg;
2915 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2916 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2917 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2920 /* Compute the set of kills for reaching definitions. */
2930 For each set bit in `gen' of the block (i.e. each insn which
2931 generates a definition in the block)
2932 Call the reg set by the insn corresponding to that bit regx
2933 Look at the linked list starting at reg_set_table[regx]
2934 For each setting of regx in the linked list, which is not in this block
2936 Set the bit in `kill' corresponding to that insn. */
2937 for (bb = 0; bb < n_basic_blocks; bb++)
2938 for (cuid = 0; cuid < max_cuid; cuid++)
2939 if (TEST_BIT (rd_gen[bb], cuid))
2941 rtx insn = CUID_INSN (cuid);
2942 rtx pat = PATTERN (insn);
2944 if (GET_CODE (insn) == CALL_INSN)
2946 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2947 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2948 handle_rd_kill_set (insn, regno, BASIC_BLOCK (bb));
2951 if (GET_CODE (pat) == PARALLEL)
2953 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2955 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2957 if ((code == SET || code == CLOBBER)
2958 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2959 handle_rd_kill_set (insn,
2960 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2964 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2965 /* Each setting of this register outside of this block
2966 must be marked in the set of kills in this block. */
2967 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), BASIC_BLOCK (bb));
2971 /* Compute the reaching definitions as in
2972 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2973 Chapter 10. It is the same algorithm as used for computing available
2974 expressions but applied to the gens and kills of reaching definitions. */
2979 int bb, changed, passes;
2981 for (bb = 0; bb < n_basic_blocks; bb++)
2982 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
2989 for (bb = 0; bb < n_basic_blocks; bb++)
2991 sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
2992 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
2993 reaching_defs[bb], rd_kill[bb]);
2999 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
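/* In dataflow terms the loop above iterates, until no rd_out changes:

     reaching_defs[bb] = union of rd_out[p] over all predecessors p
     rd_out[bb] = rd_gen[bb] | (reaching_defs[bb] & ~rd_kill[bb])

   the standard reaching-definitions system, with bits indexed by insn
   cuid. */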
3002 /* Classic GCSE available expression support. */
3004 /* Allocate memory for available expression computation. */
3007 alloc_avail_expr_mem (n_blocks, n_exprs)
3008 int n_blocks, n_exprs;
3010 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3011 sbitmap_vector_zero (ae_kill, n_basic_blocks);
3013 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3014 sbitmap_vector_zero (ae_gen, n_basic_blocks);
3016 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3017 sbitmap_vector_zero (ae_in, n_basic_blocks);
3019 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3020 sbitmap_vector_zero (ae_out, n_basic_blocks);
3024 free_avail_expr_mem ()
3026 sbitmap_vector_free (ae_kill);
3027 sbitmap_vector_free (ae_gen);
3028 sbitmap_vector_free (ae_in);
3029 sbitmap_vector_free (ae_out);
3032 /* Compute the set of available expressions generated in each basic block. */
3041 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3042 This is all we have to do because an expression is not recorded if it
3043 is not available, and the only expressions we want to work with are the
3044 ones that are recorded. */
3045 for (i = 0; i < expr_hash_table_size; i++)
3046 for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
3047 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3048 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3051 /* Return non-zero if expression X is killed in BB. */
3054 expr_killed_p (x, bb)
3065 code = GET_CODE (x);
3069 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3072 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3075 return expr_killed_p (XEXP (x, 0), bb);
3092 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3096 /* If we are about to do the last recursive call
3097 needed at this level, change it into iteration.
3098 This function is called enough to be worth it. */
3100 return expr_killed_p (XEXP (x, i), bb);
3101 else if (expr_killed_p (XEXP (x, i), bb))
3104 else if (fmt[i] == 'E')
3105 for (j = 0; j < XVECLEN (x, i); j++)
3106 if (expr_killed_p (XVECEXP (x, i, j), bb))
3113 /* Compute the set of available expressions killed in each basic block. */
3116 compute_ae_kill (ae_gen, ae_kill)
3117 sbitmap *ae_gen, *ae_kill;
3123 for (bb = 0; bb < n_basic_blocks; bb++)
3124 for (i = 0; i < expr_hash_table_size; i++)
3125 for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
3127 /* Skip EXPR if generated in this block. */
3128 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
3131 if (expr_killed_p (expr->expr, BASIC_BLOCK (bb)))
3132 SET_BIT (ae_kill[bb], expr->bitmap_index);
3136 /* Actually perform the Classic GCSE optimizations. */
3138 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
3140 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
3141 as a positive reach. We want to do this when there are two computations
3142 of the expression in the block.
3144 VISITED is a pointer to a working buffer for tracking which BB's have
3145 been visited. It is NULL for the top-level call.
3147 We treat reaching expressions that go through blocks containing the same
3148 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3149 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3150 2 as not reaching. The intent is to improve the probability of finding
3151 only one reaching expression and to reduce register lifetimes by picking
3152 the closest such expression. */
3155 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3159 int check_self_loop;
3164 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3166 basic_block pred_bb = pred->src;
3168 if (visited[pred_bb->index])
3169 /* This predecessor has already been visited. Nothing to do. */
3171 else if (pred_bb == bb)
3173 /* BB loops on itself. */
3175 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3176 && BLOCK_NUM (occr->insn) == pred_bb->index)
3179 visited[pred_bb->index] = 1;
3182 /* Ignore this predecessor if it kills the expression. */
3183 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3184 visited[pred_bb->index] = 1;
3186 /* Does this predecessor generate this expression? */
3187 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3189 /* Is this the occurrence we're looking for?
3190 Note that there's only one generating occurrence per block
3191 so we just need to check the block number. */
3192 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3195 visited[pred_bb->index] = 1;
3198 /* Neither gen nor kill. */
3201 visited[pred_bb->index] = 1;
3202 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3209 /* All paths have been checked. */
3213 /* This wrapper for expr_reaches_here_p_work() ensures that the
3214 memory allocated for that function's visited buffer is freed. */
3217 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3221 int check_self_loop;
3224 char *visited = (char *) xcalloc (n_basic_blocks, 1);
3226 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3232 /* Return the instruction computing EXPR that reaches INSN's basic block.
3233 If there is more than one such instruction, return NULL.
3235 Called only by handle_avail_expr. */
3238 computing_insn (expr, insn)
3242 basic_block bb = BLOCK_FOR_INSN (insn);
3244 if (expr->avail_occr->next == NULL)
3246 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3247 /* The available expression is actually itself
3248 (i.e. a loop in the flow graph) so do nothing. */
3251 /* (FIXME) This is the case where we found a pattern that was
3252 created by a substitution that took place. */
3253 return expr->avail_occr->insn;
3257 /* Pattern is computed more than once.
3258 Search backwards from this insn to see how many of these
3259 computations actually reach this insn. */
3261 rtx insn_computes_expr = NULL;
3264 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3266 if (BLOCK_FOR_INSN (occr->insn) == bb)
3268 /* The expression is generated in this block.
3269 The only time we care about this is when the expression
3270 is generated later in the block [and thus there's a loop].
3271 We let the normal cse pass handle the other cases. */
3272 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3273 && expr_reaches_here_p (occr, expr, bb, 1))
3279 insn_computes_expr = occr->insn;
3282 else if (expr_reaches_here_p (occr, expr, bb, 0))
3288 insn_computes_expr = occr->insn;
3292 if (insn_computes_expr == NULL)
3295 return insn_computes_expr;
3299 /* Return non-zero if the definition in DEF_INSN can reach INSN.
3300 Only called by can_disregard_other_sets. */
3303 def_reaches_here_p (insn, def_insn)
3308 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3311 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3313 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3315 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3317 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3318 reg = XEXP (PATTERN (def_insn), 0);
3319 else if (GET_CODE (PATTERN (def_insn)) == SET)
3320 reg = SET_DEST (PATTERN (def_insn));
3324 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3333 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The
3334 value returned is the number of definitions that reach INSN. Returning a
3335 value of zero means that [maybe] more than one definition reaches INSN and
3336 the caller can't perform whatever optimization it is trying to do; i.e. it
3337 is always safe to return zero. */
3340 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3341 struct reg_set **addr_this_reg;
3345 int number_of_reaching_defs = 0;
3346 struct reg_set *this_reg;
3348 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3349 if (def_reaches_here_p (insn, this_reg->insn))
3351 number_of_reaching_defs++;
3352 /* Ignore parallels for now. */
3353 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3357 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3358 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3359 SET_SRC (PATTERN (insn)))))
3360 /* A setting of the reg to a different value reaches INSN. */
3363 if (number_of_reaching_defs > 1)
3365 /* If in this setting the value the register is being set to is
3366 equal to the previous value the register was set to, and this
3367 setting reaches the insn we are trying to do the substitution
3368 on, then we are OK. */
3369 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3371 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3372 SET_SRC (PATTERN (insn))))
3376 *addr_this_reg = this_reg;
3379 return number_of_reaching_defs;
3382 /* The expression computed by INSN is available and the substitution is legal,
3383 so try to perform the substitution.
3385 The result is non-zero if any changes were made. */
3388 handle_avail_expr (insn, expr)
3392 rtx pat, insn_computes_expr, expr_set;
3394 struct reg_set *this_reg;
3395 int found_setting, use_src;
3398 /* We only handle the case where one computation of the expression
3399 reaches this instruction. */
3400 insn_computes_expr = computing_insn (expr, insn);
3401 if (insn_computes_expr == NULL)
3403 expr_set = single_set (insn_computes_expr);
3410 /* At this point we know only one computation of EXPR outside of this
3411 block reaches this insn. Now try to find a register that the
3412 expression is computed into. */
3413 if (GET_CODE (SET_SRC (expr_set)) == REG)
3415 /* This is the case when the available expression that reaches
3416 here has already been handled as an available expression. */
3417 unsigned int regnum_for_replacing
3418 = REGNO (SET_SRC (expr_set));
3420 /* If the register was created by GCSE we can't use `reg_set_table';
3421 however, we know it's set only once. */
3422 if (regnum_for_replacing >= max_gcse_regno
3423 /* If the register the expression is computed into is set only once,
3424 or only one set reaches this insn, we can use it. */
3425 || (((this_reg = reg_set_table[regnum_for_replacing]),
3426 this_reg->next == NULL)
3427 || can_disregard_other_sets (&this_reg, insn, 0)))
3436 unsigned int regnum_for_replacing
3437 = REGNO (SET_DEST (expr_set));
3439 /* This shouldn't happen. */
3440 if (regnum_for_replacing >= max_gcse_regno)
3443 this_reg = reg_set_table[regnum_for_replacing];
3445 /* If the register the expression is computed into is set only once,
3446 or only one set reaches this insn, use it. */
3447 if (this_reg->next == NULL
3448 || can_disregard_other_sets (&this_reg, insn, 0))
3454 pat = PATTERN (insn);
3456 to = SET_SRC (expr_set);
3458 to = SET_DEST (expr_set);
3459 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3461 /* We should be able to ignore the return code from validate_change but
3462 to play it safe we check. */
3466 if (gcse_file != NULL)
3468 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3470 fprintf (gcse_file, " reg %d %s insn %d\n",
3471 REGNO (to), use_src ? "from" : "set in",
3472 INSN_UID (insn_computes_expr));
3477 /* The register that the expr is computed into is set more than once. */
3478 else if (1 /*expensive_op (this_pattern->op) && do_expensive_gcse)*/)
3480 /* Insert an insn after insnx that copies the reg set in insnx
3481 into a new pseudo register; call this new register REGN.
3482 From insnb until the end of the basic block, or until REGB is set,
3483 replace all uses of REGB with REGN. */
3486 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3488 /* Generate the new insn. */
3489 /* ??? If the change fails, we return 0, even though we created
3490 an insn. I think this is ok. */
3492 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3493 SET_DEST (expr_set)),
3494 insn_computes_expr);
3496 /* Keep block number table up to date. */
3497 set_block_for_new_insns (new_insn, BLOCK_FOR_INSN (insn_computes_expr));
3499 /* Keep register set table up to date. */
3500 record_one_set (REGNO (to), new_insn);
3502 gcse_create_count++;
3503 if (gcse_file != NULL)
3505 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3506 INSN_UID (NEXT_INSN (insn_computes_expr)),
3507 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3508 fprintf (gcse_file, ", computed in insn %d,\n",
3509 INSN_UID (insn_computes_expr));
3510 fprintf (gcse_file, " into newly allocated reg %d\n",
3514 pat = PATTERN (insn);
3516 /* Do register replacement for INSN. */
3517 changed = validate_change (insn, &SET_SRC (pat),
3519 (NEXT_INSN (insn_computes_expr))),
3522 /* We should be able to ignore the return code from validate_change but
3523 to play it safe we check. */
3527 if (gcse_file != NULL)
3530 "GCSE: Replacing the source in insn %d with reg %d ",
3532 REGNO (SET_DEST (PATTERN (NEXT_INSN
3533 (insn_computes_expr)))));
3534 fprintf (gcse_file, "set in insn %d\n",
3535 INSN_UID (insn_computes_expr));
3543 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3544 the dataflow analysis has been done.
3546 The result is non-zero if a change was made. */
3554 /* Note we start at block 1. */
3557 for (bb = 1; bb < n_basic_blocks; bb++)
3559 /* Reset tables used to keep track of what's still valid [since the
3560 start of the block]. */
3561 reset_opr_set_tables ();
3563 for (insn = BLOCK_HEAD (bb);
3564 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3565 insn = NEXT_INSN (insn))
3567 /* Is insn of form (set (pseudo-reg) ...)? */
3568 if (GET_CODE (insn) == INSN
3569 && GET_CODE (PATTERN (insn)) == SET
3570 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3571 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3573 rtx pat = PATTERN (insn);
3574 rtx src = SET_SRC (pat);
3577 if (want_to_gcse_p (src)
3578 /* Is the expression recorded? */
3579 && ((expr = lookup_expr (src)) != NULL)
3580 /* Is the expression available [at the start of the
3582 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3583 /* Are the operands unchanged since the start of the
3585 && oprs_not_set_p (src, insn))
3586 changed |= handle_avail_expr (insn, expr);
3589 /* Keep track of everything modified by this insn. */
3590 /* ??? Need to be careful w.r.t. mods done to INSN. */
3592 mark_oprs_set (insn);
3599 /* Top level routine to perform one classic GCSE pass.
3601 Return non-zero if a change was made. */
3604 one_classic_gcse_pass (pass)
3609 gcse_subst_count = 0;
3610 gcse_create_count = 0;
3612 alloc_expr_hash_table (max_cuid);
3613 alloc_rd_mem (n_basic_blocks, max_cuid);
3614 compute_expr_hash_table ();
3616 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3617 expr_hash_table_size, n_exprs);
3623 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3625 compute_ae_kill (ae_gen, ae_kill);
3626 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3627 changed = classic_gcse ();
3628 free_avail_expr_mem ();
3632 free_expr_hash_table ();
3636 fprintf (gcse_file, "\n");
3637 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3638 current_function_name, pass, bytes_used, gcse_subst_count);
3639 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3645 /* Compute copy/constant propagation working variables. */
3647 /* Local properties of assignments. */
3648 static sbitmap *cprop_pavloc;
3649 static sbitmap *cprop_absaltered;
3651 /* Global properties of assignments (computed from the local properties). */
3652 static sbitmap *cprop_avin;
3653 static sbitmap *cprop_avout;
3655 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3656 basic blocks. N_SETS is the number of sets. */
3659 alloc_cprop_mem (n_blocks, n_sets)
3660 int n_blocks, n_sets;
3662 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3663 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3665 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3666 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3669 /* Free vars used by copy/const propagation. */
3674 sbitmap_vector_free (cprop_pavloc);
3675 sbitmap_vector_free (cprop_absaltered);
3676 sbitmap_vector_free (cprop_avin);
3677 sbitmap_vector_free (cprop_avout);
3680 /* For each block, compute whether X is transparent. X is either an
3681 expression or an assignment [though we don't care which, for this context
3682 an assignment is treated as an expression]. For each block where an
3683 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
3687 compute_transp (x, indx, bmap, set_p)
3698 /* repeat is used to turn tail-recursion into iteration since GCC
3699 can't do it when there's no return value. */
3705 code = GET_CODE (x);
3711 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3713 for (bb = 0; bb < n_basic_blocks; bb++)
3714 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3715 SET_BIT (bmap[bb], indx);
3719 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3720 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3725 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3727 for (bb = 0; bb < n_basic_blocks; bb++)
3728 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3729 RESET_BIT (bmap[bb], indx);
3733 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3734 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3741 for (bb = 0; bb < n_basic_blocks; bb++)
3743 rtx list_entry = canon_modify_mem_list[bb];
3747 rtx dest, dest_addr;
3749 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3752 SET_BIT (bmap[bb], indx);
3754 RESET_BIT (bmap[bb], indx);
3757 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3758 Examine each hunk of memory that is modified. */
3760 dest = XEXP (list_entry, 0);
3761 list_entry = XEXP (list_entry, 1);
3762 dest_addr = XEXP (list_entry, 0);
3764 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3765 x, rtx_addr_varies_p))
3768 SET_BIT (bmap[bb], indx);
3770 RESET_BIT (bmap[bb], indx);
3773 list_entry = XEXP (list_entry, 1);
3795 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3799 /* If we are about to do the last recursive call
3800 needed at this level, change it into iteration.
3801 This function is called enough to be worth it. */
3808 compute_transp (XEXP (x, i), indx, bmap, set_p);
3810 else if (fmt[i] == 'E')
3811 for (j = 0; j < XVECLEN (x, i); j++)
3812 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
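/* Illustrative sketch (hypothetical expression): for
   (plus (reg 70) (mem (reg 71))) the walk above visits the REG and the
   MEM (and the address inside it); every block that sets reg 70 or
   reg 71, and every block with a recorded store that may alias the MEM
   per canon_true_dependence, gets its INDX bit set when SET_P == 1
   (cprop's absaltered bitmaps) or reset when SET_P == 0 (PRE's transp
   bitmaps). */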
3816 /* Top level routine to do the dataflow analysis needed by copy/const propagation. */
3820 compute_cprop_data ()
3822 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
3823 compute_available (cprop_pavloc, cprop_absaltered,
3824 cprop_avout, cprop_avin);
3827 /* Copy/constant propagation. */
3829 /* Maximum number of register uses in an insn that we handle. */
3832 /* Table of uses found in an insn.
3833 Allocated statically to avoid alloc/free complexity and overhead. */
3834 static struct reg_use reg_use_table[MAX_USES];
3836 /* Index into `reg_use_table' while building it. */
3837 static int reg_use_count;
3839 /* Set up a list of register numbers used in INSN. The found uses are stored
3840 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3841 and contains the number of uses in the table upon exit.
3843 ??? If a register appears multiple times we will record it multiple times.
3844 This doesn't hurt anything but it will slow things down. */
3847 find_used_regs (xptr, data)
3849 void *data ATTRIBUTE_UNUSED;
3856 /* repeat is used to turn tail-recursion into iteration since GCC
3857 can't do it when there's no return value. */
3862 code = GET_CODE (x);
3865 if (reg_use_count == MAX_USES)
3868 reg_use_table[reg_use_count].reg_rtx = x;
3872 /* Recursively scan the operands of this expression. */
3874 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3878 /* If we are about to do the last recursive call
3879 needed at this level, change it into iteration.
3880 This function is called enough to be worth it. */
3887 find_used_regs (&XEXP (x, i), data);
3889 else if (fmt[i] == 'E')
3890 for (j = 0; j < XVECLEN (x, i); j++)
3891 find_used_regs (&XVECEXP (x, i, j), data);
3895 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3896 Returns non-zero if successful. */
3899 try_replace_reg (from, to, insn)
3902 rtx note = find_reg_equal_equiv_note (insn);
3905 rtx set = single_set (insn);
3907 success = validate_replace_src (from, to, insn);
3909 /* If the above failed and this is a single set, try to simplify the source of
3910 the set given our substitution. We could perhaps try this for multiple
3911 SETs, but it probably won't buy us anything. */
3912 if (!success && set != 0)
3914 src = simplify_replace_rtx (SET_SRC (set), from, to);
3916 if (!rtx_equal_p (src, SET_SRC (set))
3917 && validate_change (insn, &SET_SRC (set), src, 0))
3921 /* If we've failed to do replacement, have a single SET, and don't already
3922 have a note, add a REG_EQUAL note to not lose information. */
3923 if (!success && note == 0 && set != 0)
3924 note = REG_NOTES (insn)
3925 = gen_rtx_EXPR_LIST (REG_EQUAL, src, REG_NOTES (insn));
3927 /* If there is already a NOTE, update the expression in it with our replacement. */
3930 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3932 /* REG_EQUAL may get simplified into a register.
3933 We don't allow that. Remove such a note. This case ought
3934 not to happen, because previous code ought to synthesize a
3935 reg-reg move, but be on the safe side. */
3936 if (note && REG_P (XEXP (note, 0)))
3937 remove_note (insn, note);
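/* Illustrative sketch (hypothetical insn): replacing reg 70 with
   (const_int 4) in (set (reg 80) (plus (reg 70) (reg 71))) first tries
   the direct substitution; if the result is not a recognizable insn,
   simplify_replace_rtx may fold the source into something valid, and if
   that also fails a REG_EQUAL note (plus (const_int 4) (reg 71)) is
   attached so the information isn't lost. */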
3942 /* Find a set of register REGNO that is available on entry to INSN's block.
3943 Returns NULL if no such set is found. */
3945 static struct expr *
3946 find_avail_set (regno, insn)
3950 /* SET1 contains the last set found that can be returned to the caller for
3951 use in a substitution. */
3952 struct expr *set1 = 0;
3954 /* Loops are not possible here. To get a loop we would need two sets
3955 available at the start of the block containing INSN. I.e. we would
3956 need two sets like this available at the start of the block:
3958 (set (reg X) (reg Y))
3959 (set (reg Y) (reg X))
3961 This cannot happen since the set of (reg Y) would have killed the
3962 set of (reg X), making it unavailable at the start of this block. */
3966 struct expr *set = lookup_set (regno, NULL_RTX);
3968 /* Find a set that is available at the start of the block
3969 which contains INSN. */
3972 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3974 set = next_set (regno, set);
3977 /* If no available set was found we've reached the end of the
3978 (possibly empty) copy chain. */
3982 if (GET_CODE (set->expr) != SET)
3985 src = SET_SRC (set->expr);
3987 /* We know the set is available.
3988 Now check that SRC is ANTLOC (i.e. none of the source operands
3989 have changed since the start of the block).
3991 If the source operand changed, we may still use it for the next
3992 iteration of this loop, but we may not use it for substitutions. */
3994 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
3997 /* If the source of the set is anything except a register, then
3998 we have reached the end of the copy chain. */
3999 if (GET_CODE (src) != REG)
4002 /* Follow the copy chain, i.e. start another iteration of the loop
4003 and see if we have an available copy into SRC. */
4004 regno = REGNO (src);
4007 /* SET1 holds the last set that was available and anticipatable at INSN. */
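/* Worked example (hypothetical sets): if the start of INSN's block has
   the available sets

     (set (reg 81) (const_int 5))
     (set (reg 82) (reg 81))

   then looking up reg 82 follows the copy chain from reg 82 to reg 81
   and returns the set of reg 81, so cprop_insn can replace INSN's use
   of reg 82 with the constant 5. */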
4012 /* Subroutine of cprop_insn that tries to propagate constants into
4013 JUMP_INSNS. INSN must be a conditional jump. FROM is what we will try to
4014 replace, SRC is the constant we will try to substitute for it. Returns
4015 nonzero if a change was made. We know INSN has just a SET. */
4018 cprop_jump (insn, from, src)
4023 rtx set = PATTERN (insn);
4024 rtx new = simplify_replace_rtx (SET_SRC (set), from, src);
4026 /* If no simplification can be made, then try the next register. */
4028 if (rtx_equal_p (new, SET_SRC (set)))
4031 /* If this is now a no-op leave it that way, but update LABEL_NUSES if necessary. */
4035 SET_SRC (set) = new;
4037 if (JUMP_LABEL (insn) != 0)
4038 --LABEL_NUSES (JUMP_LABEL (insn));
4041 /* Otherwise, this must be a valid instruction. */
4042 else if (! validate_change (insn, &SET_SRC (set), new, 0))
4045 /* If this has turned into an unconditional jump,
4046 then put a barrier after it so that the unreachable
4047 code will be deleted. */
4048 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4049 emit_barrier_after (insn);
4051 run_jump_opt_after_gcse = 1;
4054 if (gcse_file != NULL)
4057 "CONST-PROP: Replacing reg %d in insn %d with constant ",
4058 REGNO (from), INSN_UID (insn));
4059 print_rtl (gcse_file, src);
4060 fprintf (gcse_file, "\n");
4068 /* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS
4069 for machines that have CC0. INSN is a single set that stores into CC0;
4070 the insn following it is a conditional jump. REG_USED is the use we will
4071 try to replace, SRC is the constant we will try to substitute for it.
4072 Returns nonzero if a change was made. */
4075 cprop_cc0_jump (insn, reg_used, src)
4077 struct reg_use *reg_used;
4080 /* First substitute in the SET_SRC of INSN, then substitute that for cc0 in JUMP. */
4082 rtx jump = NEXT_INSN (insn);
4083 rtx new_src = simplify_replace_rtx (SET_SRC (PATTERN (insn)),
4084 reg_used->reg_rtx, src);
4086 if (! cprop_jump (jump, cc0_rtx, new_src))
4089 /* If we succeeded, delete the cc0 setter. */
4090 PUT_CODE (insn, NOTE);
4091 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4092 NOTE_SOURCE_FILE (insn) = 0;
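/* Illustrative sketch (hypothetical cc0 pair):

     (set (cc0) (compare (reg 70) (const_int 0)))
     (set (pc) (if_then_else (eq (cc0) (const_int 0)) ...))

   a known constant is first substituted for reg 70 in the compare, and
   the resulting expression is then substituted for cc0 in the jump; if
   cprop_jump succeeds, the now-dead cc0 setter is turned into a NOTE
   above. */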
4098 /* Perform constant and copy propagation on INSN.
4099 The result is non-zero if a change was made. */
4102 cprop_insn (insn, alter_jumps)
4106 struct reg_use *reg_used;
4114 note_uses (&PATTERN (insn), find_used_regs, NULL);
4116 note = find_reg_equal_equiv_note (insn);
4118 /* We may win even when propagating constants into notes. */
4120 find_used_regs (&XEXP (note, 0), NULL);
4122 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4123 reg_used++, reg_use_count--)
4125 unsigned int regno = REGNO (reg_used->reg_rtx);
4129 /* Ignore registers created by GCSE.
4130 We do this because ... */
4131 if (regno >= max_gcse_regno)
4134 /* If the register has already been set in this block, there's
4135 nothing we can do. */
4136 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4139 /* Find an assignment that sets reg_used and is available
4140 at the start of the block. */
4141 set = find_avail_set (regno, insn);
4146 /* ??? We might be able to handle PARALLELs. Later. */
4147 if (GET_CODE (pat) != SET)
4150 src = SET_SRC (pat);
4152 /* Constant propagation. */
4153 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
4154 || GET_CODE (src) == SYMBOL_REF)
4156 /* Handle normal insns first. */
4157 if (GET_CODE (insn) == INSN
4158 && try_replace_reg (reg_used->reg_rtx, src, insn))
4162 if (gcse_file != NULL)
4164 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
4166 fprintf (gcse_file, "insn %d with constant ",
4168 print_rtl (gcse_file, src);
4169 fprintf (gcse_file, "\n");
4172 /* The original insn setting reg_used may or may not now be
4173 deletable. We leave the deletion to flow. */
4176 /* Try to propagate a CONST_INT into a conditional jump.
4177 We're pretty specific about what we will handle in this
4178 code; we can extend this as necessary over time.
4180 Right now the insn in question must look like
4181 (set (pc) (if_then_else ...)) */
4182 else if (alter_jumps
4183 && GET_CODE (insn) == JUMP_INSN
4184 && condjump_p (insn)
4185 && ! simplejump_p (insn))
4186 changed |= cprop_jump (insn, reg_used->reg_rtx, src);
4189 /* Similar code for machines that use a pair of CC0 setter and
4190 conditional jump insn. */
4191 else if (alter_jumps
4192 && GET_CODE (PATTERN (insn)) == SET
4193 && SET_DEST (PATTERN (insn)) == cc0_rtx
4194 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
4195 && condjump_p (NEXT_INSN (insn))
4196 && ! simplejump_p (NEXT_INSN (insn))
4197 && cprop_cc0_jump (insn, reg_used, src))
4204 else if (GET_CODE (src) == REG
4205 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4206 && REGNO (src) != regno)
4208 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4212 if (gcse_file != NULL)
4214 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
4215 regno, INSN_UID (insn));
4216 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4219 /* The original insn setting reg_used may or may not now be
4220 deletable. We leave the deletion to flow. */
4221 /* FIXME: If it turns out that the insn isn't deletable,
4222 then we may have unnecessarily extended register lifetimes
4223 and made things worse. */
4231 /* Forward propagate copies. This includes copies and constants. Return
4232 non-zero if a change was made. */
4241 /* Note we start at block 1. */
4244 for (bb = 1; bb < n_basic_blocks; bb++)
4246 /* Reset tables used to keep track of what's still valid [since the
4247 start of the block]. */
4248 reset_opr_set_tables ();
4250 for (insn = BLOCK_HEAD (bb);
4251 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
4252 insn = NEXT_INSN (insn))
4255 changed |= cprop_insn (insn, alter_jumps);
4257 /* Keep track of everything modified by this insn. */
4258 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4259 call mark_oprs_set if we turned the insn into a NOTE. */
4260 if (GET_CODE (insn) != NOTE)
4261 mark_oprs_set (insn);
4265 if (gcse_file != NULL)
4266 fprintf (gcse_file, "\n");
4271 /* Perform one copy/constant propagation pass.
4272 F is the first insn in the function.
4273 PASS is the pass count. */
4276 one_cprop_pass (pass, alter_jumps)
4282 const_prop_count = 0;
4283 copy_prop_count = 0;
4285 alloc_set_hash_table (max_cuid);
4286 compute_set_hash_table ();
4288 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
4292 alloc_cprop_mem (n_basic_blocks, n_sets);
4293 compute_cprop_data ();
4294 changed = cprop (alter_jumps);
4298 free_set_hash_table ();
4302 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4303 current_function_name, pass, bytes_used);
4304 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4305 const_prop_count, copy_prop_count);
4311 /* Compute PRE+LCM working variables. */
4313 /* Local properties of expressions. */
4314 /* Nonzero for expressions that are transparent in the block. */
4315 static sbitmap *transp;
4317 /* Nonzero for expressions that are transparent at the end of the block.
4318 This is only zero for expressions killed by an abnormal critical edge
4319 created by a call. */
4320 static sbitmap *transpout;
4322 /* Nonzero for expressions that are computed (available) in the block. */
4323 static sbitmap *comp;
4325 /* Nonzero for expressions that are locally anticipatable in the block. */
4326 static sbitmap *antloc;
4328 /* Nonzero for expressions where this block is an optimal computation point. */
4330 static sbitmap *pre_optimal;
4332 /* Nonzero for expressions which are redundant in a particular block. */
4333 static sbitmap *pre_redundant;
4335 /* Nonzero for expressions which should be inserted on a specific edge. */
4336 static sbitmap *pre_insert_map;
4338 /* Nonzero for expressions which should be deleted in a specific block. */
4339 static sbitmap *pre_delete_map;
4341 /* Contains the edge_list returned by pre_edge_lcm. */
4342 static struct edge_list *edge_list;
4344 /* Redundant insns. */
4345 static sbitmap pre_redundant_insns;
4347 /* Allocate vars used for PRE analysis. */
4350 alloc_pre_mem (n_blocks, n_exprs)
4351 int n_blocks, n_exprs;
4353 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4354 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4355 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4358 pre_redundant = NULL;
4359 pre_insert_map = NULL;
4360 pre_delete_map = NULL;
4363 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4365 /* pre_insert and pre_delete are allocated later. */
4368 /* Free vars used for PRE analysis. */
4373 sbitmap_vector_free (transp);
4374 sbitmap_vector_free (comp);
/* ANTLOC and AE_KILL are freed just after pre_edge_lcm finishes.  */
4379 sbitmap_vector_free (pre_optimal);
4381 sbitmap_vector_free (pre_redundant);
4383 sbitmap_vector_free (pre_insert_map);
4385 sbitmap_vector_free (pre_delete_map);
4387 sbitmap_vector_free (ae_in);
4389 sbitmap_vector_free (ae_out);
4391 transp = comp = NULL;
4392 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4393 ae_in = ae_out = NULL;
4396 /* Top level routine to do the dataflow analysis needed by PRE. */
4401 sbitmap trapping_expr;
4405 compute_local_properties (transp, comp, antloc, 0);
4406 sbitmap_vector_zero (ae_kill, n_basic_blocks);
4408 /* Collect expressions which might trap. */
4409 trapping_expr = sbitmap_alloc (n_exprs);
4410 sbitmap_zero (trapping_expr);
4411 for (ui = 0; ui < expr_hash_table_size; ui++)
4414 for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
4415 if (may_trap_p (e->expr))
4416 SET_BIT (trapping_expr, e->bitmap_index);
/* Compute ae_kill for each basic block using:

   ~(TRANSP | COMP)

   This is significantly faster than compute_ae_kill.  */
4425 for (i = 0; i < n_basic_blocks; i++)
4429 /* If the current block is the destination of an abnormal edge, we
4430 kill all trapping expressions because we won't be able to properly
4431 place the instruction on the edge. So make them neither
4432 anticipatable nor transparent. This is fairly conservative. */
4433 for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
4434 if (e->flags & EDGE_ABNORMAL)
4436 sbitmap_difference (antloc[i], antloc[i], trapping_expr);
4437 sbitmap_difference (transp[i], transp[i], trapping_expr);
4441 sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
4442 sbitmap_not (ae_kill[i], ae_kill[i]);
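/* A note on the identity just used (an observation from this code, not
   from any external reference): an expression is killed for availability
   in block B exactly when it is neither transparent in B (some operand
   is modified) nor computed in B, i.e.

       ae_kill[B] = ~(transp[B] | comp[B])

   which the two sbitmap calls above evaluate one word at a time instead
   of rescanning every insn as compute_ae_kill would.  */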
4445 edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
4446 ae_kill, &pre_insert_map, &pre_delete_map);
4447 sbitmap_vector_free (antloc);
4449 sbitmap_vector_free (ae_kill);
4451 free (trapping_expr);
/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.
4459 VISITED is a pointer to a working buffer for tracking which BB's have
4460 been visited. It is NULL for the top-level call.
4462 We treat reaching expressions that go through blocks containing the same
4463 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4464 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4465 2 as not reaching. The intent is to improve the probability of finding
4466 only one reaching expression and to reduce register lifetimes by picking
4467 the closest such expression. */
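/* To illustrate with a hypothetical fragment of the CFG described above:

       2 -----> 3 -----> 4
      (gen)    (gen)   (INSN)

   the occurrence in block 2 is reported as not reaching block 4, since
   every path from 2 to 4 passes through the generating occurrence in
   block 3; only the occurrence in block 3 reaches INSN.  */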
4470 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4471 basic_block occr_bb;
4478 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4480 basic_block pred_bb = pred->src;
4482 if (pred->src == ENTRY_BLOCK_PTR
/* Has this predecessor already been visited?  */
4484 || visited[pred_bb->index])
4485 ;/* Nothing to do. */
4487 /* Does this predecessor generate this expression? */
4488 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4490 /* Is this the occurrence we're looking for?
4491 Note that there's only one generating occurrence per block
4492 so we just need to check the block number. */
4493 if (occr_bb == pred_bb)
4496 visited[pred_bb->index] = 1;
4498 /* Ignore this predecessor if it kills the expression. */
4499 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4500 visited[pred_bb->index] = 1;
4502 /* Neither gen nor kill. */
4505 visited[pred_bb->index] = 1;
4506 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4511 /* All paths have been checked. */
/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */
4519 pre_expr_reaches_here_p (occr_bb, expr, bb)
4520 basic_block occr_bb;
4525 char *visited = (char *) xcalloc (n_basic_blocks, 1);
rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4534 /* Given an expr, generate RTL which we can insert at the end of a BB,
4535 or on an edge. Set the block number of any insns generated to
4539 process_insert_insn (expr)
4542 rtx reg = expr->reaching_reg;
4543 rtx exp = copy_rtx (expr->expr);
4548 /* If the expression is something that's an operand, like a constant,
4549 just copy it to a register. */
4550 if (general_operand (exp, GET_MODE (reg)))
4551 emit_move_insn (reg, exp);
4553 /* Otherwise, make a new insn to compute this expression and make sure the
4554 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4555 expression to make sure we don't have any sharing issues. */
4556 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4559 pat = gen_sequence ();
4565 /* Add EXPR to the end of basic block BB.
   This is used by both PRE and code hoisting.
4569 For PRE, we want to verify that the expr is either transparent
4570 or locally anticipatable in the target block. This check makes
4571 no sense for code hoisting. */
4574 insert_insn_end_bb (expr, bb, pre)
4581 rtx reg = expr->reaching_reg;
4582 int regno = REGNO (reg);
4586 pat = process_insert_insn (expr);
4588 /* If the last insn is a jump, insert EXPR in front [taking care to
4589 handle cc0, etc. properly]. */
4591 if (GET_CODE (insn) == JUMP_INSN)
4597 /* If this is a jump table, then we can't insert stuff here. Since
4598 we know the previous real insn must be the tablejump, we insert
4599 the new instruction just before the tablejump. */
4600 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4601 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4602 insn = prev_real_insn (insn);
4605 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4606 if cc0 isn't set. */
4607 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4609 insn = XEXP (note, 0);
4612 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4613 if (maybe_cc0_setter
4614 && INSN_P (maybe_cc0_setter)
4615 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4616 insn = maybe_cc0_setter;
4619 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4620 new_insn = emit_block_insn_before (pat, insn, bb);
4623 /* Likewise if the last insn is a call, as will happen in the presence
4624 of exception handling. */
4625 else if (GET_CODE (insn) == CALL_INSN)
4627 HARD_REG_SET parm_regs;
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
   we search backward and place the instructions before the first
   parameter is loaded.  Do this for everyone for consistency and on
   the presumption that we'll get better code elsewhere as well.

   It should always be the case that we can put these instructions
   anywhere in the basic block when performing PRE optimizations.  */
4641 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
4642 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
4645 /* Since different machines initialize their parameter registers
4646 in different orders, assume nothing. Collect the set of all
4647 parameter registers. */
4648 CLEAR_HARD_REG_SET (parm_regs);
4650 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
4651 if (GET_CODE (XEXP (p, 0)) == USE
4652 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
4654 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
/* We only care about registers which can hold function
   arguments.  */
4659 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
4662 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
4666 /* Search backward for the first set of a register in this set. */
4667 while (nparm_regs && bb->head != insn)
4669 insn = PREV_INSN (insn);
4670 p = single_set (insn);
4671 if (p && GET_CODE (SET_DEST (p)) == REG
4672 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
4673 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
4675 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
4680 /* If we found all the parameter loads, then we want to insert
4681 before the first parameter load.
4683 If we did not find all the parameter loads, then we might have
4684 stopped on the head of the block, which could be a CODE_LABEL.
4685 If we inserted before the CODE_LABEL, then we would be putting
4686 the insn in the wrong basic block. In that case, put the insn
4687 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4688 while (GET_CODE (insn) == CODE_LABEL
4689 || NOTE_INSN_BASIC_BLOCK_P (insn))
4690 insn = NEXT_INSN (insn);
4692 new_insn = emit_block_insn_before (pat, insn, bb);
4696 new_insn = emit_insn_after (pat, insn);
4700 /* Keep block number table up to date.
   Note, PAT could be a multiple-insn sequence; we have to make
   sure that each insn in the sequence is handled.  */
4703 if (GET_CODE (pat) == SEQUENCE)
4705 for (i = 0; i < XVECLEN (pat, 0); i++)
4707 rtx insn = XVECEXP (pat, 0, i);
4709 set_block_for_insn (insn, bb);
4711 add_label_notes (PATTERN (insn), new_insn);
4713 note_stores (PATTERN (insn), record_set_info, insn);
4718 add_label_notes (SET_SRC (pat), new_insn);
4719 set_block_for_new_insns (new_insn, bb);
4721 /* Keep register set table up to date. */
4722 record_one_set (regno, new_insn);
4725 gcse_create_count++;
4729 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4730 bb->index, INSN_UID (new_insn));
4731 fprintf (gcse_file, "copying expression %d to reg %d\n",
4732 expr->bitmap_index, regno);
4736 /* Insert partially redundant expressions on edges in the CFG to make
4737 the expressions fully redundant. */
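/* An illustrative sketch (a hypothetical source-level view, not drawn
   from the code below): if `a + b' is computed on only one of two paths
   joining at a block B, inserting `a + b' on the edge entering B from
   the other path makes the computation in B fully redundant, so it can
   be replaced by a copy from the expression's reaching register.  */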
4740 pre_edge_insert (edge_list, index_map)
4741 struct edge_list *edge_list;
4742 struct expr **index_map;
4744 int e, i, j, num_edges, set_size, did_insert = 0;
4747 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4748 if it reaches any of the deleted expressions. */
4750 set_size = pre_insert_map[0]->size;
4751 num_edges = NUM_EDGES (edge_list);
4752 inserted = sbitmap_vector_alloc (num_edges, n_exprs);
4753 sbitmap_vector_zero (inserted, num_edges);
4755 for (e = 0; e < num_edges; e++)
4758 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4760 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4762 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4764 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
4765 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4767 struct expr *expr = index_map[j];
/* Now look at each deleted occurrence of this expression.  */
4771 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4773 if (! occr->deleted_p)
/* Insert this expression on this edge if it would
   reach the deleted occurrence in BB.  */
4778 if (!TEST_BIT (inserted[e], j))
4781 edge eg = INDEX_EDGE (edge_list, e);
/* We can't insert anything on an abnormal and
   critical edge, so we insert the insn at the end of
   the previous block.  There are several alternatives
   detailed in Morgan's book, p. 277 (sec. 10.5), for
   handling this situation.  This one is easiest for
   now.  */
4790 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4791 insert_insn_end_bb (index_map[j], bb, 0);
4794 insn = process_insert_insn (index_map[j]);
4795 insert_insn_on_edge (insn, eg);
4800 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4802 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4803 fprintf (gcse_file, "copy expression %d\n",
4804 expr->bitmap_index);
4807 update_ld_motion_stores (expr);
4808 SET_BIT (inserted[e], j);
4810 gcse_create_count++;
4817 sbitmap_vector_free (inserted);
4821 /* Copy the result of INSN to REG. INDX is the expression number. */
4824 pre_insert_copy_insn (expr, insn)
4828 rtx reg = expr->reaching_reg;
4829 int regno = REGNO (reg);
4830 int indx = expr->bitmap_index;
4831 rtx set = single_set (insn);
4833 basic_block bb = BLOCK_FOR_INSN (insn);
4838 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
4840 /* Keep block number table up to date. */
4841 set_block_for_new_insns (new_insn, bb);
4843 /* Keep register set table up to date. */
4844 record_one_set (regno, new_insn);
4845 if (insn == bb->end)
4848 gcse_create_count++;
4852 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4853 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4854 INSN_UID (insn), regno);
4855 update_ld_motion_stores (expr);
4858 /* Copy available expressions that reach the redundant expression
4859 to `reaching_reg'. */
4862 pre_insert_copies ()
4869 /* For each available expression in the table, copy the result to
4870 `reaching_reg' if the expression reaches a deleted one.
4872 ??? The current algorithm is rather brute force.
4873 Need to do some profiling. */
4875 for (i = 0; i < expr_hash_table_size; i++)
4876 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4878 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4879 we don't want to insert a copy here because the expression may not
4880 really be redundant. So only insert an insn if the expression was
4881 deleted. This test also avoids further processing if the
4882 expression wasn't deleted anywhere. */
4883 if (expr->reaching_reg == NULL)
4886 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4888 if (! occr->deleted_p)
4891 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4893 rtx insn = avail->insn;
4895 /* No need to handle this one if handled already. */
4896 if (avail->copied_p)
4899 /* Don't handle this one if it's a redundant one. */
4900 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4903 /* Or if the expression doesn't reach the deleted one. */
4904 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4906 BLOCK_FOR_INSN (occr->insn)))
4909 /* Copy the result of avail to reaching_reg. */
4910 pre_insert_copy_insn (expr, insn);
4911 avail->copied_p = 1;
4917 /* Delete redundant computations.
4918 Deletion is done by changing the insn to copy the `reaching_reg' of
4919 the expression into the result of the SET. It is left to later passes
4920 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4922 Returns non-zero if a change is made. */
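/* A hedged sketch of the rewrite performed here (register numbers are
   made up for illustration):

       (set (reg 100) (plus (reg 1) (reg 2)))   ;; redundant computation

   becomes

       (set (reg 100) (reg 200))                ;; reg 200 = reaching_reg

   via validate_change on SET_SRC, leaving the copy for later passes to
   propagate or eliminate.  */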
4933 for (i = 0; i < expr_hash_table_size; i++)
4934 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4936 int indx = expr->bitmap_index;
/* We only need to search antic_occr since we require
   ANTLOC != 0.  */
4941 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4943 rtx insn = occr->insn;
4945 basic_block bb = BLOCK_FOR_INSN (insn);
4947 if (TEST_BIT (pre_delete_map[bb->index], indx))
4949 set = single_set (insn);
4953 /* Create a pseudo-reg to store the result of reaching
4954 expressions into. Get the mode for the new pseudo from
4955 the mode of the original destination pseudo. */
4956 if (expr->reaching_reg == NULL)
4958 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
/* In theory this should never fail since we're creating
   a reg->reg copy.
4963 However, on the x86 some of the movXX patterns actually
4964 contain clobbers of scratch regs. This may cause the
4965 insn created by validate_change to not match any pattern
4966 and thus cause validate_change to fail. */
4967 if (validate_change (insn, &SET_SRC (set),
4968 expr->reaching_reg, 0))
4970 occr->deleted_p = 1;
4971 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4979 "PRE: redundant insn %d (expression %d) in ",
4980 INSN_UID (insn), indx);
4981 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4982 bb->index, REGNO (expr->reaching_reg));
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
   lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
   Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */
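/* As a hedged summary of the steps below: build the bitmap_index ->
   hash-table-entry map, delete the redundant computations, insert
   expressions on the edges chosen by pre_edge_lcm, then copy results
   into each expression's reaching_reg where they are still needed.  */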
5015 int did_insert, changed;
5016 struct expr **index_map;
5019 /* Compute a mapping from expression number (`bitmap_index') to
5020 hash table entry. */
5022 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
5023 for (i = 0; i < expr_hash_table_size; i++)
5024 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5025 index_map[expr->bitmap_index] = expr;
5027 /* Reset bitmap used to track which insns are redundant. */
5028 pre_redundant_insns = sbitmap_alloc (max_cuid);
5029 sbitmap_zero (pre_redundant_insns);
5031 /* Delete the redundant insns first so that
5032 - we know what register to use for the new insns and for the other
5033 ones with reaching expressions
5034 - we know which insns are redundant when we go to create copies */
5036 changed = pre_delete ();
5038 did_insert = pre_edge_insert (edge_list, index_map);
5040 /* In other places with reaching expressions, copy the expression to the
5041 specially allocated pseudo-reg that reaches the redundant expr. */
5042 pre_insert_copies ();
5045 commit_edge_insertions ();
5050 free (pre_redundant_insns);
5054 /* Top level routine to perform one PRE GCSE pass.
5056 Return non-zero if a change was made. */
5059 one_pre_gcse_pass (pass)
5064 gcse_subst_count = 0;
5065 gcse_create_count = 0;
5067 alloc_expr_hash_table (max_cuid);
5068 add_noreturn_fake_exit_edges ();
5070 compute_ld_motion_mems ();
5072 compute_expr_hash_table ();
5073 trim_ld_motion_mems ();
5075 dump_hash_table (gcse_file, "Expression", expr_hash_table,
5076 expr_hash_table_size, n_exprs);
5080 alloc_pre_mem (n_basic_blocks, n_exprs);
5081 compute_pre_data ();
5082 changed |= pre_gcse ();
5083 free_edge_list (edge_list);
5088 remove_fake_edges ();
5089 free_expr_hash_table ();
5093 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5094 current_function_name, pass, bytes_used);
5095 fprintf (gcse_file, "%d substs, %d insns created\n",
5096 gcse_subst_count, gcse_create_count);
5102 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5103 If notes are added to an insn which references a CODE_LABEL, the
5104 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5105 because the following loop optimization pass requires them. */
5107 /* ??? This is very similar to the loop.c add_label_notes function. We
5108 could probably share code here. */
5110 /* ??? If there was a jump optimization pass after gcse and before loop,
5111 then we would not need to do this here, because jump would add the
5112 necessary REG_LABEL notes. */
5115 add_label_notes (x, insn)
5119 enum rtx_code code = GET_CODE (x);
5123 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
/* This code used to ignore labels that referred to dispatch tables to
   avoid flow generating (slightly) worse code.
5128 We no longer ignore such label references (see LABEL_REF handling in
5129 mark_jump_label for additional information). */
5131 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
5133 if (LABEL_P (XEXP (x, 0)))
5134 LABEL_NUSES (XEXP (x, 0))++;
5138 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5141 add_label_notes (XEXP (x, i), insn);
5142 else if (fmt[i] == 'E')
5143 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5144 add_label_notes (XVECEXP (x, i, j), insn);
5148 /* Compute transparent outgoing information for each block.
5150 An expression is transparent to an edge unless it is killed by
5151 the edge itself. This can only happen with abnormal control flow,
5152 when the edge is traversed through a call. This happens with
5153 non-local labels and exceptions.
5155 This would not be necessary if we split the edge. While this is
5156 normally impossible for abnormal critical edges, with some effort
5157 it should be possible with exception handling, since we still have
5158 control over which handler should be invoked. But due to increased
5159 EH table sizes, this may not be worthwhile. */
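/* A hedged example: if a block ends in a call and the hash table holds
   the expression (mem (reg 65)) (a made-up register number), the callee
   may store through an aliasing pointer, so the MEM is marked not
   transparent at the block's exit below; references into the constant
   pool are the one exception, since they cannot be written to.  */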
5162 compute_transpout ()
5168 sbitmap_vector_ones (transpout, n_basic_blocks);
5170 for (bb = 0; bb < n_basic_blocks; ++bb)
/* Note that flow inserted a nop at the end of basic blocks that
   end in call instructions for reasons other than abnormal
   control flow.  */
5175 if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
5178 for (i = 0; i < expr_hash_table_size; i++)
5179 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
5180 if (GET_CODE (expr->expr) == MEM)
5182 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5183 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
/* ??? Optimally, we would use interprocedural alias
   analysis to determine if this mem is actually killed
   by this call.  */
5189 RESET_BIT (transpout[bb], expr->bitmap_index);
5194 /* Removal of useless null pointer checks */
5196 /* Called via note_stores. X is set by SETTER. If X is a register we must
5197 invalidate nonnull_local and set nonnull_killed. DATA is really a
5198 `null_pointer_info *'.
5200 We ignore hard registers. */
5203 invalidate_nonnull_info (x, setter, data)
5205 rtx setter ATTRIBUTE_UNUSED;
5209 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5211 while (GET_CODE (x) == SUBREG)
5214 /* Ignore anything that is not a register or is a hard register. */
5215 if (GET_CODE (x) != REG
5216 || REGNO (x) < npi->min_reg
5217 || REGNO (x) >= npi->max_reg)
5220 regno = REGNO (x) - npi->min_reg;
5222 RESET_BIT (npi->nonnull_local[npi->current_block], regno);
5223 SET_BIT (npi->nonnull_killed[npi->current_block], regno);
5226 /* Do null-pointer check elimination for the registers indicated in
5227 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5228 they are not our responsibility to free. */
5231 delete_null_pointer_checks_1 (delete_list, block_reg, nonnull_avin,
5233 varray_type *delete_list;
5234 unsigned int *block_reg;
5235 sbitmap *nonnull_avin;
5236 sbitmap *nonnull_avout;
5237 struct null_pointer_info *npi;
5241 sbitmap *nonnull_local = npi->nonnull_local;
5242 sbitmap *nonnull_killed = npi->nonnull_killed;
5244 /* Compute local properties, nonnull and killed. A register will have
5245 the nonnull property if at the end of the current block its value is
5246 known to be nonnull. The killed property indicates that somewhere in
5247 the block any information we had about the register is killed.
   Note that a register can have both properties in a single block.  That
   indicates that it's killed, then later in the block a new value is
   computed.  */
5252 sbitmap_vector_zero (nonnull_local, n_basic_blocks);
5253 sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
5255 for (current_block = 0; current_block < n_basic_blocks; current_block++)
5257 rtx insn, stop_insn;
5259 /* Set the current block for invalidate_nonnull_info. */
5260 npi->current_block = current_block;
/* Scan each insn in the basic block looking for memory references and
   null pointer checks.  */
5264 stop_insn = NEXT_INSN (BLOCK_END (current_block));
5265 for (insn = BLOCK_HEAD (current_block);
5267 insn = NEXT_INSN (insn))
5272 /* Ignore anything that is not a normal insn. */
5273 if (! INSN_P (insn))
5276 /* Basically ignore anything that is not a simple SET. We do have
5277 to make sure to invalidate nonnull_local and set nonnull_killed
5278 for such insns though. */
5279 set = single_set (insn);
5282 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
/* See if we've got a usable memory load.  We handle it first
5287 in case it uses its address register as a dest (which kills
5288 the nonnull property). */
5289 if (GET_CODE (SET_SRC (set)) == MEM
5290 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5291 && REGNO (reg) >= npi->min_reg
5292 && REGNO (reg) < npi->max_reg)
5293 SET_BIT (nonnull_local[current_block],
5294 REGNO (reg) - npi->min_reg);
5296 /* Now invalidate stuff clobbered by this insn. */
5297 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
/* And handle stores.  We do these last since any sets in INSN
   cannot kill the nonnull property if it is derived from a MEM
   appearing in a SET_DEST.  */
5302 if (GET_CODE (SET_DEST (set)) == MEM
5303 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5304 && REGNO (reg) >= npi->min_reg
5305 && REGNO (reg) < npi->max_reg)
5306 SET_BIT (nonnull_local[current_block],
5307 REGNO (reg) - npi->min_reg);
/* Now compute global properties based on the local properties.  This
   is a classic global availability algorithm.  */
5313 compute_available (nonnull_local, nonnull_killed,
5314 nonnull_avout, nonnull_avin);
/* Now look at each bb and see if it ends with a compare of a value
   against zero.  */
5318 for (bb = 0; bb < n_basic_blocks; bb++)
5320 rtx last_insn = BLOCK_END (bb);
5321 rtx condition, earliest;
5322 int compare_and_branch;
5324 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5325 since BLOCK_REG[BB] is zero if this block did not end with a
5326 comparison against zero, this condition works. */
5327 if (block_reg[bb] < npi->min_reg
5328 || block_reg[bb] >= npi->max_reg)
5331 /* LAST_INSN is a conditional jump. Get its condition. */
5332 condition = get_condition (last_insn, &earliest);
5334 /* If we can't determine the condition then skip. */
5338 /* Is the register known to have a nonzero value? */
5339 if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
/* Try to compute whether the compare/branch at the block end is one or
   two instructions.  */
5344 if (earliest == last_insn)
5345 compare_and_branch = 1;
5346 else if (earliest == prev_nonnote_insn (last_insn))
5347 compare_and_branch = 2;
5351 /* We know the register in this comparison is nonnull at exit from
5352 this block. We can optimize this comparison. */
5353 if (GET_CODE (condition) == NE)
5357 new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
5359 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5360 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5361 emit_barrier_after (new_jump);
5364 VARRAY_RTX_INIT (*delete_list, 10, "delete_list");
5366 VARRAY_PUSH_RTX (*delete_list, last_insn);
5367 if (compare_and_branch == 2)
5368 VARRAY_PUSH_RTX (*delete_list, earliest);
/* Don't check this block again.  (Note that BLOCK_END is
   invalid here; we deleted the last instruction in the
   block.)  */
/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address with the form (mem (reg)), then
   we know that REG cannot be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register cannot have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.
5399 This could probably be integrated with global cprop with a little work. */
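/* A hedged source-level illustration of the transformation:

       tmp = *p;       <- (mem (reg p)) makes p known nonnull below
       ...             <- assuming no intervening set of p on any path
       if (p != 0)     <- always true: the NE branch is replaced by an
         goto lab;        unconditional jump; an EQ test is just deleted

   (a hypothetical C fragment; the pass itself works on the RTL form).  */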
5402 delete_null_pointer_checks (f)
5403 rtx f ATTRIBUTE_UNUSED;
5405 sbitmap *nonnull_avin, *nonnull_avout;
5406 unsigned int *block_reg;
5407 varray_type delete_list = NULL;
5413 struct null_pointer_info npi;
5415 /* If we have only a single block, then there's nothing to do. */
5416 if (n_basic_blocks <= 1)
/* Trying to perform global optimizations on flow graphs which have
   a high connectivity will take a long time and is unlikely to be
   particularly useful.

   In normal circumstances a cfg should have about twice as many edges
   as blocks.  But we do not want to punish small functions which have
   a couple of switch statements.  So we require a relatively large number
   of basic blocks and the ratio of edges to blocks to be high.  */
5427 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
/* We need four bitmaps, each with a bit for each register in each
   basic block.  */
5432 max_reg = max_reg_num ();
5433 regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);
5435 /* Allocate bitmaps to hold local and global properties. */
5436 npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5437 npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5438 nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5439 nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5441 /* Go through the basic blocks, seeing whether or not each block
5442 ends with a conditional branch whose condition is a comparison
5443 against zero. Record the register compared in BLOCK_REG. */
5444 block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
5445 for (bb = 0; bb < n_basic_blocks; bb++)
5447 rtx last_insn = BLOCK_END (bb);
5448 rtx condition, earliest, reg;
5450 /* We only want conditional branches. */
5451 if (GET_CODE (last_insn) != JUMP_INSN
5452 || !any_condjump_p (last_insn)
5453 || !onlyjump_p (last_insn))
5456 /* LAST_INSN is a conditional jump. Get its condition. */
5457 condition = get_condition (last_insn, &earliest);
/* If we were unable to get the condition, or it is not an equality
   comparison against zero, then there's nothing we can do.  */
5462 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5463 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5464 || (XEXP (condition, 1)
5465 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5468 /* We must be checking a register against zero. */
5469 reg = XEXP (condition, 0);
5470 if (GET_CODE (reg) != REG)
5473 block_reg[bb] = REGNO (reg);
5476 /* Go through the algorithm for each block of registers. */
5477 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5480 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5481 delete_null_pointer_checks_1 (&delete_list, block_reg, nonnull_avin,
5482 nonnull_avout, &npi);
5485 /* Now delete the instructions all at once. This breaks the CFG. */
5488 for (i = 0; i < VARRAY_ACTIVE_SIZE (delete_list); i++)
5489 delete_insn (VARRAY_RTX (delete_list, i));
5490 VARRAY_FREE (delete_list);
5493 /* Free the table of registers compared at the end of every block. */
5497 sbitmap_vector_free (npi.nonnull_local);
5498 sbitmap_vector_free (npi.nonnull_killed);
5499 sbitmap_vector_free (nonnull_avin);
5500 sbitmap_vector_free (nonnull_avout);
5503 /* Code Hoisting variables and subroutines. */
5505 /* Very busy expressions. */
5506 static sbitmap *hoist_vbein;
5507 static sbitmap *hoist_vbeout;
5509 /* Hoistable expressions. */
5510 static sbitmap *hoist_exprs;
5512 /* Dominator bitmaps. */
5513 static sbitmap *dominators;
/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.
5519 It's unclear if tail merging could be run in parallel with
5520 code hoisting. It would be nice. */
5522 /* Allocate vars used for code hoisting analysis. */
5525 alloc_code_hoist_mem (n_blocks, n_exprs)
5526 int n_blocks, n_exprs;
5528 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5529 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5530 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5532 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5533 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5534 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5535 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5537 dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
5540 /* Free vars used for code hoisting analysis. */
5543 free_code_hoist_mem ()
5545 sbitmap_vector_free (antloc);
5546 sbitmap_vector_free (transp);
5547 sbitmap_vector_free (comp);
5549 sbitmap_vector_free (hoist_vbein);
5550 sbitmap_vector_free (hoist_vbeout);
5551 sbitmap_vector_free (hoist_exprs);
5552 sbitmap_vector_free (transpout);
5554 sbitmap_vector_free (dominators);
5557 /* Compute the very busy expressions at entry/exit from each block.
5559 An expression is very busy if all paths from a given point
5560 compute the expression. */
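/* As a hedged sketch, the backward dataflow equations iterated below are

       VBEOUT[B] = intersection over all successors S of VBEIN[S]
       VBEIN[B]  = ANTLOC[B] | (VBEOUT[B] & TRANSP[B])

   solved to a fixed point, which is what the sbitmap calls in
   compute_code_hoist_vbeinout implement.  */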
5563 compute_code_hoist_vbeinout ()
5565 int bb, changed, passes;
5567 sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
5568 sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
/* We scan the blocks in the reverse order to speed up
   the convergence.  */
5579 for (bb = n_basic_blocks - 1; bb >= 0; bb--)
5581 changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
5582 hoist_vbeout[bb], transp[bb]);
5583 if (bb != n_basic_blocks - 1)
5584 sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
5591 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5594 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5597 compute_code_hoist_data ()
5599 compute_local_properties (transp, comp, antloc, 0);
5600 compute_transpout ();
5601 compute_code_hoist_vbeinout ();
5602 calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
5604 fprintf (gcse_file, "\n");
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */
5621 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5622 basic_block expr_bb;
5628 int visited_allocated_locally = 0;
5631 if (visited == NULL)
5633 visited_allocated_locally = 1;
5634 visited = xcalloc (n_basic_blocks, 1);
5637 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5639 basic_block pred_bb = pred->src;
5641 if (pred->src == ENTRY_BLOCK_PTR)
5643 else if (visited[pred_bb->index])
5646 /* Does this predecessor generate this expression? */
5647 else if (TEST_BIT (comp[pred_bb->index], expr_index))
5649 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
5655 visited[pred_bb->index] = 1;
5656 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5661 if (visited_allocated_locally)
5664 return (pred == NULL);
5667 /* Actually perform code hoisting. */
5674 struct expr **index_map;
5677 sbitmap_vector_zero (hoist_exprs, n_basic_blocks);
5679 /* Compute a mapping from expression number (`bitmap_index') to
5680 hash table entry. */
5682 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
5683 for (i = 0; i < expr_hash_table_size; i++)
5684 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5685 index_map[expr->bitmap_index] = expr;
/* Walk over each basic block looking for potentially hoistable
   expressions; nothing gets hoisted from the entry block.  */
5689 for (bb = 0; bb < n_basic_blocks; bb++)
5692 int insn_inserted_p;
5694 /* Examine each expression that is very busy at the exit of this
5695 block. These are the potentially hoistable expressions. */
5696 for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
5700 if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
/* We've found a potentially hoistable expression; now
   we look at every block BB dominates to see if it
   computes the expression.  */
5705 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5707 /* Ignore self dominance. */
5709 || ! TEST_BIT (dominators[dominated], bb))
/* We've found a dominated block; now see if it computes
   the busy expression and whether or not moving that
   expression to the "beginning" of that block is safe.  */
5715 if (!TEST_BIT (antloc[dominated], i))
/* Note if the expression would reach the dominated block
   unimpaired if it was placed at the end of BB.
5721 Keep track of how many times this expression is hoistable
5722 from a dominated block into BB. */
5723 if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
5724 BASIC_BLOCK (dominated), NULL))
/* If we found more than one hoistable occurrence of this
   expression, then note it in the bitmap of expressions to
   hoist.  It makes no sense to hoist things which are computed
   in only one BB, and doing so tends to pessimize register
   allocation.  One could increase this value to try harder
   to avoid any possible code expansion due to register
   allocation issues; however experiments have shown that
   the vast majority of hoistable expressions are only movable
   from two successors, so raising this threshold is likely
   to nullify any benefit we get from code hoisting.  */
5740 SET_BIT (hoist_exprs[bb], i);
5746 /* If we found nothing to hoist, then quit now. */
5750 /* Loop over all the hoistable expressions. */
5751 for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
5753 /* We want to insert the expression into BB only once, so
5754 note when we've inserted it. */
5755 insn_inserted_p = 0;
5757 /* These tests should be the same as the tests above. */
5758 if (TEST_BIT (hoist_vbeout[bb], i))
/* We've found a potentially hoistable expression; now
   we look at every block BB dominates to see if it
   computes the expression.  */
5763 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5765 /* Ignore self dominance. */
5767 || ! TEST_BIT (dominators[dominated], bb))
/* We've found a dominated block; now see if it computes
   the busy expression and whether or not moving that
   expression to the "beginning" of that block is safe.  */
5773 if (!TEST_BIT (antloc[dominated], i))
5776 /* The expression is computed in the dominated block and
5777 it would be safe to compute it at the start of the
5778 dominated block. Now we have to determine if the
   expression would reach the dominated block if it was
5780 placed at the end of BB. */
5781 if (hoist_expr_reaches_here_p (BASIC_BLOCK (bb), i,
5782 BASIC_BLOCK (dominated), NULL))
5784 struct expr *expr = index_map[i];
5785 struct occr *occr = expr->antic_occr;
/* Find the right occurrence of this expression.  */
while (occr && BLOCK_NUM (occr->insn) != dominated)
5793 /* Should never happen. */
5799 set = single_set (insn);
5803 /* Create a pseudo-reg to store the result of reaching
5804 expressions into. Get the mode for the new pseudo
5805 from the mode of the original destination pseudo. */
5806 if (expr->reaching_reg == NULL)
5808 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
/* In theory this should never fail since we're creating
   a reg->reg copy.
5813 However, on the x86 some of the movXX patterns
5814 actually contain clobbers of scratch regs. This may
5815 cause the insn created by validate_change to not
   match any pattern and thus cause validate_change to
   fail.  */
5818 if (validate_change (insn, &SET_SRC (set),
5819 expr->reaching_reg, 0))
5821 occr->deleted_p = 1;
5822 if (!insn_inserted_p)
5824 insert_insn_end_bb (index_map[i],
5825 BASIC_BLOCK (bb), 0);
5826 insn_inserted_p = 1;
5838 /* Top level routine to perform one code hoisting (aka unification) pass
5840 Return non-zero if a change was made. */
5843 one_code_hoisting_pass ()
5847 alloc_expr_hash_table (max_cuid);
5848 compute_expr_hash_table ();
dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
5851 expr_hash_table_size, n_exprs);
5855 alloc_code_hoist_mem (n_basic_blocks, n_exprs);
5856 compute_code_hoist_data ();
5858 free_code_hoist_mem ();
5861 free_expr_hash_table ();
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

   The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge-based LCM, recognizes
   this situation and allows gcse to move the load out of the loop.

   Once gcse has hoisted the load, store motion can then push this
   load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */
5894 static struct ls_expr *
5898 struct ls_expr * ptr;
5900 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5901 if (expr_equiv_p (ptr->pattern, x))
5906 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
5908 ptr->next = pre_ldst_mems;
5911 ptr->loads = NULL_RTX;
5912 ptr->stores = NULL_RTX;
5913 ptr->reaching_reg = NULL_RTX;
5916 ptr->hash_index = 0;
5917 pre_ldst_mems = ptr;
5923 /* Free up an individual ldst entry. */
5926 free_ldst_entry (ptr)
5927 struct ls_expr * ptr;
5929 free_INSN_LIST_list (& ptr->loads);
5930 free_INSN_LIST_list (& ptr->stores);
5935 /* Free up all memory associated with the ldst list. */
5940 while (pre_ldst_mems)
5942 struct ls_expr * tmp = pre_ldst_mems;
5944 pre_ldst_mems = pre_ldst_mems->next;
5946 free_ldst_entry (tmp);
5949 pre_ldst_mems = NULL;
5952 /* Dump debugging info about the ldst list. */
5955 print_ldst_list (file)
5958 struct ls_expr * ptr;
5960 fprintf (file, "LDST list: \n");
5962 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5964 fprintf (file, " Pattern (%3d): ", ptr->index);
5966 print_rtl (file, ptr->pattern);
5968 fprintf (file, "\n Loads : ");
5971 print_rtl (file, ptr->loads);
5973 fprintf (file, "(nil)");
5975 fprintf (file, "\n Stores : ");
5978 print_rtl (file, ptr->stores);
5980 fprintf (file, "(nil)");
5982 fprintf (file, "\n\n");
5985 fprintf (file, "\n");
/* Return the entry in the list of ldst-only expressions matching X, or
   NULL if there is none (or its entry has been invalidated).  */
5990 static struct ls_expr *
5991 find_rtx_in_ldst (x)
5994 struct ls_expr * ptr;
5996 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5997 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6003 /* Assign each element of the list of mems a monotonically increasing value. */
6008 struct ls_expr * ptr;
6011 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6017 /* Return first item in the list. */
6019 static inline struct ls_expr *
6022 return pre_ldst_mems;
/* Return the next item in the list after the specified one.  */
6027 static inline struct ls_expr *
6029 struct ls_expr * ptr;
6034 /* Load Motion for loads which only kill themselves. */
/* Return true if X is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list; otherwise we let the usual aliasing take care of it.  */
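/* Hedged examples of the tests below: a fixed-address reference such as
   (mem (symbol_ref "x")) qualifies, while a volatile MEM, a BLKmode MEM,
   or a MEM whose address can vary, e.g. (mem (reg 42)), does not.  */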
6044 if (GET_CODE (x) != MEM)
6047 if (MEM_VOLATILE_P (x))
6050 if (GET_MODE (x) == BLKmode)
6053 if (!rtx_varies_p (XEXP (x, 0), 0))
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias one another.  If we miss a load, we
   will get the wrong value since gcse might common it and we won't
   know to fix it up.  */
6068 invalidate_any_buried_refs (x)
6073 struct ls_expr * ptr;
6075 /* Invalidate it in the list. */
6076 if (GET_CODE (x) == MEM && simple_mem (x))
6078 ptr = ldst_entry (x);
6082 /* Recursively process the insn. */
6083 fmt = GET_RTX_FORMAT (GET_CODE (x));
6085 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6088 invalidate_any_buried_refs (XEXP (x, i));
6089 else if (fmt[i] == 'E')
6090 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6091 invalidate_any_buried_refs (XVECEXP (x, i, j));
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no
   side effects and no registers in the expression.  If there are any
   uses/defs which don't match these criteria, the entry is invalidated
   and trimmed out later.  */
6102 compute_ld_motion_mems ()
6104 struct ls_expr * ptr;
6108 pre_ldst_mems = NULL;
6110 for (bb = 0; bb < n_basic_blocks; bb++)
6112 for (insn = BLOCK_HEAD (bb);
6113 insn && insn != NEXT_INSN (BLOCK_END (bb));
6114 insn = NEXT_INSN (insn))
6116 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6118 if (GET_CODE (PATTERN (insn)) == SET)
6120 rtx src = SET_SRC (PATTERN (insn));
6121 rtx dest = SET_DEST (PATTERN (insn));
6123 /* Check for a simple LOAD... */
6124 if (GET_CODE (src) == MEM && simple_mem (src))
6126 ptr = ldst_entry (src);
6127 if (GET_CODE (dest) == REG)
6128 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6134 /* Make sure there isn't a buried load somewhere. */
6135 invalidate_any_buried_refs (src);
/* Check for stores.  Don't worry about aliased ones; they
   will block any movement we might do later.  We only care
   about this exact pattern since those are the only
   circumstances in which we will ignore the aliasing info.  */
6142 if (GET_CODE (dest) == MEM && simple_mem (dest))
6144 ptr = ldst_entry (dest);
6146 if (GET_CODE (src) != MEM
6147 && GET_CODE (src) != ASM_OPERANDS)
6148 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6154 invalidate_any_buried_refs (PATTERN (insn));
/* Remove any references that have either been invalidated or are not in
   the expression list for pre gcse.  */
6164 trim_ld_motion_mems ()
6166 struct ls_expr * last = NULL;
6167 struct ls_expr * ptr = first_ls_expr ();
6171 int del = ptr->invalid;
6172 struct expr * expr = NULL;
6174 /* Delete if entry has been made invalid. */
6180 /* Delete if we cannot find this mem in the expression list. */
6181 for (i = 0; i < expr_hash_table_size && del; i++)
6183 for (expr = expr_hash_table[i];
6185 expr = expr->next_same_hash)
6186 if (expr_equiv_p (expr->expr, ptr->pattern))
6198 last->next = ptr->next;
6199 free_ldst_entry (ptr);
6204 pre_ldst_mems = pre_ldst_mems->next;
6205 free_ldst_entry (ptr);
6206 ptr = pre_ldst_mems;
6211 /* Set the expression field if we are keeping it. */
6218 /* Show the world what we've found. */
6219 if (gcse_file && pre_ldst_mems != NULL)
6220 print_ldst_list (gcse_file);
/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */
6231 update_ld_motion_stores (expr)
6234 struct ls_expr * mem_ptr;
6236 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
/* We can try to find just the REACHED stores, but it shouldn't
   matter to set the reaching reg everywhere...  some might be
   dead and should be eliminated later.  */

/* We replace  SET mem = expr  with
     SET reg = expr
     SET mem = reg ,
   where reg is the reaching reg used in the load.  */
6246 rtx list = mem_ptr->stores;
6248 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6250 rtx insn = XEXP (list, 0);
6251 rtx pat = PATTERN (insn);
6252 rtx src = SET_SRC (pat);
6253 rtx reg = expr->reaching_reg;
6256 /* If we've already copied it, continue. */
6257 if (expr->reaching_reg == src)
6262 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6263 print_rtl (gcse_file, expr->reaching_reg);
6264 fprintf (gcse_file, ":\n ");
6265 print_inline_rtx (gcse_file, insn, 8);
6266 fprintf (gcse_file, "\n");
copy = gen_move_insn (reg, SET_SRC (pat));
6270 new = emit_insn_before (copy, insn);
6271 record_one_set (REGNO (reg), new);
6272 set_block_for_new_insns (new, BLOCK_FOR_INSN (insn));
6273 SET_SRC (pat) = reg;
6275 /* un-recognize this pattern since it's probably different now. */
6276 INSN_CODE (insn) = -1;
6277 gcse_create_count++;
6282 /* Store motion code. */
6284 /* This is used to communicate the target bitvector we want to use in the
6285 reg_set_info routine when called via the note_stores mechanism. */
6286 static sbitmap * regvec;
6288 /* Used in computing the reverse edge graph bit vectors. */
6289 static sbitmap * st_antloc;
6291 /* Global holding the number of store expressions we are dealing with. */
6292 static int num_stores;
/* Checks to see if we need to mark a register set.  Called from note_stores.  */
6297 reg_set_info (dest, setter, data)
6298 rtx dest, setter ATTRIBUTE_UNUSED;
6299 void * data ATTRIBUTE_UNUSED;
6301 if (GET_CODE (dest) == SUBREG)
6302 dest = SUBREG_REG (dest);
6304 if (GET_CODE (dest) == REG)
6305 SET_BIT (*regvec, REGNO (dest));
6308 /* Return non-zero if the register operands of expression X are killed
6309 anywhere in basic block BB. */
6312 store_ops_ok (x, bb)
6320 /* Repeat is used to turn tail-recursion into iteration. */
6326 code = GET_CODE (x);
6330 /* If a reg has changed after us in this
6331 block, the operand has been killed. */
6332 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6359 i = GET_RTX_LENGTH (code) - 1;
6360 fmt = GET_RTX_FORMAT (code);
6366 rtx tem = XEXP (x, i);
6368 /* If we are about to do the last recursive call
6369 needed at this level, change it into iteration.
6370 This function is called enough to be worth it. */
6377 if (! store_ops_ok (tem, bb))
6380 else if (fmt[i] == 'E')
6384 for (j = 0; j < XVECLEN (x, i); j++)
6386 if (! store_ops_ok (XVECEXP (x, i, j), bb))
/* Determine whether INSN is a MEM store pattern that we will consider moving.  */
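/* A hedged example: a store such as (set (mem:SI (symbol_ref "x")) (reg 60))
   qualifies, while stores through registers, volatile or BLKmode stores,
   and stores whose source is an ASM_OPERANDS are rejected below.  */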
6398 find_moveable_store (insn)
6401 struct ls_expr * ptr;
6402 rtx dest = PATTERN (insn);
6404 if (GET_CODE (dest) != SET
6405 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6408 dest = SET_DEST (dest);
6410 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6411 || GET_MODE (dest) == BLKmode)
6414 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6417 if (rtx_varies_p (XEXP (dest, 0), 0))
6420 ptr = ldst_entry (dest);
6421 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6424 /* Perform store motion. Much like gcse, except we move expressions the
6425 other way by looking at the flowgraph in reverse. */
6428 compute_store_table ()
6434 max_gcse_regno = max_reg_num ();
6436 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
6438 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
6441 /* Find all the stores we care about. */
6442 for (bb = 0; bb < n_basic_blocks; bb++)
6444 regvec = & (reg_set_in_block[bb]);
6445 for (insn = BLOCK_END (bb);
6446 insn && insn != PREV_INSN (BLOCK_HEAD (bb));
6447 insn = PREV_INSN (insn))
6449 #ifdef NON_SAVING_SETJMP
6450 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
6451 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
6453 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6454 SET_BIT (reg_set_in_block[bb], regno);
6458 /* Ignore anything that is not a normal insn. */
6459 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6462 if (GET_CODE (insn) == CALL_INSN)
6464 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6465 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6466 SET_BIT (reg_set_in_block[bb], regno);
6469 pat = PATTERN (insn);
6470 note_stores (pat, reg_set_info, NULL);
6472 /* Now that we've marked regs, look for stores. */
6473 if (GET_CODE (pat) == SET)
6474 find_moveable_store (insn);
6478 ret = enumerate_ldsts ();
6482 fprintf (gcse_file, "Store Motion Expressions.\n");
6483 print_ldst_list (gcse_file);
6489 /* Check to see if the load X is aliased with STORE_PATTERN. */
6492 load_kills_store (x, store_pattern)
6493 rtx x, store_pattern;
6495 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6500 /* Go through the entire insn X, looking for any loads which might alias
6501 STORE_PATTERN. Return 1 if found. */
6504 find_loads (x, store_pattern)
6505 rtx x, store_pattern;
6511 if (GET_CODE (x) == SET)
6514 if (GET_CODE (x) == MEM)
6516 if (load_kills_store (x, store_pattern))
6520 /* Recursively process the insn. */
6521 fmt = GET_RTX_FORMAT (GET_CODE (x));
6523 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6526 ret |= find_loads (XEXP (x, i), store_pattern);
6527 else if (fmt[i] == 'E')
6528 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6529 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
/* Check if INSN kills the store pattern X (is aliased with it).
   Return 1 if it does.  */
6538 store_killed_in_insn (x, insn)
6541 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6544 if (GET_CODE (insn) == CALL_INSN)
6546 if (CONST_CALL_P (insn))
6552 if (GET_CODE (PATTERN (insn)) == SET)
6554 rtx pat = PATTERN (insn);
6555 /* Check for memory stores to aliased objects. */
6556 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
/* Pretend it's a load and check for aliasing.  */
6558 if (find_loads (SET_DEST (pat), x))
6560 return find_loads (SET_SRC (pat), x);
6563 return find_loads (PATTERN (insn), x);
6566 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6567 within basic block BB. */
6570 store_killed_after (x, insn, bb)
6579 /* Check if the register operands of the store are OK in this block.
6580 Note that if registers are changed ANYWHERE in the block, we'll
6581 decide we can't move it, regardless of whether it changed above
6582 or below the store. This could be improved by checking the register
   operands while looking for aliasing in each insn.  */
6584 if (!store_ops_ok (XEXP (x, 0), bb))
6587 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6588 if (store_killed_in_insn (x, insn))
6594 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6595 within basic block BB. */
6597 store_killed_before (x, insn, bb)
6601 rtx first = bb->head;
6604 return store_killed_in_insn (x, insn);
6606 /* Check if the register operands of the store are OK in this block.
6607 Note that if registers are changed ANYWHERE in the block, we'll
6608 decide we can't move it, regardless of whether it changed above
6609 or below the store. This could be improved by checking the register
   operands while looking for aliasing in each insn.  */
6611 if (!store_ops_ok (XEXP (x, 0), bb))
6614 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6615 if (store_killed_in_insn (x, insn))
6621 #define ANTIC_STORE_LIST(x) ((x)->loads)
6622 #define AVAIL_STORE_LIST(x) ((x)->stores)
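/* A note on the two macros above (an observation from this file's own
   usage, not external documentation): for store motion the ls_expr
   lists are reused, so ANTIC_STORE_LIST holds the stores found
   anticipatable (not killed between block entry and the store) and
   AVAIL_STORE_LIST the stores found available (not killed between the
   store and block exit), as build_store_vectors fills them in below.  */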
6624 /* Given the table of available store insns at the end of blocks,
6625 determine which ones are not killed by aliasing, and generate
6626 the appropriate vectors for gen and killed. */
6628 build_store_vectors ()
6633 struct ls_expr * ptr;
6635 /* Build the gen_vector. This is any store in the table which is not killed
6636 by aliasing later in its block. */
6637 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6638 sbitmap_vector_zero (ae_gen, n_basic_blocks);
6640 st_antloc = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
6641 sbitmap_vector_zero (st_antloc, n_basic_blocks);
6643 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
/* Put all the stores into either the antic list or the avail list,
   or both.  */
6647 rtx store_list = ptr->stores;
6648 ptr->stores = NULL_RTX;
6650 for (st = store_list; st != NULL; st = XEXP (st, 1))
6652 insn = XEXP (st, 0);
6653 bb = BLOCK_FOR_INSN (insn);
6655 if (!store_killed_after (ptr->pattern, insn, bb))
/* If we've already seen an available expression in this block,
   we can delete the one we saw already (it occurs earlier in
   the block) and replace it with this one.  We'll copy the
   old SRC expression to an unused register in case there
   are any side effects.  */
6662 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6664 /* Find previous store. */
6666 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
6667 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
6671 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
fprintf (gcse_file, "Removing redundant store:\n");
6674 replace_store_insn (r, XEXP (st, 0), bb);
6675 XEXP (st, 0) = insn;
6679 SET_BIT (ae_gen[bb->index], ptr->index);
6680 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
6681 AVAIL_STORE_LIST (ptr));
	  if (!store_killed_before (ptr->pattern, insn, bb))
	    {
	      SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
	      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
							ANTIC_STORE_LIST (ptr));
	    }
	}

      /* Free the original list of store insns.  */
      free_INSN_LIST_list (&store_list);
    }
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  transp = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks, num_stores);
  sbitmap_vector_zero (transp, n_basic_blocks);
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    for (b = 0; b < n_basic_blocks; b++)
      {
	if (store_killed_after (ptr->pattern, BLOCK_HEAD (b), BASIC_BLOCK (b)))
	  {
	    /* The anticipatable expression is not killed if it's gen'd.
	       We leave this check out for now.  If we have a code sequence
	       in a block which looks like:
			ST MEMa = x
			L     y = MEMa
			ST MEMa = z
	       we should flag this as having an ANTIC expression, NOT
	       transparent, NOT killed, and AVAIL.
	       Unfortunately, since we haven't re-written all loads to
	       use the reaching reg, we'll end up doing an incorrect
	       load in the middle here if we push the store down.  It
	       happens in gcc.c-torture/execute/960311-1.c with -O3.
	       If we always kill it in this case, we'll sometimes do
	       unnecessary work, but it shouldn't actually hurt anything.
	       if (!TEST_BIT (ae_gen[b], ptr->index)).  */
	    SET_BIT (ae_kill[b], ptr->index);
	  }
	else
	  SET_BIT (transp[b], ptr->index);
      }
  /* Any block with no exits calls some non-returning function, so
     we better mark the store killed here, or we might not store to
     it at all.  If we knew it was abort, we wouldn't have to store,
     but we don't know that for sure.  */
  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, n_basic_blocks);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, n_basic_blocks);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, n_basic_blocks);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, n_basic_blocks);
    }
}
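
/* An informal summary of the vectors built above (inferred reading, not
   from the original sources): ae_gen[b] holds the stores still available
   at the end of block B, st_antloc[b] the stores anticipatable at its
   start, ae_kill[b] the stores killed somewhere within B, and transp[b]
   the stores B is transparent for -- B neither touches an aliasing
   location nor changes the address registers, so the store can be moved
   across B unchanged.  */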
/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (insn, bb)
     rtx insn;
     basic_block bb;
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;

  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;

      prev = before;
      if (prev == bb->end)
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (prev == bb->end)
    bb->end = insn;

  set_block_for_new_insns (insn, bb);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}
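
/* An illustrative, hypothetical example (not from the original sources):
   for a block beginning

	(code_label 23)
	(note 24 ... NOTE_INSN_BASIC_BLOCK)
	(insn 25 ...)

   the loop above advances past the label and the basic block note, so
   the new store ends up emitted after note 24 and before insn 25.  */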
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (expr, e)
     struct ls_expr * expr;
     edge e;
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (expr->pattern, reg);
  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the
     other edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
    {
      int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

      if (index == EDGE_INDEX_NO_EDGE)
	abort ();

      if (! TEST_BIT (pre_insert_map[index], expr->index))
	break;
    }
  /* If TMP is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp; tmp = tmp->pred_next)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (pre_insert_map[index], expr->index);
	}

      insert_insn_start_bb (insn, bb);
      return 0;
    }
  /* We can't insert on this edge, so we'll insert at the head of the
     successor block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
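
/* An illustrative, hypothetical example (not from the original sources):
   if block B has two predecessor edges and pre_insert_map selects this
   store on both of them, the scan above finds no unselected edge, so
   the store is inserted once at the start of B and both edge bits are
   cleared.  If only one edge is selected and it is not abnormal, the
   store is queued on that edge and materialized later by
   commit_edge_insertions.  */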
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (reg, del, bb)
     rtx reg, del;
     basic_block bb;
{
  rtx insn;

  insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
  insn = emit_insn_after (insn, del);
  set_block_for_new_insns (insn, bb);
6867 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6868 print_inline_rtx (gcse_file, del, 6);
6869 fprintf(gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6870 print_inline_rtx (gcse_file, insn, 6);
6871 fprintf(gcse_file, "\n");
6877 if (bb->head == del)
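
/* An illustrative, hypothetical example (not from the original sources):
   given

	(insn 42 (set (mem:SI (reg:SI 100)) (reg:SI 5)))

   calling replace_store_insn with a new pseudo, reg 110, deletes insn 42
   and leaves

	(insn 43 (set (reg:SI 110) (reg:SI 5)))

   in its place; the store into (mem:SI (reg:SI 100)) is re-created from
   reg 110 wherever insert_store later decides it belongs.  */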
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (expr, bb)
     struct ls_expr * expr;
     basic_block bb;
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
  /* If there is more than one store, the earlier ones will be dead,
     but it doesn't hurt to replace them here.  */
  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one, since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb);
	  break;
	}
    }
}
/* Free memory used by store motion.  */

static void
free_store_memory ()
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

void
store_motion ()
{
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();
  /* Find all the stores that are live to the end of their block.  */
  num_stores = compute_store_table ();

  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }
  /* Now compute what's actually available to move.  */
  add_noreturn_fake_exit_edges ();
  build_store_vectors ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);
  /* Delete the stores that have become redundant, and insert the new
     stores where they are needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (x = 0; x < n_basic_blocks; x++)
	if (TEST_BIT (pre_delete_map[x], ptr->index))
	  delete_store (ptr, BASIC_BLOCK (x));

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (TEST_BIT (pre_insert_map[x], ptr->index))
	  update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }
  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
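
/* An illustrative, hypothetical sketch (not from the original sources)
   of the overall transformation:

	if (c)			    if (c)
	  { ...; *p = x; }	      { ...; r = x; }
	else		     ==>    else
	  { ...; *p = y; }	      { ...; r = y; }
	...			    *p = r;
				    ...

   Both stores are available on every path into the join block, so each
   is replaced by a copy into the reaching register and a single store
   is inserted at the join.  */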