/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.  */
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"
/* Propagate flow information through back edges and thus enable PRE to
   move loop-invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note that much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
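   For example (an illustrative source-level sketch, not code from this
   file), given

     for (...)
       ... = a * b;      <-- a * b is loop invariant

   the computation of a * b is partially redundant along the loop's back
   edge, so PRE inserts t = a * b in the loop preheader and rewrites the
   use inside the loop to ... = t.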
   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
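   As an illustrative sketch of steps 3-5 (again source-level, not RTL),
   given

     if (...)
       x = a + b;      <-- computed on one path only
     y = a + b;        <-- partially redundant

   the redundant computation is deleted and replaced by y = t, the first
   computation becomes t = a + b; x = t, and a copy t = a + b is inserted
   on the path that did not compute the expression.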
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by writing small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;
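/* For example, from within GDB one can dump the expression hash table to
   the terminal with something like (an illustrative session, using the
   dump_hash_table routine declared below):

     (gdb) call dump_hash_table (debug_stderr, "expr", &expr_hash_table)  */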
/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Nonzero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  unsigned int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether this is the expression hash table or the copy propagation
     (set) one.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
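/* Usage sketch (illustrative only): because CUIDs increase monotonically and
   have no gaps, two real insns can be ordered simply by comparing their
   CUIDs:

     if (INSN_CUID (a) < INSN_CUID (b))
       ... A executes first within the function ...

   and the two mappings are inverses for real insns:

     CUID_INSN (INSN_CUID (insn)) == insn  */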
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */
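/* A minimal sketch of the iteration this buys compute_transp-style clients
   (REGNO and INDX are hypothetical here; RESET_BIT and BLOCK_NUM are the
   usual sbitmap/basic-block accessors):

     struct reg_set *r;

     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       RESET_BIT (transp[BLOCK_NUM (r->insn)], indx);

   i.e. one list node per set of the pseudo, rather than a walk over every
   basic block.  */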
typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
/* This is a list of expressions which are MEMs and will be used by load
   motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself, i.e., loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be movable towards the exit block.  */
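/* For example (a source-level sketch, not RTL): if every path through

     while (cond)
       *p = *p + 1;

   loads and stores only the single location *p, load motion can rewrite the
   body to work on a reaching register t instead of re-loading *p, with every
   store copying the same value into t, and store motion can then sink the
   one remaining store of t towards the exit block.  */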
struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  int hash_index;               /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.

       rd_kill[block_num][cuid_num]
       ae_kill[block_num][expr_num]  */
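/* So, for instance, asking whether expression EXPR is killed in block BB is
   a single bit test (an illustrative sketch; TEST_BIT is the usual sbitmap
   accessor):

     if (TEST_BIT (ae_kill[bb->index], expr->bitmap_index))
       ... the expression does not survive BB ...  */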
/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
                                          int, int, struct hash_table *));
static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((struct hash_table *));
static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
static void free_hash_table PARAMS ((struct hash_table *));
static void compute_hash_table_work PARAMS ((struct hash_table *));
static void dump_hash_table PARAMS ((FILE *, const char *,
                                     struct hash_table *));
static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
static struct expr *lookup_set PARAMS ((unsigned int, rtx, struct hash_table *));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
                                              struct hash_table *));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int, int));
static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
static struct expr *find_bypass_set PARAMS ((int, int));
static int bypass_block PARAMS ((basic_block, rtx, rtx));
static int bypass_conditional_jumps PARAMS ((void));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
                                            basic_block));
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
                                              char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((struct hash_table *));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
                                        basic_block, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
                                                 sbitmap *, sbitmap *,
                                                 struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
                                             basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
                                                 basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static int store_ops_ok PARAMS ((rtx, basic_block));
static void find_moveable_store PARAMS ((rtx));
static int compute_store_table PARAMS ((void));
static int load_kills_store PARAMS ((rtx, rtx));
static int find_loads PARAMS ((rtx, rtx));
static int store_killed_in_insn PARAMS ((rtx, rtx));
static int store_killed_after PARAMS ((rtx, rtx, basic_block));
static int store_killed_before PARAMS ((rtx, rtx, basic_block));
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *,
                                  basic_block));
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
static void local_cprop_pass PARAMS ((int));
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;
  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }
  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
        changed |= one_classic_gcse_pass (pass + 1);
      else
        {
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              canon_modify_mem_list
                = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
              memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
              memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;
      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we use a classic gcse algorithm instead of partial
         redundancy algorithms).  */
      if (optimize_size)
        {
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
        }
      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }
  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  /* Store motion disabled until it is fixed.  */
  if (0 && !optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy_p[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
                                                       max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less memory.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
                             / column_size);
}
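/* Worked example (illustrative numbers; assumes an 8-byte SBITMAP_ELT_TYPE):
   with n == 2 bitmaps and x == 1000 blocks, one column costs
   2 * 1000 * 8 == 16000 bytes.  For y == 50000 expressions that is roughly
   780 columns, about 12.5MB, which exceeds the 10MB budget above, so the
   function instead returns SBITMAP_ELT_BITS * ceil (10MB / 16000) -- about
   42000 with 64-bit elements -- and the caller solves the 50000 equations
   in two chunks.  */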
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
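/* For instance (an illustrative sketch): in a block containing

     r5 = r1 + r2
     r1 = ...

   the expression r1 + r2 is locally anticipatable (computed before its
   operands change) but not locally available (r1 is modified afterwards);
   with the two insns in the opposite order it would be available but not
   anticipatable.  */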
static void
compute_local_properties (transp, comp, antloc, table)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     struct hash_table *table;
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
        = (struct reg_set **) grealloc ((char *) reg_set_table,
                                        new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
                                                   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;
static int
want_to_gcse_p (x)
     rtx x;
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (bb, uid_limit, x, avail_p)
     basic_block bb;
     int uid_limit;
     rtx x;
     int avail_p;
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (ps)
     const char *ps;
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
               + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += (unsigned int) XWINT (x, i);
      else
        hash += ((unsigned int) CONST_DOUBLE_LOW (x)
                 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }

      hash += (unsigned int) MEM;
      /* We used alias set for hashing, but this is not good, since the alias
         set may differ in -fprofile-arcs and -fbranch-probabilities compilation
         causing the profiles to fail to match.  */
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        {
          *do_not_record_p = 1;
          return 0;
        }
      else
        {
          /* We don't want to take the filename and line into account.  */
          hash += (unsigned) code + (unsigned) GET_MODE (x)
            + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
            + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
            + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

          if (ASM_OPERANDS_INPUT_LENGTH (x))
            {
              for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
                {
                  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
                                        GET_MODE (ASM_OPERANDS_INPUT (x, i)),
                                        do_not_record_p)
                           + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
                                            (x, i)));
                }

              hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
              x = ASM_OPERANDS_INPUT (x, 0);
              mode = GET_MODE (x);
              goto repeat;
            }

          return hash;
        }

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
          if (*do_not_record_p)
            return 0;
        }

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
            if (*do_not_record_p)
              return 0;
          }

      else if (fmt[i] == 's')
        hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
        hash += (unsigned int) XINT (x, i);
      else
        abort ();
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return nonzero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
         decide that the expression is transparent in a block when it isn't,
         due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
        return 0;
      break;

    /*  For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
               && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
              || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
                  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
         disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
        return 0;

      if (GET_MODE (x) != GET_MODE (y)
          || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
          || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
                     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
          || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
          || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
        return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
        {
          for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
            if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
                                ASM_OPERANDS_INPUT (y, i))
                || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
                           ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
              return 0;
        }

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case '0':
          break;

        default:
          abort ();
        }
    }

  return 1;
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */
1940 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1942 enum machine_mode mode;
1944 int antic_p, avail_p;
1945 struct hash_table *table;
1947 int found, do_not_record_p;
1949 struct expr *cur_expr, *last_expr = NULL;
1950 struct occr *antic_occr, *avail_occr;
1951 struct occr *last_occr = NULL;
1953 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1955 /* Do not insert expression in table if it contains volatile operands,
1956 or if hash_expr determines the expression is something we don't want
1957 to or can't handle. */
1958 if (do_not_record_p)
1961 cur_expr = table->table[hash];
1964 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1966 /* If the expression isn't found, save a pointer to the end of
1968 last_expr = cur_expr;
1969 cur_expr = cur_expr->next_same_hash;
1974 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1975 bytes_used += sizeof (struct expr);
1976 if (table->table[hash] == NULL)
1977 /* This is the first pattern that hashed to this index. */
1978 table->table[hash] = cur_expr;
1980 /* Add EXPR to end of this hash chain. */
1981 last_expr->next_same_hash = cur_expr;
1983 /* Set the fields of the expr element. */
1985 cur_expr->bitmap_index = table->n_elems++;
1986 cur_expr->next_same_hash = NULL;
1987 cur_expr->antic_occr = NULL;
1988 cur_expr->avail_occr = NULL;
1991 /* Now record the occurrence(s). */
1994 antic_occr = cur_expr->antic_occr;
1996 /* Search for another occurrence in the same basic block. */
1997 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1999 /* If an occurrence isn't found, save a pointer to the end of
2001 last_occr = antic_occr;
2002 antic_occr = antic_occr->next;
2006 /* Found another instance of the expression in the same basic block.
2007 Prefer the currently recorded one. We want the first one in the
2008 block and the block is scanned from start to end. */
2009 ; /* nothing to do */
2012 /* First occurrence of this expression in this basic block. */
2013 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2014 bytes_used += sizeof (struct occr);
2015 /* First occurrence of this expression in any block? */
2016 if (cur_expr->antic_occr == NULL)
2017 cur_expr->antic_occr = antic_occr;
2019 last_occr->next = antic_occr;
2021 antic_occr->insn = insn;
2022 antic_occr->next = NULL;
2028 avail_occr = cur_expr->avail_occr;
2030 /* Search for another occurrence in the same basic block. */
2031 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
2033 /* If an occurrence isn't found, save a pointer to the end of the list. */
2035 last_occr = avail_occr;
2036 avail_occr = avail_occr->next;
2040 /* Found another instance of the expression in the same basic block.
2041 Prefer this occurrence to the currently recorded one. We want
2042 the last one in the block and the block is scanned from start to end. */
2044 avail_occr->insn = insn;
2047 /* First occurrence of this expression in this basic block. */
2048 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2049 bytes_used += sizeof (struct occr);
2051 /* First occurrence of this expression in any block? */
2052 if (cur_expr->avail_occr == NULL)
2053 cur_expr->avail_occr = avail_occr;
2055 last_occr->next = avail_occr;
2057 avail_occr->insn = insn;
2058 avail_occr->next = NULL;
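/* A hedged illustration of the structure maintained above (exposition
   only, not part of the pass): every expression lives on one hash chain,
   and its recorded occurrences can be enumerated per kind, e.g.

     struct expr *e;
     struct occr *o;
     for (e = table->table[hash]; e != NULL; e = e->next_same_hash)
       for (o = e->avail_occr; o != NULL; o = o->next)
         fprintf (stderr, "expr %u available via insn %d in block %d\n",
                  e->bitmap_index, INSN_UID (o->insn), BLOCK_NUM (o->insn));

   The code above guarantees at most one antic_occr and one avail_occr
   per basic block: the first insn in the block for anticipatability,
   the last one for availability.  */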
2063 /* Insert pattern X in INSN in the hash table.
2064 X is a SET of a reg to either another reg or a constant.
2065 If it is already present, record it as the last occurrence in INSN's basic block.
2069 insert_set_in_table (x, insn, table)
2072 struct hash_table *table;
2076 struct expr *cur_expr, *last_expr = NULL;
2077 struct occr *cur_occr, *last_occr = NULL;
2079 if (GET_CODE (x) != SET
2080 || GET_CODE (SET_DEST (x)) != REG)
2083 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2085 cur_expr = table->table[hash];
2088 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2090 /* If the expression isn't found, save a pointer to the end of the list. */
2092 last_expr = cur_expr;
2093 cur_expr = cur_expr->next_same_hash;
2098 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
2099 bytes_used += sizeof (struct expr);
2100 if (table->table[hash] == NULL)
2101 /* This is the first pattern that hashed to this index. */
2102 table->table[hash] = cur_expr;
2104 /* Add EXPR to end of this hash chain. */
2105 last_expr->next_same_hash = cur_expr;
2107 /* Set the fields of the expr element.
2108 We must copy X because it can be modified when copy propagation is
2109 performed on its operands. */
2110 cur_expr->expr = copy_rtx (x);
2111 cur_expr->bitmap_index = table->n_elems++;
2112 cur_expr->next_same_hash = NULL;
2113 cur_expr->antic_occr = NULL;
2114 cur_expr->avail_occr = NULL;
2117 /* Now record the occurrence. */
2118 cur_occr = cur_expr->avail_occr;
2120 /* Search for another occurrence in the same basic block. */
2121 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2123 /* If an occurrence isn't found, save a pointer to the end of the list. */
2125 last_occr = cur_occr;
2126 cur_occr = cur_occr->next;
2130 /* Found another instance of the expression in the same basic block.
2131 Prefer this occurrence to the currently recorded one. We want the
2132 last one in the block and the block is scanned from start to end. */
2133 cur_occr->insn = insn;
2136 /* First occurrence of this expression in this basic block. */
2137 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2138 bytes_used += sizeof (struct occr);
2140 /* First occurrence of this expression in any block? */
2141 if (cur_expr->avail_occr == NULL)
2142 cur_expr->avail_occr = cur_occr;
2144 last_occr->next = cur_occr;
2146 cur_occr->insn = insn;
2147 cur_occr->next = NULL;
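/* For illustration (a sketch, not normative): typical entries recorded
   here are a register copy and a constant load,

     (set (reg 110) (reg 109))
     (set (reg 111) (const_int 42))

   both keyed by hash_set (REGNO (SET_DEST (x)), table->size), so every
   set of a given destination register lands in the same bucket and can
   be found again via lookup_set/next_set below.  */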
2151 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or expression entry). */
2155 hash_scan_set (pat, insn, table)
2157 struct hash_table *table;
2159 rtx src = SET_SRC (pat);
2160 rtx dest = SET_DEST (pat);
2163 if (GET_CODE (src) == CALL)
2164 hash_scan_call (src, insn, table);
2166 else if (GET_CODE (dest) == REG)
2168 unsigned int regno = REGNO (dest);
2171 /* If this is a single set and we are doing constant propagation,
2172 see if a REG_NOTE shows this is equivalent to a constant. */
2173 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2174 && CONSTANT_P (XEXP (note, 0)))
2175 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2177 /* Only record sets of pseudo-regs in the hash table. */
2179 && regno >= FIRST_PSEUDO_REGISTER
2180 /* Don't GCSE something if we can't do a reg/reg copy. */
2181 && can_copy_p [GET_MODE (dest)]
2182 /* GCSE commonly inserts instructions after the insn. We can't
2183 do that easily for EH_REGION notes, so disable GCSE on these for now. */
2185 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2186 /* Is SET_SRC something we want to gcse? */
2187 && want_to_gcse_p (src)
2188 /* Don't CSE a nop. */
2189 && ! set_noop_p (pat)
2190 /* Don't GCSE if it has attached REG_EQUIV note.
2191 At this point only function parameters should have
2192 REG_EQUIV notes and if the argument slot is used somewhere
2193 explicitly, it means the address of the parameter has been taken,
2194 so we should not extend the lifetime of the pseudo. */
2195 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2196 || GET_CODE (XEXP (note, 0)) != MEM))
2198 /* An expression is not anticipatable if its operands are
2199 modified before this insn or if this is not the only SET in this insn. */
2201 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2202 /* An expression is not available if its operands are
2203 subsequently modified, including this insn. It's also not
2204 available if this is a branch, because we can't insert
2205 a set after the branch. */
2206 int avail_p = (oprs_available_p (src, insn)
2207 && ! JUMP_P (insn));
2209 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2212 /* Record sets for constant/copy propagation. */
2213 else if (table->set_p
2214 && regno >= FIRST_PSEUDO_REGISTER
2215 && ((GET_CODE (src) == REG
2216 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2217 && can_copy_p [GET_MODE (dest)]
2218 && REGNO (src) != regno)
2219 || CONSTANT_P (src))
2220 /* A copy is not available if its src or dest is subsequently
2221 modified. Here we want to search from INSN+1 on, but
2222 oprs_available_p searches from INSN on. */
2223 && (insn == BLOCK_END (BLOCK_NUM (insn))
2224 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2225 && oprs_available_p (pat, tmp))))
2226 insert_set_in_table (pat, insn, table);
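/* A small example of the anticipatable/available distinction used above
   (illustrative only).  In the block

     insn 1: (set (reg 120) (plus (reg 121) (reg 122)))
     insn 2: (set (reg 121) (const_int 0))

   the expression (plus (reg 121) (reg 122)) is anticipatable at insn 1,
   since no operand is modified earlier in the block, but it is not
   available at the end of the block because insn 2 subsequently
   clobbers reg 121.  */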
2231 hash_scan_clobber (x, insn, table)
2232 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2233 struct hash_table *table ATTRIBUTE_UNUSED;
2235 /* Currently nothing to do. */
2239 hash_scan_call (x, insn, table)
2240 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2241 struct hash_table *table ATTRIBUTE_UNUSED;
2243 /* Currently nothing to do. */
2246 /* Process INSN and add hash table entries as appropriate.
2248 Only available expressions that set a single pseudo-reg are recorded.
2250 Single sets in a PARALLEL could be handled, but it's an extra complication
2251 that isn't dealt with right now. The trick is handling the CLOBBERs that
2252 are also in the PARALLEL. Later.
2254 If SET_P is nonzero, this is for the assignment hash table,
2255 otherwise it is for the expression hash table.
2256 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2257 not record any expressions. */
2260 hash_scan_insn (insn, table, in_libcall_block)
2262 struct hash_table *table;
2263 int in_libcall_block;
2265 rtx pat = PATTERN (insn);
2268 if (in_libcall_block)
2271 /* Pick out the sets of INSN and for other forms of instructions record
2272 what's been modified. */
2274 if (GET_CODE (pat) == SET)
2275 hash_scan_set (pat, insn, table);
2276 else if (GET_CODE (pat) == PARALLEL)
2277 for (i = 0; i < XVECLEN (pat, 0); i++)
2279 rtx x = XVECEXP (pat, 0, i);
2281 if (GET_CODE (x) == SET)
2282 hash_scan_set (x, insn, table);
2283 else if (GET_CODE (x) == CLOBBER)
2284 hash_scan_clobber (x, insn, table);
2285 else if (GET_CODE (x) == CALL)
2286 hash_scan_call (x, insn, table);
2289 else if (GET_CODE (pat) == CLOBBER)
2290 hash_scan_clobber (pat, insn, table);
2291 else if (GET_CODE (pat) == CALL)
2292 hash_scan_call (pat, insn, table);
2296 dump_hash_table (file, name, table)
2299 struct hash_table *table;
2302 /* Flattened out table, so it's printed in proper order. */
2303 struct expr **flat_table;
2304 unsigned int *hash_val;
2308 = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
2309 hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));
2311 for (i = 0; i < (int) table->size; i++)
2312 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2314 flat_table[expr->bitmap_index] = expr;
2315 hash_val[expr->bitmap_index] = i;
2318 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2319 name, table->size, table->n_elems);
2321 for (i = 0; i < (int) table->n_elems; i++)
2322 if (flat_table[i] != 0)
2324 expr = flat_table[i];
2325 fprintf (file, "Index %d (hash value %d)\n ",
2326 expr->bitmap_index, hash_val[i]);
2327 print_rtl (file, expr->expr);
2328 fprintf (file, "\n");
2331 fprintf (file, "\n");
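/* The dump emitted above looks roughly like this (illustrative sketch;
   the exact counts and RTL depend on the function being compiled):

     Expression hash table (31 buckets, 2 entries)
     Index 0 (hash value 17)
       (plus:SI (reg:SI 120) (const_int 4))
     Index 1 (hash value 23)
       (reg:SI 121)
*/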
2337 /* Record register first/last/block set information for REGNO in INSN.
2339 first_set records the first place in the block where the register
2340 is set and is used to compute "anticipatability".
2342 last_set records the last place in the block where the register
2343 is set and is used to compute "availability".
2345 last_bb records the block for which first_set and last_set are
2346 valid, as a quick test to invalidate them.
2348 reg_set_in_block records whether the register is set in the block
2349 and is used to compute "transparency". */
2352 record_last_reg_set_info (insn, regno)
2356 struct reg_avail_info *info = &reg_avail_info[regno];
2357 int cuid = INSN_CUID (insn);
2359 info->last_set = cuid;
2360 if (info->last_bb != current_bb)
2362 info->last_bb = current_bb;
2363 info->first_set = cuid;
2364 SET_BIT (reg_set_in_block[current_bb->index], regno);
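/* A minimal consistency sketch of the record kept above (illustrative
   assumption, not real code in this pass):

     struct reg_avail_info *info = &reg_avail_info[regno];
     if (info->last_bb == current_bb
         && info->first_set > info->last_set)
       abort ();

   first_set can never follow last_set within one block, and both fields
   are meaningful only while last_bb still equals the block being
   scanned; any other last_bb means "not set in this block".  */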
2369 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2370 Note we store a pair of elements in the list, so they have to be
2371 taken off pairwise. */
2374 canon_list_insert (dest, unused1, v_insn)
2375 rtx dest ATTRIBUTE_UNUSED;
2376 rtx unused1 ATTRIBUTE_UNUSED;
2379 rtx dest_addr, insn;
2382 while (GET_CODE (dest) == SUBREG
2383 || GET_CODE (dest) == ZERO_EXTRACT
2384 || GET_CODE (dest) == SIGN_EXTRACT
2385 || GET_CODE (dest) == STRICT_LOW_PART)
2386 dest = XEXP (dest, 0);
2388 /* If DEST is not a MEM, then it will not conflict with a load. Note
2389 that function calls are assumed to clobber memory, but are handled elsewhere. */
2392 if (GET_CODE (dest) != MEM)
2395 dest_addr = get_addr (XEXP (dest, 0));
2396 dest_addr = canon_rtx (dest_addr);
2397 insn = (rtx) v_insn;
2398 bb = BLOCK_NUM (insn);
2400 canon_modify_mem_list[bb] =
2401 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2402 canon_modify_mem_list[bb] =
2403 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2404 bitmap_set_bit (canon_modify_mem_list_set, bb);
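/* Since the second push above leaves DEST at the head with DEST_ADDR
   immediately behind it, consumers must pop the EXPR_LIST nodes
   pairwise.  A hedged sketch of the traversal, ignoring the CALL_INSN
   markers that record_last_mem_set_info pushes singly; here X stands
   for the memory rtx being tested (compute_transp below does
   essentially this):

     rtx list = canon_modify_mem_list[bb];
     while (list != NULL_RTX)
       {
         rtx dest = XEXP (list, 0);
         rtx dest_addr = XEXP (XEXP (list, 1), 0);
         if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
                                    x, rtx_addr_varies_p))
           break;
         list = XEXP (XEXP (list, 1), 1);
       }
*/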
2407 /* Record memory modification information for INSN. We do not actually care
2408 about the memory location(s) that are set, or even how they are set (consider
2409 a CALL_INSN). We merely need to record which insns modify memory. */
2412 record_last_mem_set_info (insn)
2415 int bb = BLOCK_NUM (insn);
2417 /* load_killed_in_block_p will handle the case of calls clobbering everything. */
2419 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2420 bitmap_set_bit (modify_mem_list_set, bb);
2422 if (GET_CODE (insn) == CALL_INSN)
2424 /* Note that traversals of this loop (other than for freeing)
2425 will break after encountering a CALL_INSN. So, there's no
2426 need to insert a pair of items, as canon_list_insert does. */
2427 canon_modify_mem_list[bb] =
2428 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2429 bitmap_set_bit (canon_modify_mem_list_set, bb);
2432 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2435 /* Called from compute_hash_table via note_stores to handle one
2436 SET or CLOBBER in an insn. DATA is really the instruction in which
2437 the SET is taking place. */
2440 record_last_set_info (dest, setter, data)
2441 rtx dest, setter ATTRIBUTE_UNUSED;
2444 rtx last_set_insn = (rtx) data;
2446 if (GET_CODE (dest) == SUBREG)
2447 dest = SUBREG_REG (dest);
2449 if (GET_CODE (dest) == REG)
2450 record_last_reg_set_info (last_set_insn, REGNO (dest));
2451 else if (GET_CODE (dest) == MEM
2452 /* Ignore pushes, they clobber nothing. */
2453 && ! push_operand (dest, GET_MODE (dest)))
2454 record_last_mem_set_info (last_set_insn);
2457 /* Top level function to create an expression or assignment hash table.
2459 Expression entries are placed in the hash table if
2460 - they are of the form (set (pseudo-reg) src),
2461 - src is something we want to perform GCSE on,
2462 - none of the operands are subsequently modified in the block
2464 Assignment entries are placed in the hash table if
2465 - they are of the form (set (pseudo-reg) src),
2466 - src is something we want to perform const/copy propagation on,
2467 - none of the operands or target are subsequently modified in the block
2469 Currently src must be a pseudo-reg or a const_int.
2471 F is the first insn.
2472 TABLE is the table computed. */
2475 compute_hash_table_work (table)
2476 struct hash_table *table;
2480 /* While we compute the hash table we also compute a bit array of which
2481 registers are set in which blocks.
2482 ??? This isn't needed during const/copy propagation, but it's cheap to compute. Later. */
2484 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2486 /* Re-cache any INSN_LIST nodes we have allocated. */
2487 clear_modify_mem_tables ();
2488 /* Some working arrays used to track first and last set in each block. */
2489 reg_avail_info = (struct reg_avail_info*)
2490 gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2492 for (i = 0; i < max_gcse_regno; ++i)
2493 reg_avail_info[i].last_bb = NULL;
2495 FOR_EACH_BB (current_bb)
2499 int in_libcall_block;
2501 /* First pass over the instructions records information used to
2502 determine when registers and memory are first and last set.
2503 ??? hard-reg reg_set_in_block computation
2504 could be moved to compute_sets since they currently don't change. */
2506 for (insn = current_bb->head;
2507 insn && insn != NEXT_INSN (current_bb->end);
2508 insn = NEXT_INSN (insn))
2510 if (! INSN_P (insn))
2513 if (GET_CODE (insn) == CALL_INSN)
2515 bool clobbers_all = false;
2516 #ifdef NON_SAVING_SETJMP
2517 if (NON_SAVING_SETJMP
2518 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2519 clobbers_all = true;
2520 #endif
2522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2523 if (clobbers_all
2524 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2525 record_last_reg_set_info (insn, regno);
2530 note_stores (PATTERN (insn), record_last_set_info, insn);
2533 /* The next pass builds the hash table. */
2535 for (insn = current_bb->head, in_libcall_block = 0;
2536 insn && insn != NEXT_INSN (current_bb->end);
2537 insn = NEXT_INSN (insn))
2540 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2541 in_libcall_block = 1;
2542 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2543 in_libcall_block = 0;
2544 hash_scan_insn (insn, table, in_libcall_block);
2545 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2546 in_libcall_block = 0;
2550 free (reg_avail_info);
2551 reg_avail_info = NULL;
2554 /* Allocate space for the set/expr hash TABLE.
2555 N_INSNS is the number of instructions in the function.
2556 It is used to determine the number of buckets to use.
2557 SET_P determines whether set or expression table will be created. */
2561 alloc_hash_table (n_insns, table, set_p)
2563 struct hash_table *table;
2568 table->size = n_insns / 4;
2569 if (table->size < 11)
2570 table->size = 11;
2572 /* Attempt to maintain efficient use of hash table.
2573 Making it an odd number is simplest for now.
2574 ??? Later take some measurements. */
2576 n = table->size * sizeof (struct expr *);
2577 table->table = (struct expr **) gmalloc (n);
2578 table->set_p = set_p;
2581 /* Free things allocated by alloc_hash_table. */
2584 free_hash_table (table)
2585 struct hash_table *table;
2587 free (table->table);
2590 /* Compute the hash TABLE for doing copy/const propagation or
2591 expression hash table. */
2594 compute_hash_table (table)
2595 struct hash_table *table;
2597 /* Initialize count of number of entries in hash table. */
2598 table->n_elems = 0;
2599 memset ((char *) table->table, 0,
2600 table->size * sizeof (struct expr *));
2602 compute_hash_table_work (table);
2605 /* Expression tracking support. */
2607 /* Lookup pattern PAT in the expression TABLE.
2608 The result is a pointer to the table entry, or NULL if not found. */
2610 static struct expr *
2611 lookup_expr (pat, table)
2613 struct hash_table *table;
2615 int do_not_record_p;
2616 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2620 if (do_not_record_p)
2623 expr = table->table[hash];
2625 while (expr && ! expr_equiv_p (expr->expr, pat))
2626 expr = expr->next_same_hash;
2631 /* Lookup REGNO in the set TABLE. If PAT is non-NULL look for the entry that
2632 matches it, otherwise return the first entry for REGNO. The result is a
2633 pointer to the table entry, or NULL if not found. */
2635 static struct expr *
2636 lookup_set (regno, pat, table)
2639 struct hash_table *table;
2641 unsigned int hash = hash_set (regno, table->size);
2644 expr = table->table[hash];
2648 while (expr && ! expr_equiv_p (expr->expr, pat))
2649 expr = expr->next_same_hash;
2653 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2654 expr = expr->next_same_hash;
2660 /* Return the next entry for REGNO in list EXPR. */
2662 static struct expr *
2663 next_set (regno, expr)
2668 expr = expr->next_same_hash;
2669 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2674 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2675 types may be mixed. */
2678 free_insn_expr_list_list (listp)
2683 for (list = *listp; list ; list = next)
2685 next = XEXP (list, 1);
2686 if (GET_CODE (list) == EXPR_LIST)
2687 free_EXPR_LIST_node (list);
2689 free_INSN_LIST_node (list);
2695 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2697 clear_modify_mem_tables ()
2701 EXECUTE_IF_SET_IN_BITMAP
2702 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2703 bitmap_clear (modify_mem_list_set);
2705 EXECUTE_IF_SET_IN_BITMAP
2706 (canon_modify_mem_list_set, 0, i,
2707 free_insn_expr_list_list (canon_modify_mem_list + i));
2708 bitmap_clear (canon_modify_mem_list_set);
2711 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2714 free_modify_mem_tables ()
2716 clear_modify_mem_tables ();
2717 free (modify_mem_list);
2718 free (canon_modify_mem_list);
2719 modify_mem_list = 0;
2720 canon_modify_mem_list = 0;
2723 /* Reset tables used to keep track of what's still available [since the
2724 start of the block]. */
2727 reset_opr_set_tables ()
2729 /* Maintain a bitmap of which regs have been set since beginning of
2731 CLEAR_REG_SET (reg_set_bitmap);
2733 /* Also keep a record of the last instruction to modify memory.
2734 For now this is very trivial, we only record whether any memory
2735 location has been modified. */
2736 clear_modify_mem_tables ();
2739 /* Return nonzero if the operands of X are not set before INSN in
2740 INSN's basic block. */
2743 oprs_not_set_p (x, insn)
2753 code = GET_CODE (x);
2769 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2770 INSN_CUID (insn), x, 0))
2773 return oprs_not_set_p (XEXP (x, 0), insn);
2776 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2782 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2786 /* If we are about to do the last recursive call
2787 needed at this level, change it into iteration.
2788 This function is called enough to be worth it. */
2790 return oprs_not_set_p (XEXP (x, i), insn);
2792 if (! oprs_not_set_p (XEXP (x, i), insn))
2795 else if (fmt[i] == 'E')
2796 for (j = 0; j < XVECLEN (x, i); j++)
2797 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
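/* Two variants of the same stack-saving device recur in this file
   (hedged sketch, simplified from the loops above and below): where the
   walker returns a value, the last 'e' operand becomes a tail call,

     if (i == 0 && fmt[i] == 'e')
       return oprs_not_set_p (XEXP (x, 0), insn);

   and where it returns nothing (see compute_transp and find_used_regs
   below), the walker reassigns its argument and jumps back instead,

     if (i == 0 && fmt[i] == 'e')
       { x = XEXP (x, 0); goto repeat; }

   Either way one recursion frame per rtx level is saved on the common
   path of the traversal.  */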
2804 /* Mark things set by a CALL. */
2810 if (! CONST_OR_PURE_CALL_P (insn))
2811 record_last_mem_set_info (insn);
2814 /* Mark things set by a SET. */
2817 mark_set (pat, insn)
2820 rtx dest = SET_DEST (pat);
2822 while (GET_CODE (dest) == SUBREG
2823 || GET_CODE (dest) == ZERO_EXTRACT
2824 || GET_CODE (dest) == SIGN_EXTRACT
2825 || GET_CODE (dest) == STRICT_LOW_PART)
2826 dest = XEXP (dest, 0);
2828 if (GET_CODE (dest) == REG)
2829 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2830 else if (GET_CODE (dest) == MEM)
2831 record_last_mem_set_info (insn);
2833 if (GET_CODE (SET_SRC (pat)) == CALL)
2837 /* Record things set by a CLOBBER. */
2840 mark_clobber (pat, insn)
2843 rtx clob = XEXP (pat, 0);
2845 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2846 clob = XEXP (clob, 0);
2848 if (GET_CODE (clob) == REG)
2849 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2851 record_last_mem_set_info (insn);
2854 /* Record things set by INSN.
2855 This data is used by oprs_not_set_p. */
2858 mark_oprs_set (insn)
2861 rtx pat = PATTERN (insn);
2864 if (GET_CODE (pat) == SET)
2865 mark_set (pat, insn);
2866 else if (GET_CODE (pat) == PARALLEL)
2867 for (i = 0; i < XVECLEN (pat, 0); i++)
2869 rtx x = XVECEXP (pat, 0, i);
2871 if (GET_CODE (x) == SET)
2873 else if (GET_CODE (x) == CLOBBER)
2874 mark_clobber (x, insn);
2875 else if (GET_CODE (x) == CALL)
2879 else if (GET_CODE (pat) == CLOBBER)
2880 mark_clobber (pat, insn);
2881 else if (GET_CODE (pat) == CALL)
2886 /* Classic GCSE reaching definition support. */
2888 /* Allocate reaching def variables. */
2891 alloc_rd_mem (n_blocks, n_insns)
2892 int n_blocks, n_insns;
2894 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2895 sbitmap_vector_zero (rd_kill, n_blocks);
2897 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2898 sbitmap_vector_zero (rd_gen, n_blocks);
2900 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2901 sbitmap_vector_zero (reaching_defs, n_blocks);
2903 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2904 sbitmap_vector_zero (rd_out, n_blocks);
2907 /* Free reaching def variables. */
2912 sbitmap_vector_free (rd_kill);
2913 sbitmap_vector_free (rd_gen);
2914 sbitmap_vector_free (reaching_defs);
2915 sbitmap_vector_free (rd_out);
2918 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2921 handle_rd_kill_set (insn, regno, bb)
2926 struct reg_set *this_reg;
2928 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2929 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2930 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2933 /* Compute the set of kills for reaching definitions. */
2943 /* For each block
2944 For each set bit in `gen' of the block (i.e. each insn which
2945 generates a definition in the block)
2946 Call the reg set by the insn corresponding to that bit regx
2947 Look at the linked list starting at reg_set_table[regx]
2948 For each setting of regx in the linked list, which is not in this block
2950 Set the bit in `kill' corresponding to that insn. */
2952 for (cuid = 0; cuid < max_cuid; cuid++)
2953 if (TEST_BIT (rd_gen[bb->index], cuid))
2955 rtx insn = CUID_INSN (cuid);
2956 rtx pat = PATTERN (insn);
2958 if (GET_CODE (insn) == CALL_INSN)
2960 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2961 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2962 handle_rd_kill_set (insn, regno, bb);
2965 if (GET_CODE (pat) == PARALLEL)
2967 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2969 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2971 if ((code == SET || code == CLOBBER)
2972 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2973 handle_rd_kill_set (insn,
2974 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2978 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2979 /* Each setting of this register outside of this block
2980 must be marked in the set of kills in this block. */
2981 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2985 /* Compute the reaching definitions as in
2986 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2987 Chapter 10. It is the same algorithm as used for computing available
2988 expressions but applied to the gens and kills of reaching definitions. */
2993 int changed, passes;
2997 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
3006 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
3007 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
3008 reaching_defs[bb->index], rd_kill[bb->index]);
3014 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
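/* The fixed point computed above is the textbook reaching-definitions
   system; in bitmap form (sketch):

     reaching_defs[bb] = UNION over predecessors p of rd_out[p]
     rd_out[bb]        = rd_gen[bb] | (reaching_defs[bb] & ~rd_kill[bb])

   The outer loop repeats over all blocks until no rd_out[] changes,
   which sbitmap_union_of_diff_cg reports through its return value.  */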
3017 /* Classic GCSE available expression support. */
3019 /* Allocate memory for available expression computation. */
3022 alloc_avail_expr_mem (n_blocks, n_exprs)
3023 int n_blocks, n_exprs;
3025 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3026 sbitmap_vector_zero (ae_kill, n_blocks);
3028 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3029 sbitmap_vector_zero (ae_gen, n_blocks);
3031 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3032 sbitmap_vector_zero (ae_in, n_blocks);
3034 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
3035 sbitmap_vector_zero (ae_out, n_blocks);
3039 free_avail_expr_mem ()
3041 sbitmap_vector_free (ae_kill);
3042 sbitmap_vector_free (ae_gen);
3043 sbitmap_vector_free (ae_in);
3044 sbitmap_vector_free (ae_out);
3047 /* Compute the set of available expressions generated in each basic block. */
3050 compute_ae_gen (expr_hash_table)
3051 struct hash_table *expr_hash_table;
3057 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3058 This is all we have to do because an expression is not recorded if it
3059 is not available, and the only expressions we want to work with are the
3060 ones that are recorded. */
3061 for (i = 0; i < expr_hash_table->size; i++)
3062 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3063 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3064 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3067 /* Return nonzero if expression X is killed in BB. */
3070 expr_killed_p (x, bb)
3081 code = GET_CODE (x);
3085 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3088 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3091 return expr_killed_p (XEXP (x, 0), bb);
3109 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3113 /* If we are about to do the last recursive call
3114 needed at this level, change it into iteration.
3115 This function is called enough to be worth it. */
3117 return expr_killed_p (XEXP (x, i), bb);
3118 else if (expr_killed_p (XEXP (x, i), bb))
3121 else if (fmt[i] == 'E')
3122 for (j = 0; j < XVECLEN (x, i); j++)
3123 if (expr_killed_p (XVECEXP (x, i, j), bb))
3130 /* Compute the set of available expressions killed in each basic block. */
3133 compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
3134 sbitmap *ae_gen, *ae_kill;
3135 struct hash_table *expr_hash_table;
3142 for (i = 0; i < expr_hash_table->size; i++)
3143 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3145 /* Skip EXPR if generated in this block. */
3146 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3149 if (expr_killed_p (expr->expr, bb))
3150 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3154 /* Actually perform the Classic GCSE optimizations. */
3156 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3158 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3159 as a positive reach. We want to do this when there are two computations
3160 of the expression in the block.
3162 VISITED is a pointer to a working buffer for tracking which BB's have
3163 been visited. It is NULL for the top-level call.
3165 We treat reaching expressions that go through blocks containing the same
3166 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3167 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3168 2 as not reaching. The intent is to improve the probability of finding
3169 only one reaching expression and to reduce register lifetimes by picking
3170 the closest such expression. */
3173 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
3177 int check_self_loop;
3182 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3184 basic_block pred_bb = pred->src;
3186 if (visited[pred_bb->index])
3187 /* This predecessor has already been visited. Nothing to do. */
3189 else if (pred_bb == bb)
3191 /* BB loops on itself. */
3193 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3194 && BLOCK_NUM (occr->insn) == pred_bb->index)
3197 visited[pred_bb->index] = 1;
3200 /* Ignore this predecessor if it kills the expression. */
3201 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3202 visited[pred_bb->index] = 1;
3204 /* Does this predecessor generate this expression? */
3205 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3207 /* Is this the occurrence we're looking for?
3208 Note that there's only one generating occurrence per block
3209 so we just need to check the block number. */
3210 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3213 visited[pred_bb->index] = 1;
3216 /* Neither gen nor kill. */
3219 visited[pred_bb->index] = 1;
3220 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3227 /* All paths have been checked. */
3231 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3232 memory allocated for that function is returned. */
3235 expr_reaches_here_p (occr, expr, bb, check_self_loop)
3239 int check_self_loop;
3242 char *visited = (char *) xcalloc (last_basic_block, 1);
3244 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3250 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3251 If there is more than one such instruction, return NULL.
3253 Called only by handle_avail_expr. */
3256 computing_insn (expr, insn)
3260 basic_block bb = BLOCK_FOR_INSN (insn);
3262 if (expr->avail_occr->next == NULL)
3264 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3265 /* The available expression is actually itself
3266 (i.e. a loop in the flow graph) so do nothing. */
3269 /* (FIXME) Case where we found a pattern that was created by
3270 a substitution that took place. */
3271 return expr->avail_occr->insn;
3275 /* Pattern is computed more than once.
3276 Search backwards from this insn to see how many of these
3277 computations actually reach this insn. */
3279 rtx insn_computes_expr = NULL;
3282 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3284 if (BLOCK_FOR_INSN (occr->insn) == bb)
3286 /* The expression is generated in this block.
3287 The only time we care about this is when the expression
3288 is generated later in the block [and thus there's a loop].
3289 We let the normal cse pass handle the other cases. */
3290 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3291 && expr_reaches_here_p (occr, expr, bb, 1))
3297 insn_computes_expr = occr->insn;
3300 else if (expr_reaches_here_p (occr, expr, bb, 0))
3306 insn_computes_expr = occr->insn;
3310 if (insn_computes_expr == NULL)
3313 return insn_computes_expr;
3317 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3318 Only called by can_disregard_other_sets. */
3321 def_reaches_here_p (insn, def_insn)
3326 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3329 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3331 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3333 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3335 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3336 reg = XEXP (PATTERN (def_insn), 0);
3337 else if (GET_CODE (PATTERN (def_insn)) == SET)
3338 reg = SET_DEST (PATTERN (def_insn));
3342 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3351 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3352 value returned is the number of definitions that reach INSN. Returning a
3353 value of zero means that [maybe] more than one definition reaches INSN and
3354 the caller can't perform whatever optimization it is trying; i.e., it is
3355 always safe to return zero. */
3358 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3359 struct reg_set **addr_this_reg;
3363 int number_of_reaching_defs = 0;
3364 struct reg_set *this_reg;
3366 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3367 if (def_reaches_here_p (insn, this_reg->insn))
3369 number_of_reaching_defs++;
3370 /* Ignore parallels for now. */
3371 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3375 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3376 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3377 SET_SRC (PATTERN (insn)))))
3378 /* A setting of the reg to a different value reaches INSN. */
3381 if (number_of_reaching_defs > 1)
3383 /* If in this setting the register is being set to the same value
3384 as in a previous setting, and that previous setting reaches the
3385 insn we are trying to do the substitution on, then we are
3386 OK. */
3387 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3389 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3390 SET_SRC (PATTERN (insn))))
3394 *addr_this_reg = this_reg;
3397 return number_of_reaching_defs;
3400 /* Expression computed by insn is available and the substitution is legal,
3401 so try to perform the substitution.
3403 The result is nonzero if any changes were made. */
3406 handle_avail_expr (insn, expr)
3410 rtx pat, insn_computes_expr, expr_set;
3412 struct reg_set *this_reg;
3413 int found_setting, use_src;
3416 /* We only handle the case where one computation of the expression
3417 reaches this instruction. */
3418 insn_computes_expr = computing_insn (expr, insn);
3419 if (insn_computes_expr == NULL)
3421 expr_set = single_set (insn_computes_expr);
3428 /* At this point we know only one computation of EXPR outside of this
3429 block reaches this insn. Now try to find a register that the
3430 expression is computed into. */
3431 if (GET_CODE (SET_SRC (expr_set)) == REG)
3433 /* This is the case when the available expression that reaches
3434 here has already been handled as an available expression. */
3435 unsigned int regnum_for_replacing
3436 = REGNO (SET_SRC (expr_set));
3438 /* If the register was created by GCSE, we can't use `reg_set_table';
3439 however, we know it's set only once. */
3440 if (regnum_for_replacing >= max_gcse_regno
3441 /* If the register the expression is computed into is set only once,
3442 or only one set reaches this insn, we can use it. */
3443 || (((this_reg = reg_set_table[regnum_for_replacing]),
3444 this_reg->next == NULL)
3445 || can_disregard_other_sets (&this_reg, insn, 0)))
3454 unsigned int regnum_for_replacing
3455 = REGNO (SET_DEST (expr_set));
3457 /* This shouldn't happen. */
3458 if (regnum_for_replacing >= max_gcse_regno)
3461 this_reg = reg_set_table[regnum_for_replacing];
3463 /* If the register the expression is computed into is set only once,
3464 or only one set reaches this insn, use it. */
3465 if (this_reg->next == NULL
3466 || can_disregard_other_sets (&this_reg, insn, 0))
3472 pat = PATTERN (insn);
3474 to = SET_SRC (expr_set);
3476 to = SET_DEST (expr_set);
3477 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3479 /* We should be able to ignore the return code from validate_change but
3480 to play it safe we check. */
3484 if (gcse_file != NULL)
3486 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3488 fprintf (gcse_file, " reg %d %s insn %d\n",
3489 REGNO (to), use_src ? "from" : "set in",
3490 INSN_UID (insn_computes_expr));
3495 /* The register that the expr is computed into is set more than once. */
3496 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3498 /* Insert an insn after insnx that copies the reg set in insnx
3499 into a new pseudo register; call this new register REGN.
3500 From insnb until the end of the basic block, or until REGB is set,
3501 replace all uses of REGB with REGN. */
3504 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3506 /* Generate the new insn. */
3507 /* ??? If the change fails, we return 0, even though we created
3508 an insn. I think this is ok. */
3510 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3511 SET_DEST (expr_set)),
3512 insn_computes_expr);
3514 /* Keep register set table up to date. */
3515 record_one_set (REGNO (to), new_insn);
3517 gcse_create_count++;
3518 if (gcse_file != NULL)
3520 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3521 INSN_UID (NEXT_INSN (insn_computes_expr)),
3522 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3523 fprintf (gcse_file, ", computed in insn %d,\n",
3524 INSN_UID (insn_computes_expr));
3525 fprintf (gcse_file, " into newly allocated reg %d\n",
3529 pat = PATTERN (insn);
3531 /* Do register replacement for INSN. */
3532 changed = validate_change (insn, &SET_SRC (pat),
3534 (NEXT_INSN (insn_computes_expr))),
3537 /* We should be able to ignore the return code from validate_change but
3538 to play it safe we check. */
3542 if (gcse_file != NULL)
3545 "GCSE: Replacing the source in insn %d with reg %d ",
3547 REGNO (SET_DEST (PATTERN (NEXT_INSN
3548 (insn_computes_expr)))));
3549 fprintf (gcse_file, "set in insn %d\n",
3550 INSN_UID (insn_computes_expr));
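/* Schematic of the two replacements performed above (illustrative RTL;
   the register numbers are invented):

     Case 1, the computing register is usable directly:
       insn C: (set (reg 130) (expr))     reaching computation
       insn I: (set (reg 131) (expr))  => (set (reg 131) (reg 130))

     Case 2, the computing register is set more than once: a copy
       insn C': (set (reg 200) (reg 130))
     is emitted right after insn C, and insn I uses (reg 200) instead.  */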
3558 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3559 the dataflow analysis has been done.
3561 The result is nonzero if a change was made. */
3570 /* Note we start at block 1. */
3572 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3576 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3578 /* Reset tables used to keep track of what's still valid [since the
3579 start of the block]. */
3580 reset_opr_set_tables ();
3582 for (insn = bb->head;
3583 insn != NULL && insn != NEXT_INSN (bb->end);
3584 insn = NEXT_INSN (insn))
3586 /* Is insn of form (set (pseudo-reg) ...)? */
3587 if (GET_CODE (insn) == INSN
3588 && GET_CODE (PATTERN (insn)) == SET
3589 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3590 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3592 rtx pat = PATTERN (insn);
3593 rtx src = SET_SRC (pat);
3596 if (want_to_gcse_p (src)
3597 /* Is the expression recorded? */
3598 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3599 /* Is the expression available [at the start of the block]? */
3601 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3602 /* Are the operands unchanged since the start of the block? */
3604 && oprs_not_set_p (src, insn))
3605 changed |= handle_avail_expr (insn, expr);
3608 /* Keep track of everything modified by this insn. */
3609 /* ??? Need to be careful w.r.t. mods done to INSN. */
3611 mark_oprs_set (insn);
3618 /* Top level routine to perform one classic GCSE pass.
3620 Return nonzero if a change was made. */
3623 one_classic_gcse_pass (pass)
3628 gcse_subst_count = 0;
3629 gcse_create_count = 0;
3631 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3632 alloc_rd_mem (last_basic_block, max_cuid);
3633 compute_hash_table (&expr_hash_table);
3635 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3637 if (expr_hash_table.n_elems > 0)
3641 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3642 compute_ae_gen (&expr_hash_table);
3643 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3644 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3645 changed = classic_gcse ();
3646 free_avail_expr_mem ();
3650 free_hash_table (&expr_hash_table);
3654 fprintf (gcse_file, "\n");
3655 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3656 current_function_name, pass, bytes_used, gcse_subst_count);
3657 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3663 /* Compute copy/constant propagation working variables. */
3665 /* Local properties of assignments. */
3666 static sbitmap *cprop_pavloc;
3667 static sbitmap *cprop_absaltered;
3669 /* Global properties of assignments (computed from the local properties). */
3670 static sbitmap *cprop_avin;
3671 static sbitmap *cprop_avout;
3673 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3674 basic blocks. N_SETS is the number of sets. */
3677 alloc_cprop_mem (n_blocks, n_sets)
3678 int n_blocks, n_sets;
3680 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3681 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3683 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3684 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3687 /* Free vars used by copy/const propagation. */
3692 sbitmap_vector_free (cprop_pavloc);
3693 sbitmap_vector_free (cprop_absaltered);
3694 sbitmap_vector_free (cprop_avin);
3695 sbitmap_vector_free (cprop_avout);
3698 /* For each block, compute whether X is transparent. X is either an
3699 expression or an assignment [though we don't care which, for this context
3700 an assignment is treated as an expression]. For each block where an
3701 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
3705 compute_transp (x, indx, bmap, set_p)
3717 /* repeat is used to turn tail-recursion into iteration since GCC
3718 can't do it when there's no return value. */
3724 code = GET_CODE (x);
3730 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3733 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3734 SET_BIT (bmap[bb->index], indx);
3738 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3739 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3744 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3747 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3748 RESET_BIT (bmap[bb->index], indx);
3752 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3753 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3762 rtx list_entry = canon_modify_mem_list[bb->index];
3766 rtx dest, dest_addr;
3768 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3771 SET_BIT (bmap[bb->index], indx);
3773 RESET_BIT (bmap[bb->index], indx);
3776 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3777 Examine each hunk of memory that is modified. */
3779 dest = XEXP (list_entry, 0);
3780 list_entry = XEXP (list_entry, 1);
3781 dest_addr = XEXP (list_entry, 0);
3783 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3784 x, rtx_addr_varies_p))
3787 SET_BIT (bmap[bb->index], indx);
3789 RESET_BIT (bmap[bb->index], indx);
3792 list_entry = XEXP (list_entry, 1);
3815 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3819 /* If we are about to do the last recursive call
3820 needed at this level, change it into iteration.
3821 This function is called enough to be worth it. */
3828 compute_transp (XEXP (x, i), indx, bmap, set_p);
3830 else if (fmt[i] == 'E')
3831 for (j = 0; j < XVECLEN (x, i); j++)
3832 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3836 /* Top level routine to do the dataflow analysis needed by copy/const propagation. */
3840 compute_cprop_data ()
3842 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3843 compute_available (cprop_pavloc, cprop_absaltered,
3844 cprop_avout, cprop_avin);
3847 /* Copy/constant propagation. */
3849 /* Maximum number of register uses in an insn that we handle. */
3852 /* Table of uses found in an insn.
3853 Allocated statically to avoid alloc/free complexity and overhead. */
3854 static struct reg_use reg_use_table[MAX_USES];
3856 /* Index into `reg_use_table' while building it. */
3857 static int reg_use_count;
3859 /* Set up a list of register numbers used in INSN. The found uses are stored
3860 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3861 and contains the number of uses in the table upon exit.
3863 ??? If a register appears multiple times we will record it multiple times.
3864 This doesn't hurt anything but it will slow things down. */
3867 find_used_regs (xptr, data)
3869 void *data ATTRIBUTE_UNUSED;
3876 /* repeat is used to turn tail-recursion into iteration since GCC
3877 can't do it when there's no return value. */
3882 code = GET_CODE (x);
3885 if (reg_use_count == MAX_USES)
3888 reg_use_table[reg_use_count].reg_rtx = x;
3892 /* Recursively scan the operands of this expression. */
3894 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3898 /* If we are about to do the last recursive call
3899 needed at this level, change it into iteration.
3900 This function is called enough to be worth it. */
3907 find_used_regs (&XEXP (x, i), data);
3909 else if (fmt[i] == 'E')
3910 for (j = 0; j < XVECLEN (x, i); j++)
3911 find_used_regs (&XVECEXP (x, i, j), data);
3915 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3916 Returns nonzero if successful. */
3919 try_replace_reg (from, to, insn)
3922 rtx note = find_reg_equal_equiv_note (insn);
3925 rtx set = single_set (insn);
3927 validate_replace_src_group (from, to, insn);
3928 if (num_changes_pending () && apply_change_group ())
3931 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3933 /* If the above failed and this is a single set, try to simplify the source of
3934 the set given our substitution. We could perhaps try this for multiple
3935 SETs, but it probably won't buy us anything. */
3936 src = simplify_replace_rtx (SET_SRC (set), from, to);
3938 if (!rtx_equal_p (src, SET_SRC (set))
3939 && validate_change (insn, &SET_SRC (set), src, 0))
3942 /* If we've failed to do replacement, have a single SET, and don't already
3943 have a note, add a REG_EQUAL note to not lose information. */
3944 if (!success && note == 0 && set != 0)
3945 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3948 /* If there is already a NOTE, update the expression in it with our replacement. */
3951 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3953 /* The REG_EQUAL note may get simplified into a plain register. We
3954 don't allow that; remove the note in that case. This ought not to
3955 happen, because previous code ought to have synthesized a reg-reg
3956 move, but be on the safe side. */
3957 if (note && REG_P (XEXP (note, 0)))
3958 remove_note (insn, note);
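/* Hedged example of the fallback chain above: propagating
   (const_int 4) for (reg 140) in

     (set (reg 141) (plus:SI (reg 140) (reg 140)))

   may fail the initial validate_replace_src_group if the target has no
   add of two immediates; simplify_replace_rtx then folds the source to
   (const_int 8), which validate_change can install, and failing even
   that, a REG_EQUAL note recording the folded value is attached so the
   information is not lost.  */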
3963 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
3964 NULL if no such set is found. */
3966 static struct expr *
3967 find_avail_set (regno, insn)
3971 /* SET1 contains the last set found that can be returned to the caller for
3972 use in a substitution. */
3973 struct expr *set1 = 0;
3975 /* Loops are not possible here. To get a loop we would need two sets
3976 available at the start of the block containing INSN, i.e. we would
3977 need two sets like this available at the start of the block:
3979 (set (reg X) (reg Y))
3980 (set (reg Y) (reg X))
3982 This cannot happen since the set of (reg Y) would have killed the
3983 set of (reg X) making it unavailable at the start of this block. */
3987 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
3989 /* Find a set that is available at the start of the block
3990 which contains INSN. */
3993 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3995 set = next_set (regno, set);
3998 /* If no available set was found we've reached the end of the
3999 (possibly empty) copy chain. */
4003 if (GET_CODE (set->expr) != SET)
4006 src = SET_SRC (set->expr);
4008 /* We know the set is available.
4009 Now check that SRC is ANTLOC (i.e. none of the source operands
4010 have changed since the start of the block).
4012 If the source operand changed, we may still use it for the next
4013 iteration of this loop, but we may not use it for substitutions. */
4015 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
4018 /* If the source of the set is anything except a register, then
4019 we have reached the end of the copy chain. */
4020 if (GET_CODE (src) != REG)
4023 /* Follow the copy chain, i.e. start another iteration of the loop
4024 and see if we have an available copy into SRC. */
4025 regno = REGNO (src);
4028 /* SET1 holds the last set that was available and anticipatable at INSN. */
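/* Copy-chain example (illustrative only).  Given, available at the
   start of INSN's block:

     (set (reg 150) (const_int 7))
     (set (reg 151) (reg 150))

   a query for reg 151 walks the chain 151 -> 150; if the constant set
   is also unchanged since the block start (ANTLOC), it is returned in
   preference to the copy, turning a copy propagation into a constant
   propagation.  */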
4033 /* Subroutine of cprop_insn that tries to propagate constants into
4034 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4035 it is the instruction that immediately precedes JUMP, and must be a
4036 single SET of a register. FROM is what we will try to replace,
4037 SRC is the constant we will try to substitute for it. Returns nonzero
4038 if a change was made. */
4041 cprop_jump (bb, setcc, jump, from, src)
4049 rtx set = pc_set (jump);
4051 /* First substitute the SETCC condition into the SET_SRC of the JUMP,
4052 then substitute the known values into this expanded JUMP. */
4054 && !modified_between_p (from, setcc, jump)
4055 && !modified_between_p (src, setcc, jump))
4057 rtx setcc_set = single_set (setcc);
4058 new_set = simplify_replace_rtx (SET_SRC (set),
4059 SET_DEST (setcc_set),
4060 SET_SRC (setcc_set));
4065 new = simplify_replace_rtx (new_set, from, src);
4067 /* If no simplification can be made, then try the next register. */
4069 if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
4072 /* If this is now a no-op, delete it; otherwise this must be a valid insn. */
4077 /* Ensure the value computed inside the jump insn is equivalent to the
4078 one computed by setcc. */
4080 && modified_in_p (new, setcc))
4082 if (! validate_change (jump, &SET_SRC (set), new, 0))
4085 /* If this has turned into an unconditional jump,
4086 then put a barrier after it so that the unreachable
4087 code will be deleted. */
4088 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4089 emit_barrier_after (jump);
4093 /* Delete the cc0 setter. */
4094 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4095 delete_insn (setcc);
4098 run_jump_opt_after_gcse = 1;
4101 if (gcse_file != NULL)
4104 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4105 REGNO (from), INSN_UID (jump));
4106 print_rtl (gcse_file, src);
4107 fprintf (gcse_file, "\n");
4109 purge_dead_edges (bb);
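/* Worked example for the code above (all RTL invented for exposition):

     setcc: (set (reg 160) (lt:SI (reg 161) (const_int 0)))
     jump:  (set (pc) (if_then_else (ne (reg 160) (const_int 0))
                                    (label_ref 27) (pc)))

   Substituting the setcc condition gives
     (if_then_else (ne (lt (reg 161) (const_int 0)) (const_int 0)) ...)
   and substituting a known (const_int -1) for reg 161 lets the
   simplifier collapse the SET_SRC to (label_ref 27); the jump is now
   unconditional, so a barrier is emitted after it.  */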
4115 constprop_register (insn, from, to, alter_jumps)
4123 /* Check for reg or cc0 setting instructions followed by
4124 conditional branch instructions first. */
4126 && (sset = single_set (insn)) != NULL
4127 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4129 rtx dest = SET_DEST (sset);
4130 if ((REG_P (dest) || CC0_P (dest))
4131 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4135 /* Handle normal insns next. */
4136 if (GET_CODE (insn) == INSN
4137 && try_replace_reg (from, to, insn))
4140 /* Try to propagate a CONST_INT into a conditional jump.
4141 We're pretty specific about what we will handle in this
4142 code; we can extend this as necessary over time.
4144 Right now the insn in question must look like
4145 (set (pc) (if_then_else ...)) */
4146 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4147 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4151 /* Perform constant and copy propagation on INSN.
4152 The result is nonzero if a change was made. */
4155 cprop_insn (insn, alter_jumps)
4159 struct reg_use *reg_used;
4167 note_uses (&PATTERN (insn), find_used_regs, NULL);
4169 note = find_reg_equal_equiv_note (insn);
4171 /* We may win even when propagating constants into notes. */
4173 find_used_regs (&XEXP (note, 0), NULL);
4175 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4176 reg_used++, reg_use_count--)
4178 unsigned int regno = REGNO (reg_used->reg_rtx);
4182 /* Ignore registers created by GCSE.
4183 We do this because ... */
4184 if (regno >= max_gcse_regno)
4187 /* If the register has already been set in this block, there's
4188 nothing we can do. */
4189 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4192 /* Find an assignment that sets reg_used and is available
4193 at the start of the block. */
4194 set = find_avail_set (regno, insn);
4199 /* ??? We might be able to handle PARALLELs. Later. */
4200 if (GET_CODE (pat) != SET)
4203 src = SET_SRC (pat);
4205 /* Constant propagation. */
4206 if (CONSTANT_P (src))
4208 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4212 if (gcse_file != NULL)
4214 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4215 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4216 print_rtl (gcse_file, src);
4217 fprintf (gcse_file, "\n");
4221 else if (GET_CODE (src) == REG
4222 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4223 && REGNO (src) != regno)
4225 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4229 if (gcse_file != NULL)
4231 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4232 regno, INSN_UID (insn));
4233 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4236 /* The original insn setting reg_used may or may not now be
4237 deletable. We leave the deletion to flow. */
4238 /* FIXME: If it turns out that the insn isn't deletable,
4239 then we may have unnecessarily extended register lifetimes
4240 and made things worse. */
4248 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4249 their REG_EQUAL notes need updating. */
4252 do_local_cprop (x, insn, alter_jumps, libcall_sp)
4258 rtx newreg = NULL, newcnst = NULL;
4260 /* Rule out USE instructions and ASM statements as we don't want to
4261 change the hard registers mentioned. */
4262 if (GET_CODE (x) == REG
4263 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4264 || (GET_CODE (PATTERN (insn)) != USE
4265 && asm_noperands (PATTERN (insn)) < 0)))
4267 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4268 struct elt_loc_list *l;
4272 for (l = val->locs; l; l = l->next)
4274 rtx this_rtx = l->loc;
4280 if (CONSTANT_P (this_rtx))
4282 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4283 /* Don't copy propagate if it has attached REG_EQUIV note.
4284 At this point only function parameters should have
4285 REG_EQUIV notes and if the argument slot is used somewhere
4286 explicitly, it means the address of the parameter has been taken,
4287 so we should not extend the lifetime of the pseudo. */
4288 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4289 || GET_CODE (XEXP (note, 0)) != MEM))
4292 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4294 /* If we find a case where we can't fix the retval REG_EQUAL notes to
4295 match the new register, we either have to abandon this replacement
4296 or fix delete_trivially_dead_insns to preserve the setting insn,
4297 or make it delete the REG_EQUAL note, and fix up all passes that
4298 require the REG_EQUAL note there. */
4299 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4301 if (gcse_file != NULL)
4303 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4305 fprintf (gcse_file, "insn %d with constant ",
4307 print_rtl (gcse_file, newcnst);
4308 fprintf (gcse_file, "\n");
4313 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4315 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4316 if (gcse_file != NULL)
4319 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4320 REGNO (x), INSN_UID (insn));
4321 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4330 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4331 their REG_EQUAL notes need updating to reflect that OLDREG has been
4332 replaced with NEWVAL in INSN. Return true if all substitutions could be performed. */
4335 adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
4336 rtx oldreg, newval, insn, *libcall_sp;
4340 while ((end = *libcall_sp++))
4342 rtx note = find_reg_equal_equiv_note (end);
4349 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4353 note = find_reg_equal_equiv_note (end);
4356 if (reg_mentioned_p (newval, XEXP (note, 0)))
4359 while ((end = *libcall_sp++));
4363 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4369 #define MAX_NESTED_LIBCALLS 9
4372 local_cprop_pass (alter_jumps)
4376 struct reg_use *reg_used;
4377 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4380 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4382 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4386 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4390 if (libcall_sp == libcall_stack)
4392 *--libcall_sp = XEXP (note, 0);
4394 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4397 note = find_reg_equal_equiv_note (insn);
4401 note_uses (&PATTERN (insn), find_used_regs, NULL);
4403 find_used_regs (&XEXP (note, 0), NULL);
4405 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4406 reg_used++, reg_use_count--)
4407 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4411 while (reg_use_count);
4413 cselib_process_insn (insn);
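/* Unlike the global pass, the local pass above leans on cselib, so it
   can chase equivalences within a block with no dataflow at all.  A
   hedged illustration:

     (set (reg 170) (const_int 5))
     (set (reg 171) (reg 170))
     (set (reg 172) (plus:SI (reg 171) (reg 171)))

   cselib_lookup on (reg 171) yields a value whose locs include
   (const_int 5), so do_local_cprop rewrites the third insn's uses to
   the constant before the global hash tables are even built.  */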
4418 /* Forward propagate copies. This includes copies and constants. Return
4419 nonzero if a change was made. */
4429 /* Note we start at block 1. */
4430 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4432 if (gcse_file != NULL)
4433 fprintf (gcse_file, "\n");
4438 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4440 /* Reset tables used to keep track of what's still valid [since the
4441 start of the block]. */
4442 reset_opr_set_tables ();
4444 for (insn = bb->head;
4445 insn != NULL && insn != NEXT_INSN (bb->end);
4446 insn = NEXT_INSN (insn))
4449 changed |= cprop_insn (insn, alter_jumps);
4451 /* Keep track of everything modified by this insn. */
4452 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4453 call mark_oprs_set if we turned the insn into a NOTE. */
4454 if (GET_CODE (insn) != NOTE)
4455 mark_oprs_set (insn);
4459 if (gcse_file != NULL)
4460 fprintf (gcse_file, "\n");
4465 /* Perform one copy/constant propagation pass.
4466 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4467 propagation into conditional jumps. If BYPASS_JUMPS is true,
4468 perform conditional jump bypassing optimizations. */
4471 one_cprop_pass (pass, cprop_jumps, bypass_jumps)
4478 const_prop_count = 0;
4479 copy_prop_count = 0;
4481 local_cprop_pass (cprop_jumps);
4483 alloc_hash_table (max_cuid, &set_hash_table, 1);
4484 compute_hash_table (&set_hash_table);
4486 dump_hash_table (gcse_file, "SET", &set_hash_table);
4487 if (set_hash_table.n_elems > 0)
4489 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4490 compute_cprop_data ();
4491 changed = cprop (cprop_jumps);
4493 changed |= bypass_conditional_jumps ();
4497 free_hash_table (&set_hash_table);
4501 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4502 current_function_name, pass, bytes_used);
4503 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4504 const_prop_count, copy_prop_count);
4510 /* Bypass conditional jumps. */
4512 /* Find a set of REGNO to a constant that is available at the end of basic
4513 block BB. Returns NULL if no such set is found. Based heavily upon
4516 static struct expr *
4517 find_bypass_set (regno, bb)
4521 struct expr *result = 0;
4526 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
4530 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4532 set = next_set (regno, set);
4538 if (GET_CODE (set->expr) != SET)
4541 src = SET_SRC (set->expr);
4542 if (CONSTANT_P (src))
4545 if (GET_CODE (src) != REG)
4548 regno = REGNO (src);
4554 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4555 basic block BB which has more than one predecessor. If not NULL, SETCC
4556 is the first instruction of BB, which is immediately followed by JUMP_INSN
4557 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4558 Returns nonzero if a change was made. */
4561 bypass_block (bb, setcc, jump)
4569 insn = (setcc != NULL) ? setcc : jump;
4571 /* Determine set of register uses in INSN. */
4573 note_uses (&PATTERN (insn), find_used_regs, NULL);
4574 note = find_reg_equal_equiv_note (insn);
4576 find_used_regs (&XEXP (note, 0), NULL);
4579 for (e = bb->pred; e; e = enext)
4581 enext = e->pred_next;
4582 for (i = 0; i < reg_use_count; i++)
4584 struct reg_use *reg_used = &reg_use_table[i];
4585 unsigned int regno = REGNO (reg_used->reg_rtx);
4586 basic_block dest, old_dest;
4590 if (regno >= max_gcse_regno)
4593 set = find_bypass_set (regno, e->src->index);
4598 src = SET_SRC (pc_set (jump));
4601 src = simplify_replace_rtx (src,
4602 SET_DEST (PATTERN (setcc)),
4603 SET_SRC (PATTERN (setcc)));
4605 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4606 SET_SRC (set->expr));
4609 dest = FALLTHRU_EDGE (bb)->dest;
4610 else if (GET_CODE (new) == LABEL_REF)
4611 dest = BRANCH_EDGE (bb)->dest;
4615 /* Once basic block indices are stable, we should be able
4616 to use redirect_edge_and_branch_force instead. */
4618 if (dest != NULL && dest != old_dest
4619 && redirect_edge_and_branch (e, dest))
4621 /* Copy the register setter to the redirected edge.
4622 Don't copy CC0 setters, as CC0 is dead after jump. */
4625 rtx pat = PATTERN (setcc);
4626 if (!CC0_P (SET_DEST (pat)))
4627 insert_insn_on_edge (copy_insn (pat), e);
4630 if (gcse_file != NULL)
4632 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4633 regno, INSN_UID (jump));
4634 print_rtl (gcse_file, SET_SRC (set->expr));
4635 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4636 e->src->index, old_dest->index, dest->index);
4646 /* Find basic blocks with more than one predecessor that only contain a
4647 single conditional jump. If the result of the comparison is known at
4648 compile-time from any incoming edge, redirect that edge to the
4649 appropriate target. Returns nonzero if a change was made. */
4652 bypass_conditional_jumps ()
4660 /* Note we start at block 1. */
4661 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4665 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4666 EXIT_BLOCK_PTR, next_bb)
4668 /* Check for more than one predecessor. */
4669 if (bb->pred && bb->pred->pred_next)
4672 for (insn = bb->head;
4673 insn != NULL && insn != NEXT_INSN (bb->end);
4674 insn = NEXT_INSN (insn))
4675 if (GET_CODE (insn) == INSN)
4679 if (GET_CODE (PATTERN (insn)) != SET)
4682 dest = SET_DEST (PATTERN (insn));
4683 if (REG_P (dest) || CC0_P (dest))
4688 else if (GET_CODE (insn) == JUMP_INSN)
4690 if (any_condjump_p (insn) && onlyjump_p (insn))
4691 changed |= bypass_block (bb, setcc, insn);
4694 else if (INSN_P (insn))
4699 /* If we bypassed any register setting insns, we inserted a
4700 copy on the redirected edge. These need to be committed. */
4702 commit_edge_insertions ();
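/* A sketch of the transformation (hypothetical source): given the diamond

	if (p)	x = 0;		the block testing "x == 0" has two
	else	x = 1;		predecessors, and find_bypass_set proves
	if (x == 0)		a constant value of X on each incoming
	  f ();			edge.

   bypass_block redirects the edge from the "x = 0" arm straight to the
   branch target and the edge from the "x = 1" arm to the fall-through
   successor, so neither path executes the comparison.  */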
4707 /* Compute PRE+LCM working variables. */
4709 /* Local properties of expressions. */
4710 /* Nonzero for expressions that are transparent in the block. */
4711 static sbitmap *transp;
4713 /* Nonzero for expressions that are transparent at the end of the block.
4714 This is only zero for expressions killed by an abnormal critical edge
4715 created by a call. */
4716 static sbitmap *transpout;
4718 /* Nonzero for expressions that are computed (available) in the block. */
4719 static sbitmap *comp;
4721 /* Nonzero for expressions that are locally anticipatable in the block. */
4722 static sbitmap *antloc;
4724 /* Nonzero for expressions where this block is an optimal computation
4726 static sbitmap *pre_optimal;
4728 /* Nonzero for expressions which are redundant in a particular block. */
4729 static sbitmap *pre_redundant;
4731 /* Nonzero for expressions which should be inserted on a specific edge. */
4732 static sbitmap *pre_insert_map;
4734 /* Nonzero for expressions which should be deleted in a specific block. */
4735 static sbitmap *pre_delete_map;
4737 /* Contains the edge_list returned by pre_edge_lcm. */
4738 static struct edge_list *edge_list;
4740 /* Redundant insns. */
4741 static sbitmap pre_redundant_insns;
4743 /* Allocate vars used for PRE analysis. */
4746 alloc_pre_mem (n_blocks, n_exprs)
4747 int n_blocks, n_exprs;
4749 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4750 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4751 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4754 pre_redundant = NULL;
4755 pre_insert_map = NULL;
4756 pre_delete_map = NULL;
4759 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4761 /* pre_insert and pre_delete are allocated later. */
4764 /* Free vars used for PRE analysis. */
4769 sbitmap_vector_free (transp);
4770 sbitmap_vector_free (comp);
4772 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4775 sbitmap_vector_free (pre_optimal);
4777 sbitmap_vector_free (pre_redundant);
4779 sbitmap_vector_free (pre_insert_map);
4781 sbitmap_vector_free (pre_delete_map);
4783 sbitmap_vector_free (ae_in);
4785 sbitmap_vector_free (ae_out);
4787 transp = comp = NULL;
4788 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4789 ae_in = ae_out = NULL;
4792 /* Top level routine to do the dataflow analysis needed by PRE. */
4797 sbitmap trapping_expr;
4801 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4802 sbitmap_vector_zero (ae_kill, last_basic_block);
4804 /* Collect expressions which might trap. */
4805 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4806 sbitmap_zero (trapping_expr);
4807 for (ui = 0; ui < expr_hash_table.size; ui++)
4810 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4811 if (may_trap_p (e->expr))
4812 SET_BIT (trapping_expr, e->bitmap_index);
4815 /* Compute ae_kill for each basic block as ~(TRANSP | COMP).
4819 This is significantly faster than compute_ae_kill. */
4825 /* If the current block is the destination of an abnormal edge, we
4826 kill all trapping expressions because we won't be able to properly
4827 place the instruction on the edge. So make them neither
4828 anticipatable nor transparent. This is fairly conservative. */
4829 for (e = bb->pred; e ; e = e->pred_next)
4830 if (e->flags & EDGE_ABNORMAL)
4832 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4833 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
4837 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4838 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
4841 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
4842 ae_kill, &pre_insert_map, &pre_delete_map);
4843 sbitmap_vector_free (antloc);
4845 sbitmap_vector_free (ae_kill);
4847 sbitmap_free (trapping_expr);
4852 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
4855 VISITED is a pointer to a working buffer for tracking which BBs have
4856 been visited. It is NULL for the top-level call.
4858 We treat reaching expressions that go through blocks containing the same
4859 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4860 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4861 2 as not reaching. The intent is to improve the probability of finding
4862 only one reaching expression and to reduce register lifetimes by picking
4863 the closest such expression. */
4866 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4867 basic_block occr_bb;
4874 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4876 basic_block pred_bb = pred->src;
4878 if (pred->src == ENTRY_BLOCK_PTR
4879 /* Has this predecessor already been visited? */
4880 || visited[pred_bb->index])
4881 ;/* Nothing to do. */
4883 /* Does this predecessor generate this expression? */
4884 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4886 /* Is this the occurrence we're looking for?
4887 Note that there's only one generating occurrence per block
4888 so we just need to check the block number. */
4889 if (occr_bb == pred_bb)
4892 visited[pred_bb->index] = 1;
4894 /* Ignore this predecessor if it kills the expression. */
4895 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4896 visited[pred_bb->index] = 1;
4898 /* Neither gen nor kill. */
4901 visited[pred_bb->index] = 1;
4902 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4907 /* All paths have been checked. */
4911 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4912 memory allocated for that function is returned. */
4915 pre_expr_reaches_here_p (occr_bb, expr, bb)
4916 basic_block occr_bb;
4921 char *visited = (char *) xcalloc (last_basic_block, 1);
4923 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4930 /* Given an expr, generate RTL which we can insert at the end of a BB,
4931 or on an edge. Set the block number of any insns generated to
4935 process_insert_insn (expr)
4938 rtx reg = expr->reaching_reg;
4939 rtx exp = copy_rtx (expr->expr);
4944 /* If the expression is something that's an operand, like a constant,
4945 just copy it to a register. */
4946 if (general_operand (exp, GET_MODE (reg)))
4947 emit_move_insn (reg, exp);
4949 /* Otherwise, make a new insn to compute this expression and make sure the
4950 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4951 expression to make sure we don't have any sharing issues. */
4952 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4961 /* Add EXPR to the end of basic block BB.
4963 This is used by both PRE and code hoisting.
4965 For PRE, we want to verify that the expr is either transparent
4966 or locally anticipatable in the target block. This check makes
4967 no sense for code hoisting. */
4970 insert_insn_end_bb (expr, bb, pre)
4977 rtx reg = expr->reaching_reg;
4978 int regno = REGNO (reg);
4981 pat = process_insert_insn (expr);
4982 if (pat == NULL_RTX || ! INSN_P (pat))
4986 while (NEXT_INSN (pat_end) != NULL_RTX)
4987 pat_end = NEXT_INSN (pat_end);
4989 /* If the last insn is a jump, insert EXPR in front [taking care to
4990 handle cc0, etc. properly]. Similarly we need to take care of trapping
4991 instructions in the presence of non-call exceptions. */
4993 if (GET_CODE (insn) == JUMP_INSN
4994 || (GET_CODE (insn) == INSN
4995 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5000 /* It should always be the case that we can put these instructions
5001 anywhere in the basic block when performing PRE optimizations.
5003 if (GET_CODE (insn) == INSN && pre
5004 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5005 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5008 /* If this is a jump table, then we can't insert stuff here. Since
5009 we know the previous real insn must be the tablejump, we insert
5010 the new instruction just before the tablejump. */
5011 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5012 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5013 insn = prev_real_insn (insn);
5016 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5017 if cc0 isn't set. */
5018 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5020 insn = XEXP (note, 0);
5023 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5024 if (maybe_cc0_setter
5025 && INSN_P (maybe_cc0_setter)
5026 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5027 insn = maybe_cc0_setter;
5030 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5031 new_insn = emit_insn_before (pat, insn);
5034 /* Likewise if the last insn is a call, as will happen in the presence
5035 of exception handling. */
5036 else if (GET_CODE (insn) == CALL_INSN
5037 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5039 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5040 we search backward and place the instructions before the first
5041 parameter is loaded. Do this for everyone for consistency and a
5042 presumption that we'll get better code elsewhere as well.
5044 It should always be the case that we can put these instructions
5045 anywhere in the basic block when performing PRE optimizations.
5049 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5050 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5053 /* Since different machines initialize their parameter registers
5054 in different orders, assume nothing. Collect the set of all
5055 parameter registers. */
5056 insn = find_first_parameter_load (insn, bb->head);
5058 /* If we found all the parameter loads, then we want to insert
5059 before the first parameter load.
5061 If we did not find all the parameter loads, then we might have
5062 stopped on the head of the block, which could be a CODE_LABEL.
5063 If we inserted before the CODE_LABEL, then we would be putting
5064 the insn in the wrong basic block. In that case, put the insn
5065 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5066 while (GET_CODE (insn) == CODE_LABEL
5067 || NOTE_INSN_BASIC_BLOCK_P (insn))
5068 insn = NEXT_INSN (insn);
5070 new_insn = emit_insn_before (pat, insn);
5073 new_insn = emit_insn_after (pat, insn);
5079 add_label_notes (PATTERN (pat), new_insn);
5080 note_stores (PATTERN (pat), record_set_info, pat);
5084 pat = NEXT_INSN (pat);
5087 gcse_create_count++;
5091 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5092 bb->index, INSN_UID (new_insn));
5093 fprintf (gcse_file, "copying expression %d to reg %d\n",
5094 expr->bitmap_index, regno);
5098 /* Insert partially redundant expressions on edges in the CFG to make
5099 the expressions fully redundant. */
5102 pre_edge_insert (edge_list, index_map)
5103 struct edge_list *edge_list;
5104 struct expr **index_map;
5106 int e, i, j, num_edges, set_size, did_insert = 0;
5109 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5110 if it reaches any of the deleted expressions. */
5112 set_size = pre_insert_map[0]->size;
5113 num_edges = NUM_EDGES (edge_list);
5114 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5115 sbitmap_vector_zero (inserted, num_edges);
5117 for (e = 0; e < num_edges; e++)
5120 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5122 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5124 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5126 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5127 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5129 struct expr *expr = index_map[j];
5132 /* Now look at each deleted occurrence of this expression. */
5133 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5135 if (! occr->deleted_p)
5138 /* Insert this expression on this edge if it would
5139 reach the deleted occurrence in BB. */
5140 if (!TEST_BIT (inserted[e], j))
5143 edge eg = INDEX_EDGE (edge_list, e);
5145 /* We can't insert anything on an abnormal and
5146 critical edge, so we insert the insn at the end of
5147 the previous block. There are several alternatives
5148 detailed in Morgan's book P277 (sec 10.5) for
5149 handling this situation. This one is easiest for
5152 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5153 insert_insn_end_bb (index_map[j], bb, 0);
5156 insn = process_insert_insn (index_map[j]);
5157 insert_insn_on_edge (insn, eg);
5162 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5164 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5165 fprintf (gcse_file, "copy expression %d\n",
5166 expr->bitmap_index);
5169 update_ld_motion_stores (expr);
5170 SET_BIT (inserted[e], j);
5172 gcse_create_count++;
5179 sbitmap_vector_free (inserted);
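/* Sketch of the edge insertion (hypothetical source): "a + b" below is
   only partially redundant, and inserting a computation on the edge out
   of the "else" arm makes it fully redundant:

	if (p)				if (p)
	  x = a + b;			  x = t = a + b;
	else			==>	else
	  x = 0;			  x = 0, t = a + b;   <- edge insert
	y = a + b;			y = t;

   pre_delete then rewrites the now fully redundant occurrence as the
   copy shown on the last line.  */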
5183 /* Copy the result of INSN to REG. INDX is the expression number. */
5186 pre_insert_copy_insn (expr, insn)
5190 rtx reg = expr->reaching_reg;
5191 int regno = REGNO (reg);
5192 int indx = expr->bitmap_index;
5193 rtx set = single_set (insn);
5199 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
5201 /* Keep register set table up to date. */
5202 record_one_set (regno, new_insn);
5204 gcse_create_count++;
5208 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5209 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5210 INSN_UID (insn), regno);
5211 update_ld_motion_stores (expr);
5214 /* Copy available expressions that reach the redundant expression
5215 to `reaching_reg'. */
5218 pre_insert_copies ()
5225 /* For each available expression in the table, copy the result to
5226 `reaching_reg' if the expression reaches a deleted one.
5228 ??? The current algorithm is rather brute force.
5229 Need to do some profiling. */
5231 for (i = 0; i < expr_hash_table.size; i++)
5232 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5234 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5235 we don't want to insert a copy here because the expression may not
5236 really be redundant. So only insert an insn if the expression was
5237 deleted. This test also avoids further processing if the
5238 expression wasn't deleted anywhere. */
5239 if (expr->reaching_reg == NULL)
5242 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5244 if (! occr->deleted_p)
5247 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5249 rtx insn = avail->insn;
5251 /* No need to handle this one if handled already. */
5252 if (avail->copied_p)
5255 /* Don't handle this one if it's a redundant one. */
5256 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5259 /* Or if the expression doesn't reach the deleted one. */
5260 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5262 BLOCK_FOR_INSN (occr->insn)))
5265 /* Copy the result of avail to reaching_reg. */
5266 pre_insert_copy_insn (expr, insn);
5267 avail->copied_p = 1;
5273 /* Emit move from SRC to DEST noting the equivalence with expression computed
5276 gcse_emit_move_after (src, dest, insn)
5277 rtx src, dest, insn;
5280 rtx set = single_set (insn), set2;
5284 /* This should never fail since we're creating a reg->reg copy
5285 we've verified to be valid. */
5287 new = emit_insn_after (gen_move_insn (dest, src), insn);
5289 /* Note the equivalence for local CSE pass. */
5290 set2 = single_set (new);
5291 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5293 if ((note = find_reg_equal_equiv_note (insn)))
5294 eqv = XEXP (note, 0);
5296 eqv = SET_SRC (set);
5298 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5303 /* Delete redundant computations.
5304 Deletion is done by changing the insn to copy the `reaching_reg' of
5305 the expression into the result of the SET. It is left to later passes
5306 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5308 Returns nonzero if a change is made. */
5319 for (i = 0; i < expr_hash_table.size; i++)
5320 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5322 int indx = expr->bitmap_index;
5324 /* We only need to search antic_occr since we require
5327 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5329 rtx insn = occr->insn;
5331 basic_block bb = BLOCK_FOR_INSN (insn);
5333 if (TEST_BIT (pre_delete_map[bb->index], indx))
5335 set = single_set (insn);
5339 /* Create a pseudo-reg to store the result of reaching
5340 expressions into. Get the mode for the new pseudo from
5341 the mode of the original destination pseudo. */
5342 if (expr->reaching_reg == NULL)
5344 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5346 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5348 occr->deleted_p = 1;
5349 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5356 "PRE: redundant insn %d (expression %d) in ",
5357 INSN_UID (insn), indx);
5358 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5359 bb->index, REGNO (expr->reaching_reg));
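/* For illustration, the rewrite performed above on a fully redundant
   occurrence looks like this in abbreviated (hypothetical) RTL:

	(set (reg 105) (plus (reg 60) (reg 61)))    redundant occurrence

   becomes

	(set (reg 105) (reg 200))		    copy from reaching_reg

   where (reg 200) is the pseudo created just above; pre_insert_copies
   will later emit a copy into (reg 200) after the occurrences that
   reach this one.  */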
5368 /* Perform GCSE optimizations using PRE.
5369 This is called by one_pre_gcse_pass after all the dataflow analysis
5372 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5373 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5374 Compiler Design and Implementation.
5376 ??? A new pseudo reg is created to hold the reaching expression. The nice
5377 thing about the classical approach is that it would try to use an existing
5378 reg. If the register can't be adequately optimized [i.e. we introduce
5379 reload problems], one could add a pass here to propagate the new register
5382 ??? We don't handle single sets in PARALLELs because we're [currently] not
5383 able to copy the rest of the parallel when we insert copies to create full
5384 redundancies from partial redundancies. However, there's no reason why we
5385 can't handle PARALLELs in the cases where there are no partial
5392 int did_insert, changed;
5393 struct expr **index_map;
5396 /* Compute a mapping from expression number (`bitmap_index') to
5397 hash table entry. */
5399 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5400 for (i = 0; i < expr_hash_table.size; i++)
5401 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5402 index_map[expr->bitmap_index] = expr;
5404 /* Reset bitmap used to track which insns are redundant. */
5405 pre_redundant_insns = sbitmap_alloc (max_cuid);
5406 sbitmap_zero (pre_redundant_insns);
5408 /* Delete the redundant insns first so that
5409 - we know what register to use for the new insns and for the other
5410 ones with reaching expressions
5411 - we know which insns are redundant when we go to create copies */
5413 changed = pre_delete ();
5415 did_insert = pre_edge_insert (edge_list, index_map);
5417 /* In other places with reaching expressions, copy the expression to the
5418 specially allocated pseudo-reg that reaches the redundant expr. */
5419 pre_insert_copies ();
5422 commit_edge_insertions ();
5427 sbitmap_free (pre_redundant_insns);
5431 /* Top level routine to perform one PRE GCSE pass.
5433 Return nonzero if a change was made. */
5436 one_pre_gcse_pass (pass)
5441 gcse_subst_count = 0;
5442 gcse_create_count = 0;
5444 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5445 add_noreturn_fake_exit_edges ();
5447 compute_ld_motion_mems ();
5449 compute_hash_table (&expr_hash_table);
5450 trim_ld_motion_mems ();
5452 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5454 if (expr_hash_table.n_elems > 0)
5456 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5457 compute_pre_data ();
5458 changed |= pre_gcse ();
5459 free_edge_list (edge_list);
5464 remove_fake_edges ();
5465 free_hash_table (&expr_hash_table);
5469 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5470 current_function_name, pass, bytes_used);
5471 fprintf (gcse_file, "%d substs, %d insns created\n",
5472 gcse_subst_count, gcse_create_count);
5478 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5479 If notes are added to an insn which references a CODE_LABEL, the
5480 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5481 because the following loop optimization pass requires them. */
5483 /* ??? This is very similar to the loop.c add_label_notes function. We
5484 could probably share code here. */
5486 /* ??? If there was a jump optimization pass after gcse and before loop,
5487 then we would not need to do this here, because jump would add the
5488 necessary REG_LABEL notes. */
5491 add_label_notes (x, insn)
5495 enum rtx_code code = GET_CODE (x);
5499 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5501 /* This code used to ignore labels that referred to dispatch tables to
5502 avoid flow generating (slightly) worse code.
5504 We no longer ignore such label references (see LABEL_REF handling in
5505 mark_jump_label for additional information). */
5507 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5509 if (LABEL_P (XEXP (x, 0)))
5510 LABEL_NUSES (XEXP (x, 0))++;
5514 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5517 add_label_notes (XEXP (x, i), insn);
5518 else if (fmt[i] == 'E')
5519 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5520 add_label_notes (XVECEXP (x, i, j), insn);
5524 /* Compute transparent outgoing information for each block.
5526 An expression is transparent to an edge unless it is killed by
5527 the edge itself. This can only happen with abnormal control flow,
5528 when the edge is traversed through a call. This happens with
5529 non-local labels and exceptions.
5531 This would not be necessary if we split the edge. While this is
5532 normally impossible for abnormal critical edges, with some effort
5533 it should be possible with exception handling, since we still have
5534 control over which handler should be invoked. But due to increased
5535 EH table sizes, this may not be worthwhile. */
5538 compute_transpout ()
5544 sbitmap_vector_ones (transpout, last_basic_block);
5548 /* Note that flow inserted a nop at the end of basic blocks that
5549 end in call instructions for reasons other than abnormal
5551 if (GET_CODE (bb->end) != CALL_INSN)
5554 for (i = 0; i < expr_hash_table.size; i++)
5555 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5556 if (GET_CODE (expr->expr) == MEM)
5558 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5559 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5562 /* ??? Optimally, we would use interprocedural alias
5563 analysis to determine if this mem is actually killed
5565 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5570 /* Removal of useless null pointer checks */
5572 /* Called via note_stores. X is set by SETTER. If X is a register we must
5573 invalidate nonnull_local and set nonnull_killed. DATA is really a
5574 `null_pointer_info *'.
5576 We ignore hard registers. */
5579 invalidate_nonnull_info (x, setter, data)
5581 rtx setter ATTRIBUTE_UNUSED;
5585 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5587 while (GET_CODE (x) == SUBREG)
5590 /* Ignore anything that is not a register or is a hard register. */
5591 if (GET_CODE (x) != REG
5592 || REGNO (x) < npi->min_reg
5593 || REGNO (x) >= npi->max_reg)
5596 regno = REGNO (x) - npi->min_reg;
5598 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5599 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5602 /* Do null-pointer check elimination for the registers indicated in
5603 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5604 they are not our responsibility to free. */
5607 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5609 unsigned int *block_reg;
5610 sbitmap *nonnull_avin;
5611 sbitmap *nonnull_avout;
5612 struct null_pointer_info *npi;
5614 basic_block bb, current_block;
5615 sbitmap *nonnull_local = npi->nonnull_local;
5616 sbitmap *nonnull_killed = npi->nonnull_killed;
5617 int something_changed = 0;
5619 /* Compute local properties, nonnull and killed. A register will have
5620 the nonnull property if at the end of the current block its value is
5621 known to be nonnull. The killed property indicates that somewhere in
5622 the block any information we had about the register is killed.
5624 Note that a register can have both properties in a single block. That
5625 indicates that it's killed, then later in the block a new value is
5627 sbitmap_vector_zero (nonnull_local, last_basic_block);
5628 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5630 FOR_EACH_BB (current_block)
5632 rtx insn, stop_insn;
5634 /* Set the current block for invalidate_nonnull_info. */
5635 npi->current_block = current_block;
5637 /* Scan each insn in the basic block looking for memory references and
5639 stop_insn = NEXT_INSN (current_block->end);
5640 for (insn = current_block->head;
5642 insn = NEXT_INSN (insn))
5647 /* Ignore anything that is not a normal insn. */
5648 if (! INSN_P (insn))
5651 /* Basically ignore anything that is not a simple SET. We do have
5652 to make sure to invalidate nonnull_local and set nonnull_killed
5653 for such insns though. */
5654 set = single_set (insn);
5657 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5661 /* See if we've got a usable memory load. We handle it first
5662 in case it uses its address register as a dest (which kills
5663 the nonnull property). */
5664 if (GET_CODE (SET_SRC (set)) == MEM
5665 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5666 && REGNO (reg) >= npi->min_reg
5667 && REGNO (reg) < npi->max_reg)
5668 SET_BIT (nonnull_local[current_block->index],
5669 REGNO (reg) - npi->min_reg);
5671 /* Now invalidate stuff clobbered by this insn. */
5672 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5674 /* And handle stores; we do these last since any sets in INSN cannot
5675 kill the nonnull property if it is derived from a MEM
5676 appearing in a SET_DEST. */
5677 if (GET_CODE (SET_DEST (set)) == MEM
5678 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5679 && REGNO (reg) >= npi->min_reg
5680 && REGNO (reg) < npi->max_reg)
5681 SET_BIT (nonnull_local[current_block->index],
5682 REGNO (reg) - npi->min_reg);
5686 /* Now compute global properties based on the local properties. This
5687 is a classic global availability algorithm. */
5688 compute_available (nonnull_local, nonnull_killed,
5689 nonnull_avout, nonnull_avin);
5691 /* Now look at each bb and see if it ends with a compare of a value
5695 rtx last_insn = bb->end;
5696 rtx condition, earliest;
5697 int compare_and_branch;
5699 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5700 since BLOCK_REG[BB] is zero if this block did not end with a
5701 comparison against zero, this condition works. */
5702 if (block_reg[bb->index] < npi->min_reg
5703 || block_reg[bb->index] >= npi->max_reg)
5706 /* LAST_INSN is a conditional jump. Get its condition. */
5707 condition = get_condition (last_insn, &earliest);
5709 /* If we can't determine the condition then skip. */
5713 /* Is the register known to have a nonzero value? */
5714 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5717 /* Try to compute whether the compare/branch at the loop end is one or
5718 two instructions. */
5719 if (earliest == last_insn)
5720 compare_and_branch = 1;
5721 else if (earliest == prev_nonnote_insn (last_insn))
5722 compare_and_branch = 2;
5726 /* We know the register in this comparison is nonnull at exit from
5727 this block. We can optimize this comparison. */
5728 if (GET_CODE (condition) == NE)
5732 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5734 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5735 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5736 emit_barrier_after (new_jump);
5739 something_changed = 1;
5740 delete_insn (last_insn);
5741 if (compare_and_branch == 2)
5742 delete_insn (earliest);
5743 purge_dead_edges (bb);
5745 /* Don't check this block again. (Note that BLOCK_END is
5746 invalid here; we deleted the last instruction in the
5748 block_reg[bb->index] = 0;
5751 return something_changed;
5754 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5757 This is conceptually similar to global constant/copy propagation and
5758 classic global CSE (it even uses the same dataflow equations as cprop).
5760 If a register is used as a memory address with the form (mem (reg)), then we
5761 know that REG cannot be zero at that point in the program. Any instruction
5762 which sets REG "kills" this property.
5764 So, if every path leading to a conditional branch has an available memory
5765 reference of that form, then we know the register cannot have the value
5766 zero at the conditional branch.
5768 So we merely need to compute the local properties and propagate that data
5769 around the cfg, then optimize where possible.
5771 We run this pass two times. Once before CSE, then again after CSE. This
5772 has proven to be the most profitable approach. It is rare for new
5773 optimization opportunities of this nature to appear after the first CSE
5776 This could probably be integrated with global cprop with a little work. */
5779 delete_null_pointer_checks (f)
5780 rtx f ATTRIBUTE_UNUSED;
5782 sbitmap *nonnull_avin, *nonnull_avout;
5783 unsigned int *block_reg;
5788 struct null_pointer_info npi;
5789 int something_changed = 0;
5791 /* If we have only a single block, then there's nothing to do. */
5792 if (n_basic_blocks <= 1)
5795 /* Trying to perform global optimizations on flow graphs which have
5796 a high connectivity will take a long time and is unlikely to be
5797 particularly useful.
5799 In normal circumstances a cfg should have about twice as many edges
5800 as blocks. But we do not want to punish small functions which have
5801 a couple switch statements. So we require a relatively large number
5802 of basic blocks and the ratio of edges to blocks to be high. */
5803 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5806 /* We need four bitmaps, each with a bit for each register in each
5808 max_reg = max_reg_num ();
5809 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5811 /* Allocate bitmaps to hold local and global properties. */
5812 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5813 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5814 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5815 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5817 /* Go through the basic blocks, seeing whether or not each block
5818 ends with a conditional branch whose condition is a comparison
5819 against zero. Record the register compared in BLOCK_REG. */
5820 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
5823 rtx last_insn = bb->end;
5824 rtx condition, earliest, reg;
5826 /* We only want conditional branches. */
5827 if (GET_CODE (last_insn) != JUMP_INSN
5828 || !any_condjump_p (last_insn)
5829 || !onlyjump_p (last_insn))
5832 /* LAST_INSN is a conditional jump. Get its condition. */
5833 condition = get_condition (last_insn, &earliest);
5835 /* If we were unable to get the condition, or it is not an equality
5836 comparison against zero then there's nothing we can do. */
5838 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5839 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5840 || (XEXP (condition, 1)
5841 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5844 /* We must be checking a register against zero. */
5845 reg = XEXP (condition, 0);
5846 if (GET_CODE (reg) != REG)
5849 block_reg[bb->index] = REGNO (reg);
5852 /* Go through the algorithm for each block of registers. */
5853 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5856 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5857 something_changed |= delete_null_pointer_checks_1 (block_reg,
5863 /* Free the table of registers compared at the end of every block. */
5867 sbitmap_vector_free (npi.nonnull_local);
5868 sbitmap_vector_free (npi.nonnull_killed);
5869 sbitmap_vector_free (nonnull_avin);
5870 sbitmap_vector_free (nonnull_avout);
5872 return something_changed;
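/* Illustration (hypothetical source):

	tmp = *p;	   the load (mem (reg P)) makes P nonnull
	...
	if (p == 0)	   so this comparison is always false
	  abort ();

   when every path to the comparison contains such a dereference and no
   intervening set of P, the EQ branch (and the comparison, when it is a
   separate insn) is deleted by delete_null_pointer_checks_1 above.  */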
5875 /* Code Hoisting variables and subroutines. */
5877 /* Very busy expressions. */
5878 static sbitmap *hoist_vbein;
5879 static sbitmap *hoist_vbeout;
5881 /* Hoistable expressions. */
5882 static sbitmap *hoist_exprs;
5884 /* Dominator bitmaps. */
5885 dominance_info dominators;
5887 /* ??? We could compute post dominators and run this algorithm in
5888 reverse to perform tail merging; doing so would probably be
5889 more effective than the tail merging code in jump.c.
5891 It's unclear if tail merging could be run in parallel with
5892 code hoisting. It would be nice. */
5894 /* Allocate vars used for code hoisting analysis. */
5897 alloc_code_hoist_mem (n_blocks, n_exprs)
5898 int n_blocks, n_exprs;
5900 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5901 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5902 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5904 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5905 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5906 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5907 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5910 /* Free vars used for code hoisting analysis. */
5913 free_code_hoist_mem ()
5915 sbitmap_vector_free (antloc);
5916 sbitmap_vector_free (transp);
5917 sbitmap_vector_free (comp);
5919 sbitmap_vector_free (hoist_vbein);
5920 sbitmap_vector_free (hoist_vbeout);
5921 sbitmap_vector_free (hoist_exprs);
5922 sbitmap_vector_free (transpout);
5924 free_dominance_info (dominators);
5927 /* Compute the very busy expressions at entry/exit from each block.
5929 An expression is very busy if all paths from a given point
5930 compute the expression. */
5933 compute_code_hoist_vbeinout ()
5935 int changed, passes;
5938 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5939 sbitmap_vector_zero (hoist_vbein, last_basic_block);
5948 /* We scan the blocks in the reverse order to speed up the convergence. */
5950 FOR_EACH_BB_REVERSE (bb)
5952 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5953 hoist_vbeout[bb->index], transp[bb->index]);
5954 if (bb->next_bb != EXIT_BLOCK_PTR)
5955 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
5962 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
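/* Restated in dataflow form, the loop above iterates

	VBEIN[bb]  = ANTLOC[bb] | (VBEOUT[bb] & TRANSP[bb])
	VBEOUT[bb] = intersection of VBEIN over the successors of bb

   to a fixed point, with VBEOUT left at its initial empty value for the
   block laid out just before the exit block.  This is just the sbitmap
   calls above written out, not a separate algorithm.  */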
5965 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5968 compute_code_hoist_data ()
5970 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5971 compute_transpout ();
5972 compute_code_hoist_vbeinout ();
5973 dominators = calculate_dominance_info (CDI_DOMINATORS);
5975 fprintf (gcse_file, "\n");
5978 /* Determine if the expression identified by EXPR_INDEX would
5979 reach BB unimpaired if it were placed at the end of EXPR_BB.
5981 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5982 to me that the expression must either be computed or transparent in
5983 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5984 would allow the expression to be hoisted out of loops, even if
5985 the expression wasn't a loop invariant.
5987 Contrast this to reachability for PRE where an expression is
5988 considered reachable if *any* path reaches instead of *all*
5992 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5993 basic_block expr_bb;
5999 int visited_allocated_locally = 0;
6002 if (visited == NULL)
6004 visited_allocated_locally = 1;
6005 visited = xcalloc (last_basic_block, 1);
6008 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6010 basic_block pred_bb = pred->src;
6012 if (pred->src == ENTRY_BLOCK_PTR)
6014 else if (pred_bb == expr_bb)
6016 else if (visited[pred_bb->index])
6019 /* Does this predecessor generate this expression? */
6020 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6022 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6028 visited[pred_bb->index] = 1;
6029 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6034 if (visited_allocated_locally)
6037 return (pred == NULL);
6040 /* Actually perform code hoisting. */
6045 basic_block bb, dominated;
6047 unsigned int domby_len;
6049 struct expr **index_map;
6052 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6054 /* Compute a mapping from expression number (`bitmap_index') to
6055 hash table entry. */
6057 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6058 for (i = 0; i < expr_hash_table.size; i++)
6059 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6060 index_map[expr->bitmap_index] = expr;
6062 /* Walk over each basic block looking for potentially hoistable
6063 expressions; nothing gets hoisted from the entry block. */
6067 int insn_inserted_p;
6069 domby_len = get_dominated_by (dominators, bb, &domby);
6070 /* Examine each expression that is very busy at the exit of this
6071 block. These are the potentially hoistable expressions. */
6072 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6076 if (TEST_BIT (hoist_vbeout[bb->index], i)
6077 && TEST_BIT (transpout[bb->index], i))
6079 /* We've found a potentially hoistable expression, now
6080 we look at every block BB dominates to see if it
6081 computes the expression. */
6082 for (j = 0; j < domby_len; j++)
6084 dominated = domby[j];
6085 /* Ignore self dominance. */
6086 if (bb == dominated)
6088 /* We've found a dominated block, now see if it computes
6089 the busy expression and whether or not moving that
6090 expression to the "beginning" of that block is safe. */
6091 if (!TEST_BIT (antloc[dominated->index], i))
6094 /* Note if the expression would reach the dominated block
6095 unimpaired if it were placed at the end of BB.
6097 Keep track of how many times this expression is hoistable
6098 from a dominated block into BB. */
6099 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6103 /* If we found more than one hoistable occurrence of this
6104 expression, then note it in the bitmap of expressions to
6105 hoist. It makes no sense to hoist things which are computed
6106 in only one BB, and doing so tends to pessimize register
6107 allocation. One could increase this value to try harder
6108 to avoid any possible code expansion due to register
6109 allocation issues; however experiments have shown that
6110 the vast majority of hoistable expressions are only movable
6111 from two successors, so raising this threshold is likely
6112 to nullify any benefit we get from code hoisting. */
6115 SET_BIT (hoist_exprs[bb->index], i);
6120 /* If we found nothing to hoist, then quit now. */
6127 /* Loop over all the hoistable expressions. */
6128 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6130 /* We want to insert the expression into BB only once, so
6131 note when we've inserted it. */
6132 insn_inserted_p = 0;
6134 /* These tests should be the same as the tests above. */
6135 if (TEST_BIT (hoist_vbeout[bb->index], i))
6137 /* We've found a potentially hoistable expression, now
6138 we look at every block BB dominates to see if it
6139 computes the expression. */
6140 for (j = 0; j < domby_len; j++)
6142 dominated = domby[j];
6143 /* Ignore self dominance. */
6144 if (bb == dominated)
6147 /* We've found a dominated block, now see if it computes
6148 the busy expression and whether or not moving that
6149 expression to the "beginning" of that block is safe. */
6150 if (!TEST_BIT (antloc[dominated->index], i))
6153 /* The expression is computed in the dominated block and
6154 it would be safe to compute it at the start of the
6155 dominated block. Now we have to determine if the
6156 expression would reach the dominated block if it was
6157 placed at the end of BB. */
6158 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6160 struct expr *expr = index_map[i];
6161 struct occr *occr = expr->antic_occr;
6165 /* Find the right occurrence of this expression. */
6166 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6169 /* Should never happen. */
6175 set = single_set (insn);
6179 /* Create a pseudo-reg to store the result of reaching
6180 expressions into. Get the mode for the new pseudo
6181 from the mode of the original destination pseudo. */
6182 if (expr->reaching_reg == NULL)
6184 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6186 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6188 occr->deleted_p = 1;
6189 if (!insn_inserted_p)
6191 insert_insn_end_bb (index_map[i], bb, 0);
6192 insn_inserted_p = 1;
6204 /* Top level routine to perform one code hoisting (aka unification) pass
6206 Return nonzero if a change was made. */
6209 one_code_hoisting_pass ()
6213 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6214 compute_hash_table (&expr_hash_table);
6216 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6218 if (expr_hash_table.n_elems > 0)
6220 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6221 compute_code_hoist_data ();
6223 free_code_hoist_mem ();
6226 free_hash_table (&expr_hash_table);
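/* Illustration of the whole pass (hypothetical source):

	if (p)				t = a + b;
	  x = a + b;			if (p)
	else		==>		  x = t;
	  y = a + b;			else
					  y = t;

   "a + b" is very busy at the end of the block ending in the branch and
   is computed in two blocks that block dominates, so the code above
   inserts one computation at the end of the dominator and rewrites both
   occurrences as copies.  */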
6231 /* Here we provide the things required to do store motion towards
6232 the exit. In order for this to be effective, gcse also needed to
6233 be taught how to move a load when it is killed only by a store to itself.
6238 void foo(float scale)
6240 for (i=0; i<10; i++)
6244 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6245 the load out since it's live around the loop, and stored at the bottom
6248 The 'Load Motion' referred to and implemented in this file is
6249 an enhancement to gcse which when using edge based lcm, recognizes
6250 this situation and allows gcse to move the load out of the loop.
6252 Once gcse has hoisted the load, store motion can then push this
6253 load towards the exit, and we end up with no loads or stores of 'i'
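/* Continuing the example above (hypothetical source form; the passes
   work on RTL), the intended end result keeps 'i' in a register for the
   whole loop and touches its memory cell only once:

	void foo (float scale)
	{
	  int t;
	  for (t = 0; t < 10; t++)
	    ... loop body using t ...
	  i = t;
	}
   */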
6256 /* This will search the ldst list for a matching expression. If it
6257 doesn't find one, we create one and initialize it. */
6259 static struct ls_expr *
6263 struct ls_expr * ptr;
6265 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6266 if (expr_equiv_p (ptr->pattern, x))
6271 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6273 ptr->next = pre_ldst_mems;
6276 ptr->loads = NULL_RTX;
6277 ptr->stores = NULL_RTX;
6278 ptr->reaching_reg = NULL_RTX;
6281 ptr->hash_index = 0;
6282 pre_ldst_mems = ptr;
6288 /* Free up an individual ldst entry. */
6291 free_ldst_entry (ptr)
6292 struct ls_expr * ptr;
6294 free_INSN_LIST_list (& ptr->loads);
6295 free_INSN_LIST_list (& ptr->stores);
6300 /* Free up all memory associated with the ldst list. */
6305 while (pre_ldst_mems)
6307 struct ls_expr * tmp = pre_ldst_mems;
6309 pre_ldst_mems = pre_ldst_mems->next;
6311 free_ldst_entry (tmp);
6314 pre_ldst_mems = NULL;
6317 /* Dump debugging info about the ldst list. */
6320 print_ldst_list (file)
6323 struct ls_expr * ptr;
6325 fprintf (file, "LDST list: \n");
6327 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6329 fprintf (file, " Pattern (%3d): ", ptr->index);
6331 print_rtl (file, ptr->pattern);
6333 fprintf (file, "\n Loads : ");
6336 print_rtl (file, ptr->loads);
6338 fprintf (file, "(nil)");
6340 fprintf (file, "\n Stores : ");
6343 print_rtl (file, ptr->stores);
6345 fprintf (file, "(nil)");
6347 fprintf (file, "\n\n");
6350 fprintf (file, "\n");
6353 /* Return the entry for X in the list of ldst only expressions, or NULL. */
6355 static struct ls_expr *
6356 find_rtx_in_ldst (x)
6359 struct ls_expr * ptr;
6361 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6362 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6368 /* Assign each element of the list of mems a monotonically increasing value. */
6373 struct ls_expr * ptr;
6376 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6382 /* Return first item in the list. */
6384 static inline struct ls_expr *
6387 return pre_ldst_mems;
6390 /* Return the next item in the list after the specified one. */
6392 static inline struct ls_expr *
6394 struct ls_expr * ptr;
6399 /* Load Motion for loads which only kill themselves. */
6401 /* Return true if x is a simple MEM operation, with no registers or
6402 side effects. These are the types of loads we consider for the
6403 ld_motion list, otherwise we let the usual aliasing take care of it. */
6409 if (GET_CODE (x) != MEM)
6412 if (MEM_VOLATILE_P (x))
6415 if (GET_MODE (x) == BLKmode)
6418 if (!rtx_varies_p (XEXP (x, 0), 0))
6424 /* Make sure there isn't a buried reference in this pattern anywhere.
6425 If there is, invalidate the entry for it since we're not capable
6426 of fixing it up just yet. We have to be sure we know about ALL
6427 loads since the aliasing code will allow all entries in the
6428 ld_motion list to not-alias one another. If we miss a load, we will get
6429 the wrong value since gcse might common it and we won't know to
6433 invalidate_any_buried_refs (x)
6438 struct ls_expr * ptr;
6440 /* Invalidate it in the list. */
6441 if (GET_CODE (x) == MEM && simple_mem (x))
6443 ptr = ldst_entry (x);
6447 /* Recursively process the insn. */
6448 fmt = GET_RTX_FORMAT (GET_CODE (x));
6450 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6453 invalidate_any_buried_refs (XEXP (x, i));
6454 else if (fmt[i] == 'E')
6455 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6456 invalidate_any_buried_refs (XVECEXP (x, i, j));
6460 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
6461 being defined as MEM loads and stores to symbols, with no
6462 side effects and no registers in the expression. If there are any
6463 uses/defs which don't match these criteria, the entry is invalidated and
6464 trimmed out later. */
6467 compute_ld_motion_mems ()
6469 struct ls_expr * ptr;
6473 pre_ldst_mems = NULL;
6477 for (insn = bb->head;
6478 insn && insn != NEXT_INSN (bb->end);
6479 insn = NEXT_INSN (insn))
6481 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6483 if (GET_CODE (PATTERN (insn)) == SET)
6485 rtx src = SET_SRC (PATTERN (insn));
6486 rtx dest = SET_DEST (PATTERN (insn));
6488 /* Check for a simple LOAD... */
6489 if (GET_CODE (src) == MEM && simple_mem (src))
6491 ptr = ldst_entry (src);
6492 if (GET_CODE (dest) == REG)
6493 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6499 /* Make sure there isn't a buried load somewhere. */
6500 invalidate_any_buried_refs (src);
6503 /* Check for stores. Don't worry about aliased ones, they
6504 will block any movement we might do later. We only care
6505 about this exact pattern since those are the only
6506 circumstances under which we will ignore the aliasing info. */
6507 if (GET_CODE (dest) == MEM && simple_mem (dest))
6509 ptr = ldst_entry (dest);
6511 if (GET_CODE (src) != MEM
6512 && GET_CODE (src) != ASM_OPERANDS)
6513 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6519 invalidate_any_buried_refs (PATTERN (insn));
6525 /* Remove any references that have been either invalidated or are not in the
6526 expression list for pre gcse. */
6529 trim_ld_motion_mems ()
6531 struct ls_expr * last = NULL;
6532 struct ls_expr * ptr = first_ls_expr ();
6536 int del = ptr->invalid;
6537 struct expr * expr = NULL;
6539 /* Delete if entry has been made invalid. */
6545 /* Delete if we cannot find this mem in the expression list. */
6546 for (i = 0; i < expr_hash_table.size && del; i++)
6548 for (expr = expr_hash_table.table[i];
6550 expr = expr->next_same_hash)
6551 if (expr_equiv_p (expr->expr, ptr->pattern))
6563 last->next = ptr->next;
6564 free_ldst_entry (ptr);
6569 pre_ldst_mems = pre_ldst_mems->next;
6570 free_ldst_entry (ptr);
6571 ptr = pre_ldst_mems;
6576 /* Set the expression field if we are keeping it. */
6583 /* Show the world what we've found. */
6584 if (gcse_file && pre_ldst_mems != NULL)
6585 print_ldst_list (gcse_file);
6588 /* This routine will take an expression which we are replacing with
6589 a reaching register, and update any stores that are needed if
6590 that expression is in the ld_motion list. Stores are updated by
6591 copying their SRC to the reaching register, and then storing
6592 the reaching register into the store location. This keeps the
6593 correct value in the reaching register for the loads. */
6596 update_ld_motion_stores (expr)
6599 struct ls_expr * mem_ptr;
6601 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6603 /* We can try to find just the REACHED stores, but it shouldn't
6604 matter to set the reaching reg everywhere... some might be
6605 dead and should be eliminated later. */
6607 /* We replace SET mem = expr with
6609 SET mem = reg , where reg is the
6610 reaching reg used in the load. */
6611 rtx list = mem_ptr->stores;
6613 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6615 rtx insn = XEXP (list, 0);
6616 rtx pat = PATTERN (insn);
6617 rtx src = SET_SRC (pat);
6618 rtx reg = expr->reaching_reg;
6621 /* If we've already copied it, continue. */
6622 if (expr->reaching_reg == src)
6627 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6628 print_rtl (gcse_file, expr->reaching_reg);
6629 fprintf (gcse_file, ":\n ");
6630 print_inline_rtx (gcse_file, insn, 8);
6631 fprintf (gcse_file, "\n");
6634 copy = gen_move_insn ( reg, SET_SRC (pat));
6635 new = emit_insn_before (copy, insn);
6636 record_one_set (REGNO (reg), new);
6637 SET_SRC (pat) = reg;
6639 /* Un-recognize this pattern since it's probably different now. */
6640 INSN_CODE (insn) = -1;
6641 gcse_create_count++;
6646 /* Store motion code. */
6648 /* This is used to communicate the target bitvector we want to use in the
6649 reg_set_info routine when called via the note_stores mechanism. */
6650 static sbitmap * regvec;
6652 /* Used in computing the reverse edge graph bit vectors. */
6653 static sbitmap * st_antloc;
6655 /* Global holding the number of store expressions we are dealing with. */
6656 static int num_stores;
6658 /* Check whether we need to mark a register as set. Called from note_stores. */
6661 reg_set_info (dest, setter, data)
6662 rtx dest, setter ATTRIBUTE_UNUSED;
6663 void * data ATTRIBUTE_UNUSED;
6665 if (GET_CODE (dest) == SUBREG)
6666 dest = SUBREG_REG (dest);
6668 if (GET_CODE (dest) == REG)
6669 SET_BIT (*regvec, REGNO (dest));
6672 /* Return nonzero if the register operands of expression X are killed
6673 anywhere in basic block BB. */
6676 store_ops_ok (x, bb)
6684 /* Repeat is used to turn tail-recursion into iteration. */
6690 code = GET_CODE (x);
6694 /* If a reg has changed after us in this
6695 block, the operand has been killed. */
6696 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6724 i = GET_RTX_LENGTH (code) - 1;
6725 fmt = GET_RTX_FORMAT (code);
6731 rtx tem = XEXP (x, i);
6733 /* If we are about to do the last recursive call
6734 needed at this level, change it into iteration.
6735 This function is called enough to be worth it. */
6742 if (! store_ops_ok (tem, bb))
6745 else if (fmt[i] == 'E')
6749 for (j = 0; j < XVECLEN (x, i); j++)
6751 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6760 /* Determine whether INSN is a MEM store pattern that we will consider moving. */
6763 find_moveable_store (insn)
6766 struct ls_expr * ptr;
6767 rtx dest = PATTERN (insn);
6769 if (GET_CODE (dest) != SET
6770 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6773 dest = SET_DEST (dest);
6775 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6776 || GET_MODE (dest) == BLKmode)
6779 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6782 if (rtx_varies_p (XEXP (dest, 0), 0))
6785 ptr = ldst_entry (dest);
6786 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
/* Build the store table: record, for each basic block, which registers
   are set in it, and collect the stores we may be able to move.
   Return the number of store expressions found.  */
6793 compute_store_table ()
6800 max_gcse_regno = max_reg_num ();
6802 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
6804 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
6807 /* Find all the stores we care about. */
6810 regvec = & (reg_set_in_block[bb->index]);
6811 for (insn = bb->end;
insn && insn != PREV_INSN (bb->head);
6813 insn = PREV_INSN (insn))
6815 /* Ignore anything that is not a normal insn. */
6816 if (! INSN_P (insn))
6819 if (GET_CODE (insn) == CALL_INSN)
6821 bool clobbers_all = false;
6822 #ifdef NON_SAVING_SETJMP
6823 if (NON_SAVING_SETJMP
6824 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6825 clobbers_all = true;
6828 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (clobbers_all
    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6831 SET_BIT (reg_set_in_block[bb->index], regno);
6834 pat = PATTERN (insn);
6835 note_stores (pat, reg_set_info, NULL);
6837 /* Now that we've marked regs, look for stores. */
6838 if (GET_CODE (pat) == SET)
6839 find_moveable_store (insn);
6843 ret = enumerate_ldsts ();
6847 fprintf (gcse_file, "Store Motion Expressions.\n");
6848 print_ldst_list (gcse_file);
6854 /* Check to see if the load X is aliased with STORE_PATTERN. */
6857 load_kills_store (x, store_pattern)
6858 rtx x, store_pattern;
6860 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6865 /* Go through the entire insn X, looking for any loads which might alias
6866 STORE_PATTERN. Return 1 if found. */
6869 find_loads (x, store_pattern)
6870 rtx x, store_pattern;
6879 if (GET_CODE (x) == SET)
6882 if (GET_CODE (x) == MEM)
6884 if (load_kills_store (x, store_pattern))
6888 /* Recursively process the insn. */
6889 fmt = GET_RTX_FORMAT (GET_CODE (x));
6891 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6894 ret |= find_loads (XEXP (x, i), store_pattern);
6895 else if (fmt[i] == 'E')
6896 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6897 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6902 /* Check if INSN kills the store pattern X (is aliased with it).
Return 1 if it does.  */
6906 store_killed_in_insn (x, insn)
6909 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6912 if (GET_CODE (insn) == CALL_INSN)
6914 /* A normal or pure call might read from pattern,
6915 but a const call will not. */
6916 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
6919 if (GET_CODE (PATTERN (insn)) == SET)
6921 rtx pat = PATTERN (insn);
6922 /* Check for memory stores to aliased objects. */
6923 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
/* Pretend it's a load and check for aliasing.  */
6925 if (find_loads (SET_DEST (pat), x))
6927 return find_loads (SET_SRC (pat), x);
6930 return find_loads (PATTERN (insn), x);
6933 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6934 within basic block BB. */
6937 store_killed_after (x, insn, bb)
6946 /* Check if the register operands of the store are OK in this block.
6947 Note that if registers are changed ANYWHERE in the block, we'll
6948 decide we can't move it, regardless of whether it changed above
6949 or below the store. This could be improved by checking the register
6950 operands while looking for aliasing in each insn. */
6951 if (!store_ops_ok (XEXP (x, 0), bb))
6954 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6955 if (store_killed_in_insn (x, insn))
6961 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6962 within basic block BB. */
6964 store_killed_before (x, insn, bb)
6968 rtx first = bb->head;
6971 return store_killed_in_insn (x, insn);
6973 /* Check if the register operands of the store are OK in this block.
6974 Note that if registers are changed ANYWHERE in the block, we'll
6975 decide we can't move it, regardless of whether it changed above
6976 or below the store. This could be improved by checking the register
6977 operands while looking for aliasing in each insn. */
6978 if (!store_ops_ok (XEXP (x, 0), bb))
6981 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6982 if (store_killed_in_insn (x, insn))
6988 #define ANTIC_STORE_LIST(x) ((x)->loads)
6989 #define AVAIL_STORE_LIST(x) ((x)->stores)
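/* These reuse the `loads' and `stores' fields of struct ls_expr to
   hold the anticipatable and available store insns for each expression
   while the store vectors are built.  */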
6991 /* Given the table of available store insns at the end of blocks,
6992 determine which ones are not killed by aliasing, and generate
the appropriate vectors for gen and kill.  */
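/* Roughly (see the loop below): a store is available out of its block
   if nothing from it to the end of the block kills it, and
   anticipatable into its block if nothing from the start of the block
   up to it kills it; the same insn may be both.  */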
6995 build_store_vectors ()
6999 struct ls_expr * ptr;
7001 /* Build the gen_vector. This is any store in the table which is not killed
7002 by aliasing later in its block. */
7003 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7004 sbitmap_vector_zero (ae_gen, last_basic_block);
7006 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7007 sbitmap_vector_zero (st_antloc, last_basic_block);
7009 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
/* Put all the stores into either the antic list, or the avail list,
   or both.  */
7013 rtx store_list = ptr->stores;
7014 ptr->stores = NULL_RTX;
7016 for (st = store_list; st != NULL; st = XEXP (st, 1))
7018 insn = XEXP (st, 0);
7019 bb = BLOCK_FOR_INSN (insn);
7021 if (!store_killed_after (ptr->pattern, insn, bb))
/* If we've already seen an available expression in this block,
   we can delete the one we saw already (it occurs earlier in
   the block) and replace it with this one.  We'll copy the
   old SRC expression to an unused register in case there
   are any side effects.  */
7028 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7030 /* Find previous store. */
7032 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
7033 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
7037 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7039 fprintf (gcse_file, "Removing redundant store:\n");
7040 replace_store_insn (r, XEXP (st, 0), bb);
7041 XEXP (st, 0) = insn;
7045 SET_BIT (ae_gen[bb->index], ptr->index);
7046 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7047 AVAIL_STORE_LIST (ptr));
7050 if (!store_killed_before (ptr->pattern, insn, bb))
7052 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
7053 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7054 ANTIC_STORE_LIST (ptr));
7058 /* Free the original list of store insns. */
7059 free_INSN_LIST_list (&store_list);
7062 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7063 sbitmap_vector_zero (ae_kill, last_basic_block);
7065 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7066 sbitmap_vector_zero (transp, last_basic_block);
7068 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7071 if (store_killed_after (ptr->pattern, b->head, b))
7073 /* The anticipatable expression is not killed if it's gen'd. */
We leave this check out for now.  If we have a code sequence
in a block which looks like:
	ST MEMa = x
	L     y = MEMa
	ST MEMa = z
we should flag this as having an ANTIC expression, NOT
transparent, NOT killed, and AVAIL.
Unfortunately, since we haven't rewritten all loads to
use the reaching reg, we'll end up doing an incorrect
load in the middle here if we push the store down.  It happens in
gcc.c-torture/execute/960311-1.c with -O3.
If we always kill it in this case, we'll sometimes do
unnecessary work, but it shouldn't actually hurt anything.
if (!TEST_BIT (ae_gen[b], ptr->index)).  */
7089 SET_BIT (ae_kill[b->index], ptr->index);
7092 SET_BIT (transp[b->index], ptr->index);
/* Any block with no exits calls some non-returning function, so
   we'd better mark the store killed here, or we might not store to
   it at all.  If we knew it was abort, we wouldn't have to store,
   but we don't know that for sure.  */
7101 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7102 print_ldst_list (gcse_file);
7103 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7104 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7105 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7106 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7110 /* Insert an instruction at the beginning of a basic block, and update
7111 the BLOCK_HEAD if needed. */
7114 insert_insn_start_bb (insn, bb)
7118 /* Insert at start of successor block. */
7119 rtx prev = PREV_INSN (bb->head);
7120 rtx before = bb->head;
7123 if (GET_CODE (before) != CODE_LABEL
7124 && (GET_CODE (before) != NOTE
7125 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7128 if (prev == bb->end)
7130 before = NEXT_INSN (before);
7133 insn = emit_insn_after (insn, prev);
7137 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7139 print_inline_rtx (gcse_file, insn, 6);
7140 fprintf (gcse_file, "\n");
7144 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7145 the memory reference, and E is the edge to insert it on. Returns nonzero
7146 if an edge insertion was performed. */
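/* A sketch of the strategy used below: prefer a single insertion at
   the start of the destination block when every incoming edge wants
   this store; otherwise insert on the edge itself, falling back to the
   block head for abnormal edges.  */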
7149 insert_store (expr, e)
7150 struct ls_expr * expr;
/* We did all the deletes before this insert, so if we didn't delete a
   store, then we haven't set the reaching reg yet either.  */
7159 if (expr->reaching_reg == NULL_RTX)
7162 reg = expr->reaching_reg;
7163 insn = gen_move_insn (expr->pattern, reg);
/* If we are inserting this expression on ALL predecessor edges of a BB,
   insert it at the start of the BB, and reset the insert bits on the
   other edges so we don't try to insert it there as well.  */
7169 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7171 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7172 if (index == EDGE_INDEX_NO_EDGE)
7174 if (! TEST_BIT (pre_insert_map[index], expr->index))
/* If tmp is NULL, we found an insertion on every edge; blank the
   insertion vector for these edges, and insert at the start of the BB.  */
7180 if (!tmp && bb != EXIT_BLOCK_PTR)
7182 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7184 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7185 RESET_BIT (pre_insert_map[index], expr->index);
7187 insert_insn_start_bb (insn, bb);
/* We can't insert on this edge, so we'll insert at the head of the
   successor's block.  See Morgan, sec 10.5.  */
7193 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7195 insert_insn_start_bb (insn, bb);
7199 insert_insn_on_edge (insn, e);
7203 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7204 e->src->index, e->dest->index);
7205 print_inline_rtx (gcse_file, insn, 6);
7206 fprintf (gcse_file, "\n");
7212 /* This routine will replace a store with a SET to a specified register. */
7215 replace_store_insn (reg, del, bb)
7221 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7222 insn = emit_insn_after (insn, del);
7227 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7228 print_inline_rtx (gcse_file, del, 6);
7229 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7230 print_inline_rtx (gcse_file, insn, 6);
7231 fprintf (gcse_file, "\n");
7238 /* Delete a store, but copy the value that would have been stored into
7239 the reaching_reg for later storing. */
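/* In RTL terms (illustrative): (set (mem) (src)) becomes
   (set (reg R) (src)), where R is EXPR's reaching_reg; the stores
   inserted later by insert_store copy R back into the memory
   location.  */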
7242 delete_store (expr, bb)
7243 struct ls_expr * expr;
7248 if (expr->reaching_reg == NULL_RTX)
7249 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7252 /* If there is more than 1 store, the earlier ones will be dead,
7253 but it doesn't hurt to replace them here. */
7254 reg = expr->reaching_reg;
7256 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7259 if (BLOCK_FOR_INSN (del) == bb)
7261 /* We know there is only one since we deleted redundant
7262 ones during the available computation. */
7263 replace_store_insn (reg, del, bb);
7269 /* Free memory used by store motion. */
7272 free_store_memory ()
7277 sbitmap_vector_free (ae_gen);
7279 sbitmap_vector_free (ae_kill);
7281 sbitmap_vector_free (transp);
7283 sbitmap_vector_free (st_antloc);
7285 sbitmap_vector_free (pre_insert_map);
7287 sbitmap_vector_free (pre_delete_map);
7288 if (reg_set_in_block)
7289 sbitmap_vector_free (reg_set_in_block);
7291 ae_gen = ae_kill = transp = st_antloc = NULL;
7292 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7295 /* Perform store motion. Much like gcse, except we move expressions the
7296 other way by looking at the flowgraph in reverse. */
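/* The rough sequence below: build the store table, compute the
   antic/avail/transparent/kill vectors, solve the reverse LCM problem,
   then delete and insert stores according to the resulting delete and
   insert maps.  */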
7303 struct ls_expr * ptr;
7304 int update_flow = 0;
7308 fprintf (gcse_file, "before store motion\n");
7309 print_rtl (gcse_file, get_insns ());
7313 init_alias_analysis ();
7315 /* Find all the stores that are live to the end of their block. */
7316 num_stores = compute_store_table ();
7317 if (num_stores == 0)
7319 sbitmap_vector_free (reg_set_in_block);
7320 end_alias_analysis ();
/* Now compute what's actually available to move.  */
7325 add_noreturn_fake_exit_edges ();
7326 build_store_vectors ();
7328 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7329 st_antloc, ae_kill, &pre_insert_map,
7332 /* Now we want to insert the new stores which are going to be needed. */
7333 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7336 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7337 delete_store (ptr, bb);
7339 for (x = 0; x < NUM_EDGES (edge_list); x++)
7340 if (TEST_BIT (pre_insert_map[x], ptr->index))
7341 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7345 commit_edge_insertions ();
7347 free_store_memory ();
7348 free_edge_list (edge_list);
7349 remove_fake_edges ();
7350 end_alias_analysis ();
7354 /* Entry point for jump bypassing optimization pass. */
7362 /* We do not construct an accurate cfg in functions which call
7363 setjmp, so just punt to be safe. */
7364 if (current_function_calls_setjmp)
7367 /* For calling dump_foo fns from gdb. */
7368 debug_stderr = stderr;
7371 /* Identify the basic block information for this function, including
7372 successors and predecessors. */
7373 max_gcse_regno = max_reg_num ();
7376 dump_flow_info (file);
7378 /* Return if there's nothing to do. */
7379 if (n_basic_blocks <= 1)
7382 /* Trying to perform global optimizations on flow graphs which have
7383 a high connectivity will take a long time and is unlikely to be
7384 particularly useful.
7386 In normal circumstances a cfg should have about twice as many edges
as blocks.  But we do not want to punish small functions which have
a couple of switch statements.  So we require a relatively large number
7389 of basic blocks and the ratio of edges to blocks to be high. */
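/* For example, a function with 1500 basic blocks and 30000 edges has
   30000 / 1500 = 20 edges per block and trips the test below, while
   one with 999 blocks passes no matter how many edges it has.  */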
7390 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7392 if (warn_disabled_optimization)
7393 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7394 n_basic_blocks, n_edges / n_basic_blocks);
7398 /* If allocating memory for the cprop bitmap would take up too much
7399 storage it's better just to disable the optimization. */
7401 * SBITMAP_SET_SIZE (max_gcse_regno)
7402 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7404 if (warn_disabled_optimization)
warning ("BYPASS disabled: %d basic blocks and %d registers",
7406 n_basic_blocks, max_gcse_regno);
7411 /* See what modes support reg/reg copy operations. */
7412 if (! can_copy_init_p)
7414 compute_can_copy ();
7415 can_copy_init_p = 1;
7418 gcc_obstack_init (&gcse_obstack);
7421 /* We need alias. */
7422 init_alias_analysis ();
7424 /* Record where pseudo-registers are set. This data is kept accurate
7425 during each pass. ??? We could also record hard-reg information here
[since it's unchanging], however it is currently done during hash table
computation.
7429 It may be tempting to compute MEM set information here too, but MEM sets
7430 will be subject to code motion one day and thus we need to compute
7431 information about memory sets when we build the hash tables. */
7433 alloc_reg_set_mem (max_gcse_regno);
7434 compute_sets (get_insns ());
7436 max_gcse_regno = max_reg_num ();
7437 alloc_gcse_mem (get_insns ());
7438 changed = one_cprop_pass (1, 1, 1);
7443 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7444 current_function_name, n_basic_blocks);
7445 fprintf (file, "%d bytes\n\n", bytes_used);
7448 obstack_free (&gcse_obstack, NULL);
7449 free_reg_set_mem ();
7451 /* We are finished with alias. */
7452 end_alias_analysis ();
7453 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7458 #include "gt-gcse.h"