1 /* Global common subexpression elimination/Partial redundancy elimination
2 and global constant/copy propagation for GNU compiler.
3 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 /* TODO
24 - reordering of memory allocation and freeing to be more space efficient
25 - do rough calc of how many regs are needed in each block, and a rough
26 calc of how many regs are available in each class and use that to
27 throttle back the code in cases where RTX_COST is minimal.
28 - a store to the same address as a load does not kill the load if the
29 source of the store is also the destination of the load. Handling this
30 allows more load motion, particularly out of loops.
31 - ability to realloc sbitmap vectors would allow one initial computation
32 of reg_set_in_block with only subsequent additions, rather than
33 recomputing it for each pass
37 /* References searched while implementing this.
39 Compilers Principles, Techniques and Tools
40 Aho, Sethi, Ullman
41 Addison-Wesley, 1988
43 Global Optimization by Suppression of Partial Redundancies
44 E. Morel, C. Renvoise
45 Communications of the ACM, Vol. 22, Num. 2, Feb. 1979
47 A Portable Machine-Independent Global Optimizer - Design and Measurements
48 Frederick Chow
49 Stanford Ph.D. thesis, Dec. 1983
51 A Fast Algorithm for Code Movement Optimization
52 D.M. Dhamdhere
53 SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988
55 A Solution to a Problem with Morel and Renvoise's
56 Global Optimization by Suppression of Partial Redundancies
57 K-H Drechsler, M.P. Stadel
58 ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988
60 Practical Adaptation of the Global Optimization
61 Algorithm of Morel and Renvoise
62 D.M. Dhamdhere
63 ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991
65 Efficiently Computing Static Single Assignment Form and the Control
66 Dependence Graph
67 R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
68 ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991
70 Lazy Code Motion
71 J. Knoop, O. Ruthing, B. Steffen
72 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
74 What's In a Region? Or Computing Control Dependence Regions in Near-Linear
75 Time for Reducible Flow Control
76 Thomas Ball
77 ACM Letters on Programming Languages and Systems,
78 Vol. 2, Num. 1-4, Mar-Dec 1993
80 An Efficient Representation for Sparse Sets
81 Preston Briggs, Linda Torczon
82 ACM Letters on Programming Languages and Systems,
83 Vol. 2, Num. 1-4, Mar-Dec 1993
85 A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
86 K-H Drechsler, M.P. Stadel
87 ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993
89 Partial Dead Code Elimination
90 J. Knoop, O. Ruthing, B. Steffen
91 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
93 Effective Partial Redundancy Elimination
94 P. Briggs, K.D. Cooper
95 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
97 The Program Structure Tree: Computing Control Regions in Linear Time
98 R. Johnson, D. Pearson, K. Pingali
99 ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994
101 Optimal Code Motion: Theory and Practice
102 J. Knoop, O. Ruthing, B. Steffen
103 ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994
105 The power of assignment motion
106 J. Knoop, O. Ruthing, B. Steffen
107 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
109 Global code motion / global value numbering
110 C. Click
111 ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI
113 Value Driven Redundancy Elimination
114 L.T. Simpson
115 Rice University Ph.D. thesis, Apr. 1996
117 Value Numbering
118 L.T. Simpson
119 Massively Scalar Compiler Project, Rice University, Sep. 1996
121 High Performance Compilers for Parallel Computing
122 Michael Wolfe
123 Addison-Wesley, 1996
125 Advanced Compiler Design and Implementation
126 Steven Muchnick
127 Morgan Kaufmann, 1997
129 Building an Optimizing Compiler
130 Robert Morgan
131 Digital Press, 1998
133 People wishing to speed up the code here should read:
134 Elimination Algorithms for Data Flow Analysis
135 B.G. Ryder, M.C. Paull
136 ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986
138 How to Analyze Large Programs Efficiently and Informatively
139 D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
140 ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI
142 People wishing to do something different can find various possibilities
143 in the above papers and elsewhere.
144 */
146 #include "config.h"
147 #include "system.h"
148 #include "coretypes.h"
149 #include "tm.h"
150 #include "toplev.h"
151 #include "rtl.h"
152 #include "tm_p.h"
153 #include "regs.h"
155 #include "hard-reg-set.h"
156 #include "flags.h"
157 #include "real.h"
158 #include "insn-config.h"
159 #include "recog.h"
160 #include "basic-block.h"
161 #include "output.h"
162 #include "function.h"
163 #include "expr.h"
164 #include "except.h"
165 #include "ggc.h"
166 #include "params.h"
167 #include "cselib.h"
168 #include "obstack.h"
171 /* Propagate flow information through back edges and thus enable PRE's
172 moving loop invariant calculations out of loops.
174 Originally this tended to create worse overall code, but several
175 improvements during the development of PRE seem to have made following
176 back edges generally a win.
178 Note much of the loop invariant code motion done here would normally
179 be done by loop.c, which has more heuristics for when to move invariants
180 out of loops. At some point we might need to move some of those
181 heuristics into gcse.c. */
183 /* We support GCSE via Partial Redundancy Elimination. PRE optimizations
184 are a superset of those done by GCSE.
186 We perform the following steps:
188 1) Compute basic block information.
190 2) Compute table of places where registers are set.
192 3) Perform copy/constant propagation.
194 4) Perform global cse.
196 5) Perform another pass of copy/constant propagation.
198 Two passes of copy/constant propagation are done because the first one
199 enables more GCSE and the second one helps to clean up the copies that
200 GCSE creates. This is needed more for PRE than for Classic because Classic
201 GCSE will try to use an existing register containing the common
202 subexpression rather than create a new one. This is harder to do for PRE
203 because of the code motion (which Classic GCSE doesn't do).
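To see why two passes help, consider a source-level sketch (illustrative
only, not from an actual compilation): starting from
c = a; x = c + b; y = a + b;
the first copy propagation rewrites `c + b' as `a + b', GCSE then
transforms the code into
c = a; t = a + b; x = t; y = t;
and the second copy propagation cleans up the copies of `t' that the
transformation introduced.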
205 Expressions we are interested in GCSE-ing are of the form
206 (set (pseudo-reg) (expression)).
207 Function want_to_gcse_p says what these are.
209 PRE handles moving invariant expressions out of loops (by treating them as
210 partially redundant).
212 Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
213 assignment) based GVN (global value numbering). L. T. Simpson's paper
214 (Rice University) on value numbering is a useful reference for this.
216 **********************
218 We used to support multiple passes but there are diminishing returns in
219 doing so. The first pass usually makes 90% of the changes that are doable.
220 A second pass can make a few more changes made possible by the first pass.
221 Experiments show any further passes don't make enough changes to justify
222 the expense.
224 A study of spec92 using an unlimited number of passes:
225 [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
226 [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
227 [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1
229 It was found doing copy propagation between each pass enables further
230 substitutions.
232 PRE is quite expensive in complicated functions because the DFA can take
233 a while to converge. Hence we only perform one pass. The parameter
234 max-gcse-passes can be modified if one wants to experiment.
236 **********************
238 The steps for PRE are:
240 1) Build the hash table of expressions we wish to GCSE (expr_hash_table).
242 2) Perform the data flow analysis for PRE.
244 3) Delete the redundant instructions
246 4) Insert the required copies [if any] that make the partially
247 redundant instructions fully redundant.
249 5) For other reaching expressions, insert an instruction to copy the value
250 to a newly created pseudo that will reach the redundant instruction.
252 The deletion is done first so that when we do insertions we
253 know which pseudo reg to use.
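As a concrete sketch (illustrative source code, not RTL): in
if (c) x = a + b; else d = 1;
y = a + b;
the computation of `a + b' reaching `y' is partially redundant. Step 3
deletes it, replacing it with a copy from a new pseudo `t' (the
expression's reaching_reg); step 5 copies the value of `x = a + b' into
`t' on the then-path; and step 4 inserts a full computation `t = a + b'
on the else-path, making `y = t' fully redundant.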
255 Various papers have argued that PRE DFA is expensive (O(n^2)) and others
256 argue it is not. The number of iterations for the algorithm to converge
257 is typically 2-4 so I don't view it as that expensive (relatively speaking).
259 PRE GCSE depends heavily on the second CSE pass to clean up the copies
260 we create. To make an expression reach the place where it's redundant,
261 the result of the expression is copied to a new register, and the redundant
262 expression is deleted by replacing it with this new register. Classic GCSE
263 doesn't have this problem as much as it computes the reaching defs of
264 each register in each block and thus can try to use an existing register.
266 **********************
268 A fair bit of simplicity is created by creating small functions for simple
269 tasks, even when the function is only called in one place. This may
270 measurably slow things down [or may not] by creating more function call
271 overhead than is necessary. The source is laid out so that it's trivial
272 to make the affected functions inline so that one can measure what speed
273 up, if any, can be achieved, and maybe later when things settle things can
274 be rearranged.
276 Help stamp out big monolithic functions! */
278 /* GCSE global vars. */
280 /* -dG dump file. */
281 static FILE *gcse_file;
283 /* Note whether or not we should run jump optimization after gcse. We
284 want to do this for two cases.
286 * If we changed any jumps via cprop.
288 * If we added any labels via edge splitting. */
290 static int run_jump_opt_after_gcse;
292 /* Bitmaps are normally not included in debugging dumps.
293 However it's useful to be able to print them from GDB.
294 We could create special functions for this, but it's simpler to
295 just allow passing stderr to the dump_foo fns. Since stderr can
296 be a macro, we store a copy here. */
297 static FILE *debug_stderr;
299 /* An obstack for our working variables. */
300 static struct obstack gcse_obstack;
302 /* Nonzero for each mode that supports (set (reg) (reg)).
303 This is trivially true for integer and floating point values.
304 It may or may not be true for condition codes. */
305 static char can_copy_p[(int) NUM_MACHINE_MODES];
307 /* Nonzero if can_copy_p has been initialized. */
308 static int can_copy_init_p;
310 struct reg_use {rtx reg_rtx; };
312 /* Hash table of expressions. */
314 struct expr
315 {
316 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
317 rtx expr;
318 /* Index in the available expression bitmaps. */
319 int bitmap_index;
320 /* Next entry with the same hash. */
321 struct expr *next_same_hash;
322 /* List of anticipatable occurrences in basic blocks in the function.
323 An "anticipatable occurrence" is one that is the first occurrence in the
324 basic block, the operands are not modified in the basic block prior
325 to the occurrence and the output is not used between the start of
326 the block and the occurrence. */
327 struct occr *antic_occr;
328 /* List of available occurrences in basic blocks in the function.
329 An "available occurrence" is one that is the last occurrence in the
330 basic block and the operands are not modified by following statements in
331 the basic block [including this insn]. */
332 struct occr *avail_occr;
333 /* Non-null if the computation is PRE redundant.
334 The value is the newly created pseudo-reg to record a copy of the
335 expression in all the places that reach the redundant copy. */
336 rtx reaching_reg;
337 };
339 /* Occurrence of an expression.
340 There is one per basic block. If a pattern appears more than once the
341 last appearance is used [or first for anticipatable expressions]. */
343 struct occr
344 {
345 /* Next occurrence of this expression. */
346 struct occr *next;
347 /* The insn that computes the expression. */
348 rtx insn;
349 /* Nonzero if this [anticipatable] occurrence has been deleted. */
350 char deleted_p;
351 /* Nonzero if this [available] occurrence has been copied to
352 reaching_reg. */
353 /* ??? This is mutually exclusive with deleted_p, so they could share
354 a bit. */
355 char copied_p;
356 };
358 /* Expression and copy propagation hash tables.
359 Each hash table is an array of buckets.
360 ??? It is known that if it were an array of entries, structure elements
361 `next_same_hash' and `bitmap_index' wouldn't be necessary. However, it is
362 not clear whether in the final analysis a sufficient amount of memory would
363 be saved as the size of the available expression bitmaps would be larger
364 [one could build a mapping table without holes afterwards though].
365 Someday I'll perform the computation and figure it out. */
367 struct hash_table
368 {
369 /* The table itself.
370 This is an array of `expr_hash_table_size' elements. */
371 struct expr **table;
373 /* Size of the hash table, in elements. */
374 unsigned int size;
376 /* Number of hash table elements. */
377 unsigned int n_elems;
379 /* Whether the table is an expression table or a copy propagation one. */
380 int set_p;
381 };
383 /* Expression hash table. */
384 static struct hash_table expr_hash_table;
386 /* Copy propagation hash table. */
387 static struct hash_table set_hash_table;
389 /* Mapping of uids to cuids.
390 Only real insns get cuids. */
391 static int *uid_cuid;
393 /* Highest UID in UID_CUID. */
394 static int max_uid;
396 /* Get the cuid of an insn. */
397 #ifdef ENABLE_CHECKING
398 #define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
399 #else
400 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
401 #endif
403 /* Number of cuids. */
404 static int max_cuid;
406 /* Mapping of cuids to insns. */
407 static rtx *cuid_insn;
409 /* Get insn from cuid. */
410 #define CUID_INSN(CUID) (cuid_insn[CUID])
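/* For example (illustrative): if the only real insns in a function have
   UIDs 4, 9 and 23, their CUIDs are 0, 1 and 2; INSN_CUID on the insn
   with UID 9 yields 1, and CUID_INSN (1) maps back to that insn.  */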
412 /* Maximum register number in function prior to doing gcse + 1.
413 Registers created during this pass have regno >= max_gcse_regno.
414 This is named with "gcse" to not collide with global of same name. */
415 static unsigned int max_gcse_regno;
417 /* Table of registers that are modified.
419 For each register, each element is a list of places where the pseudo-reg
420 is set.
422 For simplicity, GCSE is done on sets of pseudo-regs only. PRE GCSE only
423 requires knowledge of which blocks kill which regs [and thus could use
424 a bitmap instead of the lists `reg_set_table' uses].
426 `reg_set_table' could be turned into an array of bitmaps (num-bbs x
427 num-regs) [however perhaps it may be useful to keep the data as is]. One
428 advantage of recording things this way is that `reg_set_table' is fairly
429 sparse with respect to pseudo regs but for hard regs could be fairly dense
430 [relatively speaking]. And recording sets of pseudo-regs in lists speeds
431 up functions like compute_transp since in the case of pseudo-regs we only
432 need to iterate over the number of times a pseudo-reg is set, not over the
433 number of basic blocks [clearly there is a bit of a slow down in the cases
434 where a pseudo is set more than once in a block, however it is believed
435 that the net effect is to speed things up]. This isn't done for hard-regs
436 because recording call-clobbered hard-regs in `reg_set_table' at each
437 function call can consume a fair bit of memory, and iterating over
438 hard-regs stored this way in compute_transp will be more expensive. */
440 typedef struct reg_set
441 {
442 /* The next setting of this register. */
443 struct reg_set *next;
444 /* The insn where it was set. */
445 rtx insn;
446 } reg_set;
448 static reg_set **reg_set_table;
450 /* Size of `reg_set_table'.
451 The table starts out at max_gcse_regno + slop, and is enlarged as
452 necessary. */
453 static int reg_set_table_size;
455 /* Amount to grow `reg_set_table' by when it's full. */
456 #define REG_SET_TABLE_SLOP 100
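/* Usage sketch (hypothetical code, not part of this file): the number of
   recorded sets of pseudo-register REGNO is found by walking its chain:
     struct reg_set *r;
     int n = 0;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       n++;
   compute_transp does essentially this walk rather than scanning every
   basic block.  */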
458 /* This is a list of expressions which are MEMs and will be used by load
459 or store motion.
460 Load motion tracks MEMs which aren't killed by
461 anything except itself. (ie, loads and stores to a single location).
462 We can then allow movement of these MEM refs with a little special
463 allowance. (all stores copy the same value to the reaching reg used
464 for the loads). This means all values used to store into memory must have
465 no side effects so we can re-issue the setter value.
466 Store Motion uses this structure as an expression table to track stores
467 which look interesting, and might be moveable towards the exit block. */
469 struct ls_expr
470 {
471 struct expr * expr; /* Gcse expression reference for LM. */
472 rtx pattern; /* Pattern of this mem. */
473 rtx loads; /* INSN list of loads seen. */
474 rtx stores; /* INSN list of stores seen. */
475 struct ls_expr * next; /* Next in the list. */
476 int invalid; /* Invalid for some reason. */
477 int index; /* If it maps to a bitmap index. */
478 int hash_index; /* Index when in a hash table. */
479 rtx reaching_reg; /* Register to use when re-writing. */
480 };
482 /* Head of the list of load/store memory refs. */
483 static struct ls_expr * pre_ldst_mems = NULL;
485 /* Bitmap containing one bit for each register in the program.
486 Used when performing GCSE to track which registers have been set since
487 the start of the basic block. */
488 static regset reg_set_bitmap;
490 /* For each block, a bitmap of registers set in the block.
491 This is used by expr_killed_p and compute_transp.
492 It is computed during hash table computation and not by compute_sets
493 as it includes registers added since the last pass (or between cprop and
494 gcse) and it's currently not easy to realloc sbitmap vectors. */
495 static sbitmap *reg_set_in_block;
497 /* Array, indexed by basic block number for a list of insns which modify
498 memory within that block. */
499 static rtx * modify_mem_list;
500 bitmap modify_mem_list_set;
502 /* This array parallels modify_mem_list, but is kept canonicalized. */
503 static rtx * canon_modify_mem_list;
504 bitmap canon_modify_mem_list_set;
505 /* Various variables for statistics gathering. */
507 /* Memory used in a pass.
508 This isn't intended to be absolutely precise. Its intent is only
509 to keep an eye on memory usage. */
510 static int bytes_used;
512 /* GCSE substitutions made. */
513 static int gcse_subst_count;
514 /* Number of copy instructions created. */
515 static int gcse_create_count;
516 /* Number of constants propagated. */
517 static int const_prop_count;
518 /* Number of copies propagated. */
519 static int copy_prop_count;
521 /* These variables are used by classic GCSE.
522 Normally they'd be defined a bit later, but `rd_gen' needs to
523 be declared sooner. */
525 /* Each block has a bitmap of each type.
526 The length of each block's bitmap is:
528 max_cuid - for reaching definitions
529 n_exprs - for available expressions
531 Thus we view the bitmaps as two-dimensional arrays, i.e.
532 rd_kill[block_num][cuid_num]
533 ae_kill[block_num][expr_num] */
535 /* For reaching defs */
536 static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;
538 /* for available exprs */
539 static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
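/* For example (a sketch): whether the expression with index INDX is
   killed in block BB is tested with the two-dimensional view above:
     if (TEST_BIT (ae_kill[bb->index], indx))
       ...
   and rd_gen[bb->index] is likewise indexed by insn CUIDs.  */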
541 /* Objects of this type are passed around by the null-pointer check
542 removal routines. */
543 struct null_pointer_info
544 {
545 /* The basic block being processed. */
546 basic_block current_block;
547 /* The first register to be handled in this pass. */
548 unsigned int min_reg;
549 /* One greater than the last register to be handled in this pass. */
550 unsigned int max_reg;
551 sbitmap *nonnull_local;
552 sbitmap *nonnull_killed;
553 };
555 static void compute_can_copy PARAMS ((void));
556 static char *gmalloc PARAMS ((unsigned int));
557 static char *grealloc PARAMS ((char *, unsigned int));
558 static char *gcse_alloc PARAMS ((unsigned long));
559 static void alloc_gcse_mem PARAMS ((rtx));
560 static void free_gcse_mem PARAMS ((void));
561 static void alloc_reg_set_mem PARAMS ((int));
562 static void free_reg_set_mem PARAMS ((void));
563 static int get_bitmap_width PARAMS ((int, int, int));
564 static void record_one_set PARAMS ((int, rtx));
565 static void record_set_info PARAMS ((rtx, rtx, void *));
566 static void compute_sets PARAMS ((rtx));
567 static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
568 static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
569 static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
570 static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
571 static int want_to_gcse_p PARAMS ((rtx));
572 static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
573 static int oprs_anticipatable_p PARAMS ((rtx, rtx));
574 static int oprs_available_p PARAMS ((rtx, rtx));
575 static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
576 int, int, struct hash_table *));
577 static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
578 static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
579 static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
580 static unsigned int hash_string_1 PARAMS ((const char *));
581 static unsigned int hash_set PARAMS ((int, int));
582 static int expr_equiv_p PARAMS ((rtx, rtx));
583 static void record_last_reg_set_info PARAMS ((rtx, int));
584 static void record_last_mem_set_info PARAMS ((rtx));
585 static void record_last_set_info PARAMS ((rtx, rtx, void *));
586 static void compute_hash_table PARAMS ((struct hash_table *));
587 static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
588 static void free_hash_table PARAMS ((struct hash_table *));
589 static void compute_hash_table_work PARAMS ((struct hash_table *));
590 static void dump_hash_table PARAMS ((FILE *, const char *,
591 struct hash_table *));
592 static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
593 static struct expr *lookup_set PARAMS ((unsigned int, rtx, struct hash_table *));
594 static struct expr *next_set PARAMS ((unsigned int, struct expr *));
595 static void reset_opr_set_tables PARAMS ((void));
596 static int oprs_not_set_p PARAMS ((rtx, rtx));
597 static void mark_call PARAMS ((rtx));
598 static void mark_set PARAMS ((rtx, rtx));
599 static void mark_clobber PARAMS ((rtx, rtx));
600 static void mark_oprs_set PARAMS ((rtx));
601 static void alloc_cprop_mem PARAMS ((int, int));
602 static void free_cprop_mem PARAMS ((void));
603 static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
604 static void compute_transpout PARAMS ((void));
605 static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
606 struct hash_table *));
607 static void compute_cprop_data PARAMS ((void));
608 static void find_used_regs PARAMS ((rtx *, void *));
609 static int try_replace_reg PARAMS ((rtx, rtx, rtx));
610 static struct expr *find_avail_set PARAMS ((int, rtx));
611 static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
612 static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
613 static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
614 static void canon_list_insert PARAMS ((rtx, rtx, void *));
615 static int cprop_insn PARAMS ((rtx, int));
616 static int cprop PARAMS ((int));
617 static int one_cprop_pass PARAMS ((int, int, int));
618 static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
619 static struct expr *find_bypass_set PARAMS ((int, int));
620 static int bypass_block PARAMS ((basic_block, rtx, rtx));
621 static int bypass_conditional_jumps PARAMS ((void));
622 static void alloc_pre_mem PARAMS ((int, int));
623 static void free_pre_mem PARAMS ((void));
624 static void compute_pre_data PARAMS ((void));
625 static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
626 basic_block));
627 static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
628 static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
629 static void pre_insert_copies PARAMS ((void));
630 static int pre_delete PARAMS ((void));
631 static int pre_gcse PARAMS ((void));
632 static int one_pre_gcse_pass PARAMS ((int));
633 static void add_label_notes PARAMS ((rtx, rtx));
634 static void alloc_code_hoist_mem PARAMS ((int, int));
635 static void free_code_hoist_mem PARAMS ((void));
636 static void compute_code_hoist_vbeinout PARAMS ((void));
637 static void compute_code_hoist_data PARAMS ((void));
638 static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
639 char *));
640 static void hoist_code PARAMS ((void));
641 static int one_code_hoisting_pass PARAMS ((void));
642 static void alloc_rd_mem PARAMS ((int, int));
643 static void free_rd_mem PARAMS ((void));
644 static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
645 static void compute_kill_rd PARAMS ((void));
646 static void compute_rd PARAMS ((void));
647 static void alloc_avail_expr_mem PARAMS ((int, int));
648 static void free_avail_expr_mem PARAMS ((void));
649 static void compute_ae_gen PARAMS ((struct hash_table *));
650 static int expr_killed_p PARAMS ((rtx, basic_block));
651 static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
652 static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
653 basic_block, int));
654 static rtx computing_insn PARAMS ((struct expr *, rtx));
655 static int def_reaches_here_p PARAMS ((rtx, rtx));
656 static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
657 static int handle_avail_expr PARAMS ((rtx, struct expr *));
658 static int classic_gcse PARAMS ((void));
659 static int one_classic_gcse_pass PARAMS ((int));
660 static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
661 static int delete_null_pointer_checks_1 PARAMS ((unsigned int *,
662 sbitmap *, sbitmap *,
663 struct null_pointer_info *));
664 static rtx process_insert_insn PARAMS ((struct expr *));
665 static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
666 static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
667 basic_block, int, char *));
668 static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *,
669 basic_block, char *));
670 static struct ls_expr * ldst_entry PARAMS ((rtx));
671 static void free_ldst_entry PARAMS ((struct ls_expr *));
672 static void free_ldst_mems PARAMS ((void));
673 static void print_ldst_list PARAMS ((FILE *));
674 static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
675 static int enumerate_ldsts PARAMS ((void));
676 static inline struct ls_expr * first_ls_expr PARAMS ((void));
677 static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
678 static int simple_mem PARAMS ((rtx));
679 static void invalidate_any_buried_refs PARAMS ((rtx));
680 static void compute_ld_motion_mems PARAMS ((void));
681 static void trim_ld_motion_mems PARAMS ((void));
682 static void update_ld_motion_stores PARAMS ((struct expr *));
683 static void reg_set_info PARAMS ((rtx, rtx, void *));
684 static int store_ops_ok PARAMS ((rtx, basic_block));
685 static void find_moveable_store PARAMS ((rtx));
686 static int compute_store_table PARAMS ((void));
687 static int load_kills_store PARAMS ((rtx, rtx));
688 static int find_loads PARAMS ((rtx, rtx));
689 static int store_killed_in_insn PARAMS ((rtx, rtx));
690 static int store_killed_after PARAMS ((rtx, rtx, basic_block));
691 static int store_killed_before PARAMS ((rtx, rtx, basic_block));
692 static void build_store_vectors PARAMS ((void));
693 static void insert_insn_start_bb PARAMS ((rtx, basic_block));
694 static int insert_store PARAMS ((struct ls_expr *, edge));
695 static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
696 static void delete_store PARAMS ((struct ls_expr *,
697 basic_block));
698 static void free_store_memory PARAMS ((void));
699 static void store_motion PARAMS ((void));
700 static void free_insn_expr_list_list PARAMS ((rtx *));
701 static void clear_modify_mem_tables PARAMS ((void));
702 static void free_modify_mem_tables PARAMS ((void));
703 static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
704 static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx*));
705 static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx*));
706 static void local_cprop_pass PARAMS ((int));
708 /* Entry point for global common subexpression elimination.
709 F is the first instruction in the function. */
711 int
712 gcse_main (f, file)
713 rtx f;
714 FILE *file;
715 {
716 int changed, pass;
717 /* Bytes used at start of pass. */
718 int initial_bytes_used;
719 /* Maximum number of bytes used by a pass. */
720 int max_pass_bytes;
721 /* Point to release obstack data from for each pass. */
722 char *gcse_obstack_bottom;
724 /* We do not construct an accurate cfg in functions which call
725 setjmp, so just punt to be safe. */
726 if (current_function_calls_setjmp)
727 return 0;
729 /* Assume that we do not need to run jump optimizations after gcse. */
730 run_jump_opt_after_gcse = 0;
732 /* For calling dump_foo fns from gdb. */
733 debug_stderr = stderr;
736 /* Identify the basic block information for this function, including
737 successors and predecessors. */
738 max_gcse_regno = max_reg_num ();
740 if (file)
741 dump_flow_info (file);
743 /* Return if there's nothing to do. */
744 if (n_basic_blocks <= 1)
745 return 0;
747 /* Trying to perform global optimizations on flow graphs which have
748 a high connectivity will take a long time and is unlikely to be
749 particularly useful.
751 In normal circumstances a cfg should have about twice as many edges
752 as blocks. But we do not want to punish small functions which have
753 a couple switch statements. So we require a relatively large number
754 of basic blocks and the ratio of edges to blocks to be high. */
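/* For example (numbers for illustration only): a function with 1500
   basic blocks and 33000 edges averages 22 edges per block, so it trips
   both thresholds below and gcse is skipped.  */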
755 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
756 {
757 if (warn_disabled_optimization)
758 warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
759 n_basic_blocks, n_edges / n_basic_blocks);
761 return 0;
762 }
763 /* If allocating memory for the cprop bitmap would take up too much
764 storage it's better just to disable the optimization. */
765 if ((n_basic_blocks
766 * SBITMAP_SET_SIZE (max_gcse_regno)
767 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
768 {
769 if (warn_disabled_optimization)
770 warning ("GCSE disabled: %d basic blocks and %d registers",
771 n_basic_blocks, max_gcse_regno);
773 return 0;
774 }
776 /* See what modes support reg/reg copy operations. */
777 if (! can_copy_init_p)
778 {
779 compute_can_copy ();
780 can_copy_init_p = 1;
781 }
783 gcc_obstack_init (&gcse_obstack);
784 bytes_used = 0;
786 /* We need alias. */
787 init_alias_analysis ();
788 /* Record where pseudo-registers are set. This data is kept accurate
789 during each pass. ??? We could also record hard-reg information here
790 [since it's unchanging], however it is currently done during hash table
793 It may be tempting to compute MEM set information here too, but MEM sets
794 will be subject to code motion one day and thus we need to compute
795 information about memory sets when we build the hash tables. */
797 alloc_reg_set_mem (max_gcse_regno);
798 compute_sets (f);
800 pass = 0;
801 initial_bytes_used = bytes_used;
802 max_pass_bytes = 0;
803 gcse_obstack_bottom = gcse_alloc (1);
804 changed = 1;
805 while (changed && pass < MAX_GCSE_PASSES)
806 {
807 changed = 0;
808 if (file)
809 fprintf (file, "GCSE pass %d\n\n", pass + 1);
811 /* Initialize bytes_used to the space for the pred/succ lists,
812 and the reg_set_table data. */
813 bytes_used = initial_bytes_used;
815 /* Each pass may create new registers, so recalculate each time. */
816 max_gcse_regno = max_reg_num ();
818 alloc_gcse_mem (f);
820 /* Don't allow constant propagation to modify jumps
821 during this pass. */
822 changed = one_cprop_pass (pass + 1, 0, 0);
824 if (optimize_size)
825 changed |= one_classic_gcse_pass (pass + 1);
826 else
827 {
828 changed |= one_pre_gcse_pass (pass + 1);
829 /* We may have just created new basic blocks. Release and
830 recompute various things which are sized on the number of
831 basic blocks. */
832 if (changed)
833 {
834 free_modify_mem_tables ();
835 modify_mem_list
836 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
837 canon_modify_mem_list
838 = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
839 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
840 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
841 }
842 free_reg_set_mem ();
843 alloc_reg_set_mem (max_reg_num ());
844 compute_sets (f);
845 run_jump_opt_after_gcse = 1;
846 }
848 if (max_pass_bytes < bytes_used)
849 max_pass_bytes = bytes_used;
851 /* Free up memory, then reallocate for code hoisting. We can
852 not re-use the existing allocated memory because the tables
853 will not have info for the insns or registers created by
854 partial redundancy elimination. */
855 free_gcse_mem ();
857 /* It does not make sense to run code hoisting unless we are optimizing
858 for code size -- it rarely makes programs faster, and can make
859 them bigger if we did partial redundancy elimination (when optimizing
860 for space, we use a classic gcse algorithm instead of partial
861 redundancy algorithms). */
862 if (optimize_size)
863 {
864 max_gcse_regno = max_reg_num ();
865 alloc_gcse_mem (f);
866 changed |= one_code_hoisting_pass ();
867 free_gcse_mem ();
869 if (max_pass_bytes < bytes_used)
870 max_pass_bytes = bytes_used;
871 }
873 if (file)
874 {
875 fprintf (file, "\n");
876 fflush (file);
877 }
879 obstack_free (&gcse_obstack, gcse_obstack_bottom);
880 pass++;
881 }
883 /* Do one last pass of copy propagation, including cprop into
884 conditional jumps. */
886 max_gcse_regno = max_reg_num ();
887 alloc_gcse_mem (f);
888 /* This time, go ahead and allow cprop to alter jumps. */
889 one_cprop_pass (pass + 1, 1, 0);
890 free_gcse_mem ();
892 if (file)
893 {
894 fprintf (file, "GCSE of %s: %d basic blocks, ",
895 current_function_name, n_basic_blocks);
896 fprintf (file, "%d pass%s, %d bytes\n\n",
897 pass, pass > 1 ? "es" : "", max_pass_bytes);
898 }
900 obstack_free (&gcse_obstack, NULL);
902 /* We are finished with alias. */
903 end_alias_analysis ();
904 allocate_reg_info (max_reg_num (), FALSE, FALSE);
906 /* Store motion disabled until it is fixed. */
907 if (0 && !optimize_size && flag_gcse_sm)
908 store_motion ();
909 /* Record where pseudo-registers are set. */
910 return run_jump_opt_after_gcse;
911 }
913 /* Misc. utilities. */
915 /* Compute which modes support reg/reg copy operations. */
917 static void
918 compute_can_copy ()
919 {
920 int i;
921 #ifndef AVOID_CCMODE_COPIES
922 rtx reg, insn;
923 #endif
924 memset (can_copy_p, 0, NUM_MACHINE_MODES);
926 start_sequence ();
927 for (i = 0; i < NUM_MACHINE_MODES; i++)
928 if (GET_MODE_CLASS (i) == MODE_CC)
929 {
930 #ifdef AVOID_CCMODE_COPIES
931 can_copy_p[i] = 0;
932 #else
933 reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
934 insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
935 if (recog (PATTERN (insn), insn, NULL) >= 0)
936 can_copy_p[i] = 1;
937 #endif
938 }
939 else
940 can_copy_p[i] = 1;
942 end_sequence ();
943 }
945 /* Cover function to xmalloc to record bytes allocated. */
947 static char *
948 gmalloc (size)
949 unsigned int size;
950 {
951 bytes_used += size;
952 return xmalloc (size);
953 }
955 /* Cover function to xrealloc.
956 We don't record the additional size since we don't know it.
957 It won't affect memory usage stats much anyway. */
959 static char *
960 grealloc (ptr, size)
961 char *ptr;
962 unsigned int size;
963 {
964 return xrealloc (ptr, size);
965 }
967 /* Cover function to obstack_alloc. */
969 static char *
970 gcse_alloc (size)
971 unsigned long size;
972 {
973 bytes_used += size;
974 return (char *) obstack_alloc (&gcse_obstack, size);
975 }
977 /* Allocate memory for the cuid mapping array,
978 and reg/memory set tracking tables.
980 This is called at the start of each pass. */
982 static void
983 alloc_gcse_mem (f)
984 rtx f;
985 {
986 int i, n;
987 rtx insn;
989 /* Find the largest UID and create a mapping from UIDs to CUIDs.
990 CUIDs are like UIDs except they increase monotonically, have no gaps,
991 and only apply to real insns. */
993 max_uid = get_max_uid ();
994 n = (max_uid + 1) * sizeof (int);
995 uid_cuid = (int *) gmalloc (n);
996 memset ((char *) uid_cuid, 0, n);
997 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
998 {
999 if (INSN_P (insn))
1000 uid_cuid[INSN_UID (insn)] = i++;
1001 else
1002 uid_cuid[INSN_UID (insn)] = i;
1003 }
1005 /* Create a table mapping cuids to insns. */
1007 max_cuid = i;
1008 n = (max_cuid + 1) * sizeof (rtx);
1009 cuid_insn = (rtx *) gmalloc (n);
1010 memset ((char *) cuid_insn, 0, n);
1011 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
1012 if (INSN_P (insn))
1013 CUID_INSN (i++) = insn;
1015 /* Allocate vars to track sets of regs. */
1016 reg_set_bitmap = BITMAP_XMALLOC ();
1018 /* Allocate vars to track sets of regs, memory per block. */
1019 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
1020 max_gcse_regno);
1021 /* Allocate array to keep a list of insns which modify memory in each
1022 basic block. */
1023 modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1024 canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
1025 memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
1026 memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
1027 modify_mem_list_set = BITMAP_XMALLOC ();
1028 canon_modify_mem_list_set = BITMAP_XMALLOC ();
1031 /* Free memory allocated by alloc_gcse_mem. */
1033 static void
1034 free_gcse_mem ()
1035 {
1036 free (uid_cuid);
1037 free (cuid_insn);
1039 BITMAP_XFREE (reg_set_bitmap);
1041 sbitmap_vector_free (reg_set_in_block);
1042 free_modify_mem_tables ();
1043 BITMAP_XFREE (modify_mem_list_set);
1044 BITMAP_XFREE (canon_modify_mem_list_set);
1045 }
1047 /* Many of the global optimization algorithms work by solving dataflow
1048 equations for various expressions. Initially, some local value is
1049 computed for each expression in each block. Then, the values across the
1050 various blocks are combined (by following flow graph edges) to arrive at
1051 global values. Conceptually, each set of equations is independent. We
1052 may therefore solve all the equations in parallel, solve them one at a
1053 time, or pick any intermediate approach.
1055 When you're going to need N two-dimensional bitmaps, each X (say, the
1056 number of blocks) by Y (say, the number of expressions), call this
1057 function. It's not important what X and Y represent; only that Y
1058 correspond to the things that can be done in parallel. This function will
1059 return an appropriate chunking factor C; you should solve C sets of
1060 equations in parallel. By going through this function, we can easily
1061 trade space against time; by solving fewer equations in parallel we use
1062 less space. */
1064 static int
1065 get_bitmap_width (n, x, y)
1066 int n;
1067 int x;
1068 int y;
1069 {
1070 /* It's not really worth figuring out *exactly* how much memory will
1071 be used by a particular choice. The important thing is to get
1072 something approximately right. */
1073 size_t max_bitmap_memory = 10 * 1024 * 1024;
1075 /* The number of bytes we'd use for a single column of minimum
1076 width. */
1077 size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);
1079 /* Often, it's reasonable just to solve all the equations in
1080 parallel. */
1081 if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
1082 return y;
1084 /* Otherwise, pick the largest width we can, without going over the
1085 limit. */
1086 return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
1087 / column_size);
1088 }
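/* A worked example with illustrative numbers: for n == 4 dataflow sets
   over x == 1000 blocks with 8-byte sbitmap elements, column_size is
   4 * 1000 * 8 == 32000 bytes.  With y == 50000 expressions the full
   solution would need SBITMAP_SET_SIZE (y) == 782 such columns, about
   25MB, over the 10MB cap above, so we return
   64 * ((10485760 + 32000 - 1) / 32000) == 64 * 328 == 20992 and solve
   roughly 21000 equations at a time.  */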
1090 /* Compute the local properties of each recorded expression.
1092 Local properties are those that are defined by the block, irrespective of
1093 other blocks.
1095 An expression is transparent in a block if its operands are not modified
1096 in the block.
1098 An expression is computed (locally available) in a block if it is computed
1099 at least once and the expression would contain the same value if the
1100 computation was moved to the end of the block.
1102 An expression is locally anticipatable in a block if it is computed at
1103 least once and the expression would contain the same value if the computation
1104 was moved to the beginning of the block.
1106 We call this routine for cprop, pre and code hoisting. They all compute
1107 basically the same information and thus can easily share this code.
1109 TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
1110 properties. If NULL, then it is not necessary to compute or record that
1111 particular property.
1113 TABLE controls which hash table to look at. If it is the SET hash table,
1114 additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
1115 ABSALTERED. */
1117 static void
1118 compute_local_properties (transp, comp, antloc, table)
1119 sbitmap *transp;
1120 sbitmap *comp;
1121 sbitmap *antloc;
1122 struct hash_table *table;
1123 {
1124 unsigned int i;
1126 /* Initialize any bitmaps that were passed in. */
1127 if (transp)
1128 {
1129 if (table->set_p)
1130 sbitmap_vector_zero (transp, last_basic_block);
1131 else
1132 sbitmap_vector_ones (transp, last_basic_block);
1133 }
1135 if (comp)
1136 sbitmap_vector_zero (comp, last_basic_block);
1137 if (antloc)
1138 sbitmap_vector_zero (antloc, last_basic_block);
1140 for (i = 0; i < table->size; i++)
1141 {
1142 struct expr *expr;
1144 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
1145 {
1146 int indx = expr->bitmap_index;
1147 struct occr *occr;
1149 /* The expression is transparent in this block if it is not killed.
1150 We start by assuming all are transparent [none are killed], and
1151 then reset the bits for those that are. */
1152 if (transp)
1153 compute_transp (expr->expr, indx, transp, table->set_p);
1155 /* The occurrences recorded in antic_occr are exactly those that
1156 we want to set to nonzero in ANTLOC. */
1157 if (antloc)
1158 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
1159 {
1160 SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);
1162 /* While we're scanning the table, this is a good place to
1163 initialize this. */
1164 occr->deleted_p = 0;
1165 }
1167 /* The occurrences recorded in avail_occr are exactly those that
1168 we want to set to nonzero in COMP. */
1169 if (comp)
1170 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
1171 {
1172 SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);
1174 /* While we're scanning the table, this is a good place to
1175 initialize this. */
1176 occr->copied_p = 0;
1177 }
1179 /* While we're scanning the table, this is a good place to
1180 initialize this. */
1181 expr->reaching_reg = 0;
1182 }
1183 }
1184 }
1186 /* Register set information.
1188 `reg_set_table' records where each register is set or otherwise
1189 operated on. */
1191 static struct obstack reg_set_obstack;
1193 static void
1194 alloc_reg_set_mem (n_regs)
1195 int n_regs;
1196 {
1197 int n;
1199 reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
1200 n = reg_set_table_size * sizeof (struct reg_set *);
1201 reg_set_table = (struct reg_set **) gmalloc (n);
1202 memset ((char *) reg_set_table, 0, n);
1204 gcc_obstack_init (&reg_set_obstack);
1205 }
1207 static void
1208 free_reg_set_mem ()
1209 {
1210 free (reg_set_table);
1211 obstack_free (&reg_set_obstack, NULL);
1212 }
1214 /* Record REGNO in the reg_set table. */
1216 static void
1217 record_one_set (regno, insn)
1218 int regno;
1219 rtx insn;
1220 {
1221 /* Allocate a new reg_set element and link it onto the list. */
1222 struct reg_set *new_reg_info;
1224 /* If the table isn't big enough, enlarge it. */
1225 if (regno >= reg_set_table_size)
1226 {
1227 int new_size = regno + REG_SET_TABLE_SLOP;
1229 reg_set_table
1230 = (struct reg_set **) grealloc ((char *) reg_set_table,
1231 new_size * sizeof (struct reg_set *));
1232 memset ((char *) (reg_set_table + reg_set_table_size), 0,
1233 (new_size - reg_set_table_size) * sizeof (struct reg_set *));
1234 reg_set_table_size = new_size;
1235 }
1237 new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
1238 sizeof (struct reg_set));
1239 bytes_used += sizeof (struct reg_set);
1240 new_reg_info->insn = insn;
1241 new_reg_info->next = reg_set_table[regno];
1242 reg_set_table[regno] = new_reg_info;
1243 }
1245 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1246 an insn. The DATA is really the instruction in which the SET is
1247 occurring. */
1249 static void
1250 record_set_info (dest, setter, data)
1251 rtx dest, setter ATTRIBUTE_UNUSED;
1252 void *data;
1253 {
1254 rtx record_set_insn = (rtx) data;
1256 if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
1257 record_one_set (REGNO (dest), record_set_insn);
1258 }
1260 /* Scan the function and record each set of each pseudo-register.
1262 This is called once, at the start of the gcse pass. See the comments for
1263 `reg_set_table' for further documentation. */
1265 static void
1266 compute_sets (f)
1267 rtx f;
1268 {
1269 rtx insn;
1271 for (insn = f; insn != 0; insn = NEXT_INSN (insn))
1272 if (INSN_P (insn))
1273 note_stores (PATTERN (insn), record_set_info, insn);
1274 }
1276 /* Hash table support. */
1278 struct reg_avail_info
1279 {
1280 basic_block last_bb;
1281 int first_set;
1282 int last_set;
1283 };
1285 static struct reg_avail_info *reg_avail_info;
1286 static basic_block current_bb;
1289 /* See whether X, the source of a set, is something we want to consider for
1290 GCSE. */
1292 static GTY(()) rtx test_insn;
1294 static int
1295 want_to_gcse_p (x)
1296 rtx x;
1297 {
1298 int num_clobbers = 0;
1299 int icode;
1300 switch (GET_CODE (x))
1301 {
1302 case REG:
1303 case SUBREG:
1304 case CONST_INT:
1305 case CONST_DOUBLE:
1306 case CONST_VECTOR:
1307 case CALL:
1308 case CONSTANT_P_RTX:
1309 return 0;
1311 default:
1312 break;
1313 }
1315 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1316 if (general_operand (x, GET_MODE (x)))
1317 return 1;
1318 else if (GET_MODE (x) == VOIDmode)
1319 return 0;
1321 /* Otherwise, check if we can make a valid insn from it. First initialize
1322 our test insn if we haven't already. */
1323 if (test_insn == 0)
1324 {
1325 test_insn
1326 = make_insn_raw (gen_rtx_SET (VOIDmode,
1327 gen_rtx_REG (word_mode,
1328 FIRST_PSEUDO_REGISTER * 2),
1329 const0_rtx));
1330 NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
1331 }
1333 /* Now make an insn like the one we would make when GCSE'ing and see if
1334 it is valid. */
1335 PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
1336 SET_SRC (PATTERN (test_insn)) = x;
1337 return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
1338 && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
1339 }
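/* For example (illustrative): a source like (plus:SI (reg:SI 60)
   (const_int 4)) can be a GCSE candidate, while a bare (reg:SI 60) or
   (const_int 4) is rejected by the switch above; propagating those is
   cprop's job, not expression GCSE's.  */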
1341 /* Return nonzero if the operands of expression X are unchanged from the
1342 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1343 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1345 static int
1346 oprs_unchanged_p (x, insn, avail_p)
1347 rtx x, insn;
1348 int avail_p;
1349 {
1350 int i, j;
1351 enum rtx_code code;
1352 const char *fmt;
1354 if (x == 0)
1355 return 1;
1357 code = GET_CODE (x);
1358 switch (code)
1359 {
1360 case REG:
1361 {
1362 struct reg_avail_info *info = &reg_avail_info[REGNO (x)];
1364 if (info->last_bb != current_bb)
1365 return 1;
1366 if (avail_p)
1367 return info->last_set < INSN_CUID (insn);
1368 else
1369 return info->first_set >= INSN_CUID (insn);
1370 }
1372 case MEM:
1373 if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
1374 x, avail_p))
1375 return 0;
1376 else
1377 return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);
1379 case PRE_DEC:
1380 case PRE_INC:
1381 case POST_DEC:
1382 case POST_INC:
1383 case PRE_MODIFY:
1384 case POST_MODIFY:
1385 return 0;
1387 case PC:
1388 case CC0: /*FIXME*/
1389 case CONST:
1390 case CONST_INT:
1391 case CONST_DOUBLE:
1392 case CONST_VECTOR:
1393 case SYMBOL_REF:
1394 case LABEL_REF:
1395 case ADDR_VEC:
1396 case ADDR_DIFF_VEC:
1397 return 1;
1399 default:
1400 break;
1401 }
1403 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1404 {
1405 if (fmt[i] == 'e')
1406 {
1407 /* If we are about to do the last recursive call needed at this
1408 level, change it into iteration. This function is called enough
1409 to be worth it. */
1410 if (i == 0)
1411 return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
1413 else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
1414 return 0;
1415 }
1416 else if (fmt[i] == 'E')
1417 for (j = 0; j < XVECLEN (x, i); j++)
1418 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
1419 return 0;
1420 }
1422 return 1;
1423 }
1425 /* Used for communication between mems_conflict_for_gcse_p and
1426 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1427 conflict between two memory references. */
1428 static int gcse_mems_conflict_p;
1430 /* Used for communication between mems_conflict_for_gcse_p and
1431 load_killed_in_block_p. A memory reference for a load instruction,
1432 mems_conflict_for_gcse_p will see if a memory store conflicts with
1433 this memory load. */
1434 static rtx gcse_mem_operand;
1436 /* DEST is the output of an instruction. If it is a memory reference, and
1437 possibly conflicts with the load found in gcse_mem_operand, then set
1438 gcse_mems_conflict_p to a nonzero value. */
1440 static void
1441 mems_conflict_for_gcse_p (dest, setter, data)
1442 rtx dest, setter ATTRIBUTE_UNUSED;
1443 void *data ATTRIBUTE_UNUSED;
1444 {
1445 while (GET_CODE (dest) == SUBREG
1446 || GET_CODE (dest) == ZERO_EXTRACT
1447 || GET_CODE (dest) == SIGN_EXTRACT
1448 || GET_CODE (dest) == STRICT_LOW_PART)
1449 dest = XEXP (dest, 0);
1451 /* If DEST is not a MEM, then it will not conflict with the load. Note
1452 that function calls are assumed to clobber memory, but are handled
1453 elsewhere. */
1454 if (GET_CODE (dest) != MEM)
1455 return;
1457 /* If we are setting a MEM in our list of specially recognized MEMs,
1458 don't mark as killed this time. */
1460 if (dest == gcse_mem_operand && pre_ldst_mems != NULL)
1461 {
1462 if (!find_rtx_in_ldst (dest))
1463 gcse_mems_conflict_p = 1;
1464 return;
1465 }
1467 if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
1468 rtx_addr_varies_p))
1469 gcse_mems_conflict_p = 1;
1470 }
1472 /* Return nonzero if the expression in X (a memory reference) is killed
1473 in block BB before or after the insn with the CUID in UID_LIMIT.
1474 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1477 To check the entire block, set UID_LIMIT to max_uid + 1 and
1478 AVAIL_P to 0. */
1480 static int
1481 load_killed_in_block_p (bb, uid_limit, x, avail_p)
1482 basic_block bb;
1483 int uid_limit;
1484 rtx x;
1485 int avail_p;
1486 {
1487 rtx list_entry = modify_mem_list[bb->index];
1488 while (list_entry)
1489 {
1490 rtx setter;
1491 /* Ignore entries in the list that do not apply. */
1492 if ((avail_p
1493 && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
1494 || (! avail_p
1495 && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
1496 {
1497 list_entry = XEXP (list_entry, 1);
1498 continue;
1499 }
1501 setter = XEXP (list_entry, 0);
1503 /* If SETTER is a call everything is clobbered. Note that calls
1504 to pure functions are never put on the list, so we need not
1505 worry about them. */
1506 if (GET_CODE (setter) == CALL_INSN)
1507 return 1;
1509 /* SETTER must be an INSN of some kind that sets memory. Call
1510 note_stores to examine each hunk of memory that is modified.
1512 The note_stores interface is pretty limited, so we have to
1513 communicate via global variables. Yuk. */
1514 gcse_mem_operand = x;
1515 gcse_mems_conflict_p = 0;
1516 note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
1517 if (gcse_mems_conflict_p)
1518 return 1;
1519 list_entry = XEXP (list_entry, 1);
1520 }
1522 return 0;
1523 }
1524 /* Return nonzero if the operands of expression X are unchanged from
1525 the start of INSN's basic block up to but not including INSN. */
1527 static int
1528 oprs_anticipatable_p (x, insn)
1529 rtx x, insn;
1530 {
1531 return oprs_unchanged_p (x, insn, 0);
1532 }
1534 /* Return nonzero if the operands of expression X are unchanged from
1535 INSN to the end of INSN's basic block. */
1537 static int
1538 oprs_available_p (x, insn)
1539 rtx x, insn;
1540 {
1541 return oprs_unchanged_p (x, insn, 1);
1542 }
1544 /* Hash expression X.
1546 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1547 indicating if a volatile operand is found or if the expression contains
1548 something we don't want to insert in the table.
1550 ??? One might want to merge this with canon_hash. Later. */
1552 static unsigned int
1553 hash_expr (x, mode, do_not_record_p, hash_table_size)
1554 rtx x;
1555 enum machine_mode mode;
1556 int *do_not_record_p;
1557 int hash_table_size;
1558 {
1559 unsigned int hash;
1561 *do_not_record_p = 0;
1563 hash = hash_expr_1 (x, mode, do_not_record_p);
1564 return hash % hash_table_size;
1565 }
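/* Typical use (a sketch of the lookup pattern used later in this file):
   the return value indexes a bucket whose chain is searched with
   expr_equiv_p:
     hash = hash_expr (x, mode, &do_not_record_p, table->size);
     for (expr = table->table[hash]; expr; expr = expr->next_same_hash)
       if (expr_equiv_p (expr->expr, x))
         break;
   */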
1567 /* Hash a string. Just add its bytes up. */
1569 static inline unsigned
1570 hash_string_1 (ps)
1571 const char *ps;
1572 {
1573 unsigned hash = 0;
1574 const unsigned char *p = (const unsigned char *) ps;
1576 if (p)
1577 while (*p)
1578 hash += *p++;
1580 return hash;
1581 }
1583 /* Subroutine of hash_expr to do the actual work. */
1585 static unsigned int
1586 hash_expr_1 (x, mode, do_not_record_p)
1587 rtx x;
1588 enum machine_mode mode;
1589 int *do_not_record_p;
1590 {
1591 int i, j;
1592 unsigned hash = 0;
1593 enum rtx_code code;
1594 const char *fmt;
1596 /* Used to turn recursion into iteration. We can't rely on GCC's
1597 tail-recursion elimination since we need to keep accumulating values
1598 in HASH. */
1600 repeat:
1602 if (x == 0)
1603 return hash;
1604 code = GET_CODE (x);
1605 switch (code)
1606 {
1607 case REG:
1608 hash += ((unsigned int) REG << 7) + REGNO (x);
1609 return hash;
1611 case CONST_INT:
1612 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1613 + (unsigned int) INTVAL (x));
1614 return hash;
1616 case CONST_DOUBLE:
1617 /* This is like the general case, except that it only counts
1618 the integers representing the constant. */
1619 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1620 if (GET_MODE (x) != VOIDmode)
1621 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1622 hash += (unsigned int) XWINT (x, i);
1623 else
1624 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1625 + (unsigned int) CONST_DOUBLE_HIGH (x));
1626 return hash;
1628 case CONST_VECTOR:
1629 {
1630 int units;
1631 rtx elt;
1633 units = CONST_VECTOR_NUNITS (x);
1635 for (i = 0; i < units; ++i)
1637 elt = CONST_VECTOR_ELT (x, i);
1638 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1639 }
1641 return hash;
1642 }
1644 /* Assume there is only one rtx object for any given label. */
1645 case LABEL_REF:
1646 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1647 differences and differences between each stage's debugging dumps. */
1648 hash += (((unsigned int) LABEL_REF << 7)
1649 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1650 return hash;
1652 case SYMBOL_REF:
1653 {
1654 /* Don't hash on the symbol's address to avoid bootstrap differences.
1655 Different hash values may cause expressions to be recorded in
1656 different orders and thus different registers to be used in the
1657 final assembler. This also avoids differences in the dump files
1658 between various stages. */
1660 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1661 unsigned int h = 0;
1662 while (*p)
1663 h += (h << 7) + *p++; /* ??? revisit */
1665 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1666 return hash;
1667 }
1669 case MEM:
1670 if (MEM_VOLATILE_P (x))
1671 {
1672 *do_not_record_p = 1;
1673 return 0;
1674 }
1676 hash += (unsigned int) MEM;
1677 /* We used alias set for hashing, but this is not good, since the alias
1678 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1679 causing the profiles to fail to match. */
1680 x = XEXP (x, 0);
1681 goto repeat;
1683 case PRE_DEC:
1684 case PRE_INC:
1685 case POST_DEC:
1686 case POST_INC:
1687 case PC:
1688 case CC0:
1689 case CALL:
1690 case UNSPEC_VOLATILE:
1691 *do_not_record_p = 1;
1692 return 0;
1694 case ASM_OPERANDS:
1695 if (MEM_VOLATILE_P (x))
1696 {
1697 *do_not_record_p = 1;
1698 return 0;
1699 }
1700 else
1701 {
1702 /* We don't want to take the filename and line into account. */
1703 hash += (unsigned) code + (unsigned) GET_MODE (x)
1704 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1705 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1706 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1708 if (ASM_OPERANDS_INPUT_LENGTH (x))
1709 {
1710 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1711 {
1712 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1713 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1715 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1716 (x, i)));
1717 }
1719 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1720 x = ASM_OPERANDS_INPUT (x, 0);
1721 mode = GET_MODE (x);
1722 goto repeat;
1723 }
1725 return hash;
1726 }
1728 default:
1729 break;
1730 }
1731 hash += (unsigned) code + (unsigned) GET_MODE (x);
1732 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1733 {
1734 if (fmt[i] == 'e')
1735 {
1736 /* If we are about to do the last recursive call
1737 needed at this level, change it into iteration.
1738 This function is called enough to be worth it. */
1739 if (i == 0)
1740 {
1741 x = XEXP (x, i);
1742 goto repeat;
1743 }
1745 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1746 if (*do_not_record_p)
1747 return 0;
1748 }
1750 else if (fmt[i] == 'E')
1751 for (j = 0; j < XVECLEN (x, i); j++)
1752 {
1753 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1754 if (*do_not_record_p)
1755 return 0;
1756 }
1758 else if (fmt[i] == 's')
1759 hash += hash_string_1 (XSTR (x, i));
1760 else if (fmt[i] == 'i')
1761 hash += (unsigned int) XINT (x, i);
1762 }
1764 return hash;
1765 }
1769 /* Hash a set of register REGNO.
1771 Sets are hashed on the register that is set. This simplifies the PRE copy
1772 propagation code.
1774 ??? May need to make things more elaborate. Later, as necessary. */
1776 static unsigned int
1777 hash_set (regno, hash_table_size)
1778 int regno;
1779 int hash_table_size;
1780 {
1781 unsigned int hash;
1783 hash = regno;
1784 return hash % hash_table_size;
1785 }
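/* Example: hash_set simply reduces the register number, so with a table
   of size 31 pseudo-register 100 lands in bucket 100 % 31 == 7
   (illustrative numbers).  */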
1787 /* Return nonzero if exp1 is equivalent to exp2.
1788 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1789 static int
1790 expr_equiv_p (x, y)
1791 rtx x, y;
1792 {
1793 int i, j;
1794 enum rtx_code code;
1795 const char *fmt;
1797 if (x == y)
1798 return 1;
1801 if (x == 0 || y == 0)
1802 return 0;
1804 code = GET_CODE (x);
1805 if (code != GET_CODE (y))
1806 return 0;
1808 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1809 if (GET_MODE (x) != GET_MODE (y))
1810 return 0;
1812 switch (code)
1813 {
1814 case PC:
1815 case CC0:
1816 return x == y;
1818 case CONST_INT:
1819 return INTVAL (x) == INTVAL (y);
1821 case LABEL_REF:
1822 return XEXP (x, 0) == XEXP (y, 0);
1824 case SYMBOL_REF:
1825 return XSTR (x, 0) == XSTR (y, 0);
1827 case REG:
1828 return REGNO (x) == REGNO (y);
1830 case MEM:
1831 /* Can't merge two expressions in different alias sets, since we can
1832 decide that the expression is transparent in a block when it isn't,
1833 due to it being set with the different alias set. */
1834 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
1835 return 0;
1836 break;
1838 /* For commutative operations, check both orders. */
1839 case PLUS:
1840 case MULT:
1841 case AND:
1842 case IOR:
1843 case XOR:
1844 case NE:
1845 case EQ:
1846 return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
1847 && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
1848 || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
1849 && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));
1851 case ASM_OPERANDS:
1852 /* We don't use the generic code below because we want to
1853 disregard filename and line numbers. */
1855 /* A volatile asm isn't equivalent to any other. */
1856 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
1857 return 0;
1859 if (GET_MODE (x) != GET_MODE (y)
1860 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
1861 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
1862 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
1863 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
1864 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
1865 return 0;
1867 if (ASM_OPERANDS_INPUT_LENGTH (x))
1868 {
1869 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1869 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
1870 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
1871 ASM_OPERANDS_INPUT (y, i))
1872 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
1873 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
1874 return 0;
1875 }
1877 return 1;
1879 default:
1880 break;
1881 }
1883 /* Compare the elements. If any pair of corresponding elements
1884 fail to match, return 0 for the whole thing. */
1886 fmt = GET_RTX_FORMAT (code);
1887 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1888 {
1889 switch (fmt[i])
1890 {
1891 case 'e':
1892 if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
1893 return 0;
1894 break;
1896 case 'E':
1897 if (XVECLEN (x, i) != XVECLEN (y, i))
1898 return 0;
1899 for (j = 0; j < XVECLEN (x, i); j++)
1900 if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1901 return 0;
1902 break;
1904 case 's':
1905 if (strcmp (XSTR (x, i), XSTR (y, i)))
1906 return 0;
1907 break;
1909 case 'i':
1910 if (XINT (x, i) != XINT (y, i))
1911 return 0;
1912 break;
1914 case 'w':
1915 if (XWINT (x, i) != XWINT (y, i))
1916 return 0;
1917 break;
1919 case '0':
1920 break;
1922 default:
1923 abort ();
1924 }
1925 }
1927 return 1;
1928 }
1930 /* Insert expression X in INSN in the hash TABLE.
1931 If it is already present, record it as the last occurrence in INSN's
1932 basic block.
1934 MODE is the mode of the value X is being stored into.
1935 It is only used if X is a CONST_INT.
1937 ANTIC_P is nonzero if X is an anticipatable expression.
1938 AVAIL_P is nonzero if X is an available expression. */
1940 static void
1941 insert_expr_in_table (x, mode, insn, antic_p, avail_p, table)
1942 rtx x;
1943 enum machine_mode mode;
1944 rtx insn;
1945 int antic_p, avail_p;
1946 struct hash_table *table;
1947 {
1948 int found, do_not_record_p;
1949 unsigned int hash;
1950 struct expr *cur_expr, *last_expr = NULL;
1951 struct occr *antic_occr, *avail_occr;
1952 struct occr *last_occr = NULL;
1954 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1956 /* Do not insert expression in table if it contains volatile operands,
1957 or if hash_expr determines the expression is something we don't want
1958 to or can't handle. */
1959 if (do_not_record_p)
1960 return;
1962 cur_expr = table->table[hash];
1963 found = 0;
1965 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1966 {
1967 /* If the expression isn't found, save a pointer to the end of
1968 the list. */
1969 last_expr = cur_expr;
1970 cur_expr = cur_expr->next_same_hash;
1971 }
1973 if (! found)
1974 {
1975 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1976 bytes_used += sizeof (struct expr);
1977 if (table->table[hash] == NULL)
1978 /* This is the first pattern that hashed to this index. */
1979 table->table[hash] = cur_expr;
1980 else
1981 /* Add EXPR to end of this hash chain. */
1982 last_expr->next_same_hash = cur_expr;
1984 /* Set the fields of the expr element. */
1985 cur_expr->expr = x;
1986 cur_expr->bitmap_index = table->n_elems++;
1987 cur_expr->next_same_hash = NULL;
1988 cur_expr->antic_occr = NULL;
1989 cur_expr->avail_occr = NULL;
1990 }
1992 /* Now record the occurrence(s). */
1993 if (antic_p)
1994 {
1995 antic_occr = cur_expr->antic_occr;
1997 /* Search for another occurrence in the same basic block. */
1998 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1999 {
2000 /* If an occurrence isn't found, save a pointer to the end of
2001 the list. */
2002 last_occr = antic_occr;
2003 antic_occr = antic_occr->next;
2004 }
2006 if (antic_occr)
2007 /* Found another instance of the expression in the same basic block.
2008 Prefer the currently recorded one. We want the first one in the
2009 block and the block is scanned from start to end. */
2010 ; /* nothing to do */
2011 else
2012 {
2013 /* First occurrence of this expression in this basic block. */
2014 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
2015 bytes_used += sizeof (struct occr);
2016 /* First occurrence of this expression in any block? */
2017 if (cur_expr->antic_occr == NULL)
2018 cur_expr->antic_occr = antic_occr;
2020 last_occr->next = antic_occr;
2022 antic_occr->insn = insn;
2023 antic_occr->next = NULL;
  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
        {
          /* If an occurrence isn't found, save a pointer to the end of
             the list.  */
          last_occr = avail_occr;
          avail_occr = avail_occr->next;
        }

      if (avail_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer this occurrence to the currently recorded one.  We want
           the last one in the block and the block is scanned from start
           to end.  */
        avail_occr->insn = insn;
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);

          /* First occurrence of this expression in any block?  */
          if (cur_expr->avail_occr == NULL)
            cur_expr->avail_occr = avail_occr;
          else
            last_occr->next = avail_occr;

          avail_occr->insn = insn;
          avail_occr->next = NULL;
        }
    }
}
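/* Illustrative only: the invariant the two branches above maintain.
   For a block scanned from start to end that computes the same
   expression twice, say (insn and register numbers hypothetical)

        insn 10:  (set (reg 105) (plus:SI (reg 101) (reg 102)))
        insn 14:  (set (reg 106) (plus:SI (reg 101) (reg 102)))

   the antic_occr entry for the block keeps insn 10 (the first
   computation, used for anticipatability), while the avail_occr entry
   is overwritten so it keeps insn 14 (the last computation, used for
   availability).  */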
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */
static void
insert_set_in_table (x, insn, table)
     rtx x;
     rtx insn;
     struct hash_table *table;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }
  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
         the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
        cur_expr->avail_occr = cur_occr;
      else
        last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (pat, insn, table)
     rtx pat, insn;
     struct hash_table *table;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (GET_CODE (dest) == REG)
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && CONSTANT_P (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p [GET_MODE (dest)]
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has an attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes; if the argument slot is used somewhere
             explicitly, the address of the parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || GET_CODE (XEXP (note, 0)) != MEM))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((GET_CODE (src) == REG
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p [GET_MODE (dest)]
                    && REGNO (src) != regno)
                   || (CONSTANT_P (src)
                       && GET_CODE (src) != CONSTANT_P_RTX))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BLOCK_END (BLOCK_NUM (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
    }
}
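/* Illustrative only: hedged examples of what hash_scan_set records
   (register numbers hypothetical).  For the expression table, an insn
   such as

        (set (reg 100) (plus:SI (reg 101) (reg 102)))

   qualifies: the destination is a pseudo, the mode is copyable, and
   there are no REG_EH_REGION or memory REG_EQUIV notes.  For the set
   table only reg-reg copies and reg-constant moves qualify, e.g.

        (set (reg 100) (reg 101))   or   (set (reg 100) (const_int 4))

   while a store to memory or a set of a hard register is recorded in
   neither table.  */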
static void
hash_scan_clobber (x, insn, table)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
     struct hash_table *table ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn, table)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
     struct hash_table *table ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
   not record any expressions.  */

static void
hash_scan_insn (insn, table, in_libcall_block)
     rtx insn;
     struct hash_table *table;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}
static void
dump_hash_table (file, name, table)
     FILE *file;
     const char *name;
     struct hash_table *table;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table
    = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */
static void
record_last_reg_set_info (insn, regno)
     rtx insn;
     int regno;
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}
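/* Illustrative only: a hedged reading of the fields above, inferred
   from the comment (the CUID values are hypothetical).  If REGNO is set
   at CUIDs 7 and 12 within the current block, then first_set == 7 and
   last_set == 12, so an expression using the register is treated as
   anticipatable at insns with CUID <= 7 (no earlier set in the block)
   and as available at insns with CUID > 12 (no later set in the
   block).  */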
/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (dest, unused1, v_insn)
     rtx dest ATTRIBUTE_UNUSED;
     rtx unused1 ATTRIBUTE_UNUSED;
     void *v_insn;
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (GET_CODE (dest) != MEM)
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
  bitmap_set_bit (canon_modify_mem_list_set, bb);
}
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* Note that traversals of this loop (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (canon_modify_mem_list_set, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
}
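/* Illustrative only: a minimal sketch, not part of the pass, of the
   pairwise traversal the list layout above implies; the helper name
   WALK_CANON_LIST is hypothetical.  canon_list_insert pushes DEST_ADDR
   and then DEST, so a reader pops DEST first and DEST_ADDR second, and
   a CALL_INSN entry terminates the walk (calls clobber all memory).  */
#if 0
static void
walk_canon_list (bb)
     int bb;
{
  rtx list_entry = canon_modify_mem_list[bb];

  while (list_entry)
    {
      rtx dest, dest_addr;

      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
        break;

      dest = XEXP (list_entry, 0);
      list_entry = XEXP (list_entry, 1);
      dest_addr = XEXP (list_entry, 0);
      /* ... use DEST and DEST_ADDR, e.g. with canon_true_dependence ...  */
      list_entry = XEXP (list_entry, 1);
    }
}
#endif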
/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block.

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block.

   Currently src must be a pseudo-reg or a const_int.

   F is the first insn.
   TABLE is the table computed.  */
static void
compute_hash_table_work (table)
     struct hash_table *table;
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = (struct reg_avail_info *)
    gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
         determine when registers and memory are first and last set.
         ??? hard-reg reg_set_in_block computation
         could be moved to compute_sets since they currently don't change.  */

      for (insn = current_bb->head;
           insn && insn != NEXT_INSN (current_bb->end);
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  record_last_reg_set_info (insn, regno);

              mark_call (insn);
            }

          note_stores (PATTERN (insn), record_last_set_info, insn);
        }

      /* The next pass builds the hash table.  */

      for (insn = current_bb->head, in_libcall_block = 0;
           insn && insn != NEXT_INSN (current_bb->end);
           insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
              in_libcall_block = 1;
            else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
            hash_scan_insn (insn, table, in_libcall_block);
            if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
              in_libcall_block = 0;
          }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}
/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether the set or expression table will
   be created.  */

static void
alloc_hash_table (n_insns, table, set_p)
     int n_insns;
     struct hash_table *table;
     int set_p;
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = (struct expr **) gmalloc (n);
  table->set_p = set_p;
}
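/* Illustrative only: the sizing rule above worked through on
   hypothetical counts.  For a function of 1000 insns, 1000 / 4 = 250,
   and "|= 1" yields 251 buckets; for 20 insns the floor applies and the
   table gets 11 buckets (already odd).  */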
/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (table)
     struct hash_table *table;
{
  free (table->table);
}

/* Compute the contents of the set (copy/const propagation) or
   expression hash TABLE.  */

static void
compute_hash_table (table)
     struct hash_table *table;
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset ((char *) table->table, 0,
          table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression TABLE.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat, table)
     rtx pat;
     struct hash_table *table;
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                 table->size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = table->table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}
/* Lookup REGNO in the set TABLE.  If PAT is non-NULL look for the entry that
   matches it, otherwise return the first entry for REGNO.  The result is a
   pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, pat, table)
     unsigned int regno;
     rtx pat;
     struct hash_table *table;
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  if (pat)
    {
      while (expr && ! expr_equiv_p (expr->expr, pat))
        expr = expr->next_same_hash;
    }
  else
    {
      while (expr && REGNO (SET_DEST (expr->expr)) != regno)
        expr = expr->next_same_hash;
    }

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)
     unsigned int regno;
     struct expr *expr;
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}
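/* Illustrative only: a minimal sketch, not part of the pass, of walking
   every recorded set of one pseudo with the two helpers above; the
   helper name VISIT_SETS_OF is hypothetical.  This is the same idiom
   find_avail_set and find_bypass_set use below.  */
#if 0
static void
visit_sets_of (regno)
     unsigned int regno;
{
  struct expr *set;

  for (set = lookup_set (regno, NULL_RTX, &set_hash_table);
       set != NULL;
       set = next_set (regno, set))
    {
      /* set->expr is a (set (reg REGNO) ...) pattern.  */
    }
}
#endif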
/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (listp)
     rtx *listp;
{
  rtx list, next;

  for (list = *listp; list ; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
        free_EXPR_LIST_node (list);
      else
        free_INSN_LIST_node (list);
    }

  *listp = NULL_RTX;
}

/* Clear canon_modify_mem_list and modify_mem_list tables.  */

static void
clear_modify_mem_tables ()
{
  int i;

  EXECUTE_IF_SET_IN_BITMAP
    (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
  bitmap_clear (modify_mem_list_set);

  EXECUTE_IF_SET_IN_BITMAP
    (canon_modify_mem_list_set, 0, i,
     free_insn_expr_list_list (canon_modify_mem_list + i));
  bitmap_clear (canon_modify_mem_list_set);
}

/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set.  */

static void
free_modify_mem_tables ()
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}
/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables ()
{
  /* Maintain a bitmap of which regs have been set since the beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}
/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (x, insn)
     rtx x, insn;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC: case CC0: case CONST: case CONST_INT: case CONST_DOUBLE:
    case CONST_VECTOR: case SYMBOL_REF: case LABEL_REF:
    case ADDR_VEC: case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
                                  INSN_CUID (insn), x, 0))
        return 0;
      else
        return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return oprs_not_set_p (XEXP (x, i), insn);

          if (! oprs_not_set_p (XEXP (x, i), insn))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
            return 0;
    }

  return 1;
}
/* Mark things set by a CALL.  */

static void
mark_call (insn)
     rtx insn;
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (pat, insn)
     rtx pat, insn;
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (pat, insn)
     rtx pat, insn;
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}
/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          mark_set (x, insn);
        else if (GET_CODE (x) == CLOBBER)
          mark_clobber (x, insn);
        else if (GET_CODE (x) == CALL)
          mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;
{
  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem ()
{
  sbitmap_vector_free (rd_kill);
  sbitmap_vector_free (rd_gen);
  sbitmap_vector_free (reaching_defs);
  sbitmap_vector_free (rd_out);
}
/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (insn, regno, bb)
     rtx insn;
     int regno;
     basic_block bb;
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
}
/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd ()
{
  int cuid;
  unsigned int regno;
  int i;
  basic_block bb;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
           generates a definition in the block)
         Call the reg set by the insn corresponding to that bit regx
         Look at the linked list starting at reg_set_table[regx]
         For each setting of regx in the linked list, which is not in
             this block
           Set the bit in `kill' corresponding to that insn.  */
  FOR_EACH_BB (bb)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb->index], cuid))
        {
          rtx insn = CUID_INSN (cuid);
          rtx pat = PATTERN (insn);

          if (GET_CODE (insn) == CALL_INSN)
            {
              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  handle_rd_kill_set (insn, regno, bb);
            }

          if (GET_CODE (pat) == PARALLEL)
            {
              for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                {
                  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

                  if ((code == SET || code == CLOBBER)
                      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
                    handle_rd_kill_set (insn,
                                        REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
                                        bb);
                }
            }
          else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
            /* Each setting of this register outside of this block
               must be marked in the set of kills in this block.  */
            handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
        }
}
/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd ()
{
  int changed, passes;
  basic_block bb;

  FOR_EACH_BB (bb)
    sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      FOR_EACH_BB (bb)
        {
          sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
          changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
                                               reaching_defs[bb->index], rd_kill[bb->index]);
        }

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
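/* Illustrative only: the fixed point computed above, written in the
   usual dataflow notation (assuming the textbook formulation the
   comment cites):

        reaching_defs[bb] = union over predecessors p of rd_out[p]
        rd_out[bb]        = rd_gen[bb] | (reaching_defs[bb] & ~rd_kill[bb])

   sbitmap_union_of_preds computes the first equation and
   sbitmap_union_of_diff_cg the second, returning nonzero when rd_out
   changed; iteration stops once no block changes.  */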
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_blocks);
}

static void
free_avail_expr_mem ()
{
  sbitmap_vector_free (ae_kill);
  sbitmap_vector_free (ae_gen);
  sbitmap_vector_free (ae_in);
  sbitmap_vector_free (ae_out);
}
/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen (expr_hash_table)
     struct hash_table *expr_hash_table;
{
  unsigned int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table->size; i++)
    for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
        SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}
/* Return nonzero if expression X is killed in BB.  */

static int
expr_killed_p (x, bb)
     rtx x;
     basic_block bb;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));

    case MEM:
      if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
        return 1;
      else
        return expr_killed_p (XEXP (x, 0), bb);

    case PC: case CC0: case CONST: case CONST_INT: case CONST_DOUBLE:
    case CONST_VECTOR: case SYMBOL_REF: case LABEL_REF:
    case ADDR_VEC: case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            return expr_killed_p (XEXP (x, i), bb);
          else if (expr_killed_p (XEXP (x, i), bb))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (expr_killed_p (XVECEXP (x, i, j), bb))
            return 1;
    }

  return 0;
}
/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (ae_gen, ae_kill, expr_hash_table)
     sbitmap *ae_gen, *ae_kill;
     struct hash_table *expr_hash_table;
{
  basic_block bb;
  unsigned int i;
  struct expr *expr;

  FOR_EACH_BB (bb)
    for (i = 0; i < expr_hash_table->size; i++)
      for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
        {
          /* Skip EXPR if generated in this block.  */
          if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
            continue;

          if (expr_killed_p (expr->expr, bb))
            SET_BIT (ae_kill[bb->index], expr->bitmap_index);
        }
}
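/* Illustrative only: GEN and KILL feed compute_available, which
   (assuming the standard available-expressions formulation) solves

        ae_in[bb]  = intersection over predecessors p of ae_out[p]
        ae_out[bb] = ae_gen[bb] | (ae_in[bb] & ~ae_kill[bb])

   so an expression is available at a block entry only if every path
   from the entry computes it after the last modification of its
   operands.  */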
/* Actually perform the Classic GCSE optimizations.  */

/* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
     char *visited;
{
  edge pred;

  for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
    {
      basic_block pred_bb = pred->src;

      if (visited[pred_bb->index])
        /* This predecessor has already been visited.  Nothing to do.  */
        ;
      else if (pred_bb == bb)
        {
          /* BB loops on itself.  */
          if (check_self_loop
              && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
              && BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
        visited[pred_bb->index] = 1;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (BLOCK_NUM (occr->insn) == pred_bb->index)
            return 1;

          visited[pred_bb->index] = 1;
        }

      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb->index] = 1;
          if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
                                        visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (occr, expr, bb, check_self_loop)
     struct occr *occr;
     struct expr *expr;
     basic_block bb;
     int check_self_loop;
{
  int rval;
  char *visited = (char *) xcalloc (last_basic_block, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}
/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  basic_block bb = BLOCK_FOR_INSN (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
        /* The available expression is actually itself
           (i.e. a loop in the flow graph) so do nothing.  */
        return NULL;

      /* (FIXME) This is the case where we found a pattern that was
         created by a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
         Search backwards from this insn to see how many of these
         computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
        {
          if (BLOCK_FOR_INSN (occr->insn) == bb)
            {
              /* The expression is generated in this block.
                 The only time we care about this is when the expression
                 is generated later in the block [and thus there's a loop].
                 We let the normal cse pass handle the other cases.  */
              if (INSN_CUID (insn) < INSN_CUID (occr->insn)
                  && expr_reaches_here_p (occr, expr, bb, 1))
                {
                  if (insn_computes_expr != NULL)
                    return NULL;

                  insn_computes_expr = occr->insn;
                }
            }
          else if (expr_reaches_here_p (occr, expr, bb, 0))
            {
              if (insn_computes_expr != NULL)
                return NULL;

              insn_computes_expr = occr->insn;
            }
        }

      if (insn_computes_expr == NULL)
        return NULL;

      return insn_computes_expr;
    }
}
/* Return nonzero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
        {
          if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
            return 1;
          else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
            reg = XEXP (PATTERN (def_insn), 0);
          else if (GET_CODE (PATTERN (def_insn)) == SET)
            reg = SET_DEST (PATTERN (def_insn));
          else
            return 0;

          return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
        }
      else
        return 0;
    }

  return 0;
}
/* Return nonzero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  I.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
        number_of_reaching_defs++;
        /* Ignore parallels for now.  */
        if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
          return 0;

        if (!for_combine
            && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
                || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                  SET_SRC (PATTERN (insn)))))
          /* A setting of the reg to a different value reaches INSN.  */
          return 0;

        if (number_of_reaching_defs > 1)
          {
            /* If the value the register is being set to in this setting
               equals the previous value the register was set to, and this
               setting reaches the insn we are trying to do the substitution
               on, then we are ok.  */
            if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
              return 0;
            else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                    SET_SRC (PATTERN (insn))))
              return 0;
          }

        *addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}
/* The expression computed by INSN is available and the substitution is legal,
   so try to perform the substitution.

   The result is nonzero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr, expr_set;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;

  expr_set = single_set (insn_computes_expr);
  if (!expr_set)
    abort ();

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (expr_set)) == REG)
    {
      /* This is the case when the available expression that reaches
         here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
        = REGNO (SET_SRC (expr_set));

      /* If the register was created by GCSE we can't use `reg_set_table',
         however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
          /* If the register the expression is computed into is set only once,
             or only one set reaches this insn, we can use it.  */
          || (((this_reg = reg_set_table[regnum_for_replacing]),
               this_reg->next == NULL)
              || can_disregard_other_sets (&this_reg, insn, 0)))
        {
          use_src = 1;
          found_setting = 1;
        }
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
        = REGNO (SET_DEST (expr_set));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
        abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
         or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
          || can_disregard_other_sets (&this_reg, insn, 0))
        found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
        to = SET_SRC (expr_set);
      else
        to = SET_DEST (expr_set);
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
                       INSN_UID (insn));
              fprintf (gcse_file, " reg %d %s insn %d\n",
                       REGNO (to), use_src ? "from" : "set in",
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after insnx that copies the reg set in insnx
         into a new pseudo register; call this new register REGN.
         From insnb until the end of the basic block, or until REGB is set,
         replace all uses of REGB with REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
         an insn.  I think this is ok.  */
      new_insn
        = emit_insn_after (gen_rtx_SET (VOIDmode, to,
                                        SET_DEST (expr_set)),
                           insn_computes_expr);

      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
        {
          fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
                   INSN_UID (NEXT_INSN (insn_computes_expr)),
                   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
          fprintf (gcse_file, ", computed in insn %d,\n",
                   INSN_UID (insn_computes_expr));
          fprintf (gcse_file, " into newly allocated reg %d\n",
                   REGNO (to));
        }

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
                                 SET_DEST (PATTERN
                                           (NEXT_INSN (insn_computes_expr))),
                                 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file,
                       "GCSE: Replacing the source in insn %d with reg %d ",
                       INSN_UID (insn),
                       REGNO (SET_DEST (PATTERN (NEXT_INSN
                                                 (insn_computes_expr)))));
              fprintf (gcse_file, "set in insn %d\n",
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  return changed;
}
/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is nonzero if a change was made.  */

static int
classic_gcse ()
{
  int changed;
  rtx insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = bb->head;
           insn != NULL && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          /* Is insn of form (set (pseudo-reg) ...)?  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (want_to_gcse_p (src)
                  /* Is the expression recorded?  */
                  && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
                  /* Is the expression available [at the start of the
                     block]?  */
                  && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_not_set_p (src, insn))
                changed |= handle_avail_expr (insn, expr);
            }

          /* Keep track of everything modified by this insn.  */
          /* ??? Need to be careful w.r.t. mods done to INSN.  */
          if (INSN_P (insn))
            mark_oprs_set (insn);
        }
    }

  return changed;
}
/* Top level routine to perform one classic GCSE pass.

   Return nonzero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  alloc_rd_mem (last_basic_block, max_cuid);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
      compute_ae_gen (&expr_hash_table);
      compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_hash_table (&expr_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
               current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  struct reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              FOR_EACH_BB (bb)
                if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
                  SET_BIT (bmap[bb->index], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
            }
        }
      else
        {
          if (REGNO (x) < FIRST_PSEUDO_REGISTER)
            {
              FOR_EACH_BB (bb)
                if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
                  RESET_BIT (bmap[bb->index], indx);
            }
          else
            {
              for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
                RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
            }
        }

      return;

    case MEM:
      FOR_EACH_BB (bb)
        {
          rtx list_entry = canon_modify_mem_list[bb->index];

          while (list_entry)
            {
              rtx dest, dest_addr;

              if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
                {
                  if (set_p)
                    SET_BIT (bmap[bb->index], indx);
                  else
                    RESET_BIT (bmap[bb->index], indx);
                  break;
                }
              /* LIST_ENTRY must be an INSN of some kind that sets memory.
                 Examine each hunk of memory that is modified.  */

              dest = XEXP (list_entry, 0);
              list_entry = XEXP (list_entry, 1);
              dest_addr = XEXP (list_entry, 0);

              if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
                                         x, rtx_addr_varies_p))
                {
                  if (set_p)
                    SET_BIT (bmap[bb->index], indx);
                  else
                    RESET_BIT (bmap[bb->index], indx);
                  break;
                }
              list_entry = XEXP (list_entry, 1);
            }
        }

      x = XEXP (x, 0);
      goto repeat;

    case PC: case CC0: case CONST: case CONST_INT: case CONST_DOUBLE:
    case CONST_VECTOR: case SYMBOL_REF: case LABEL_REF:
    case ADDR_VEC: case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, i);
              goto repeat;
            }

          compute_transp (XEXP (x, i), indx, bmap, set_p);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
                     cprop_avout, cprop_avin);
}
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (xptr, data)
     rtx *xptr;
     void *data ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      if (reg_use_count == MAX_USES)
        return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }

          find_used_regs (&XEXP (x, i), data);
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          find_used_regs (&XVECEXP (x, i, j), data);
    }
}
/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If the above failed and this is a single set, try to simplify the
         source of the set given our substitution.  We could perhaps try
         this for multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
          && validate_change (insn, &SET_SRC (set), src, 0))
        success = 1;
    }

  /* If we've failed to do replacement, have a single SET, and don't already
     have a note, add a REG_EQUAL note to not lose information.  */
  if (!success && note == 0 && set != 0)
    note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  else if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  /* A REG_EQUAL note may get simplified into a plain register.  We don't
     allow that; remove the note.  This case ought not to arise, because
     earlier code ought to have synthesized a reg-reg move, but be on the
     safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X), making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);

      /* Find a set that is available at the start of the block
         which contains INSN.  */
      while (set)
        {
          if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      /* If no available set was found we've reached the end of the
         (possibly empty) copy chain.  */
      if (set == 0)
        break;

      if (GET_CODE (set->expr) != SET)
        abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
         Now check that SRC is ANTLOC (i.e. none of the source operands
         have changed since the start of the block).

         If the source operand changed, we may still use it for the next
         iteration of this loop, but we may not use it for substitutions.  */

      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
        set1 = set;

      /* If the source of the set is anything except a register, then
         we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
        break;

      /* Follow the copy chain, i.e. start another iteration of the loop
         and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
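/* Illustrative only: a hedged example of the chain walk above
   (register numbers hypothetical).  Suppose these sets are available on
   entry to INSN's block:

        (set (reg 4) (reg 3))
        (set (reg 3) (const_int 7))

   A query for reg 4 finds the copy from reg 3, follows the chain to
   reg 3, and finds the constant.  The last anticipatable set found,
   (set (reg 3) (const_int 7)), is returned, so the caller may replace
   INSN's use of reg 4 with 7.  */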
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (bb, setcc, jump, from, src)
     basic_block bb;
     rtx setcc;
     rtx jump;
     rtx from;
     rtx src;
{
  rtx set = pc_set (jump);
  rtx new, new_set;

  /* First substitute in the INSN condition as the SET_SRC of the JUMP,
     then substitute that given values into this expanded JUMP.  */
  if (setcc != NULL
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_set = single_set (setcc);
      new_set = simplify_replace_rtx (SET_SRC (set),
                                      SET_DEST (setcc_set),
                                      SET_SRC (setcc_set));
    }
  else
    new_set = SET_SRC (set);

  new = simplify_replace_rtx (new_set, from, src);

  /* If no simplification can be made, then try the next
     register.  */
  if (rtx_equal_p (new, new_set) || rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op, delete it; otherwise, this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn is equivalent
         to the one computed by setcc.  */
      if (setcc
          && modified_in_p (new, setcc))
        return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
        return 0;

      /* If this has turned into an unconditional jump,
         then put a barrier after it so that the unreachable
         code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
        emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
               "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
               REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
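/* Illustrative only: a hedged before/after sketch of cprop_jump
   (register, insn, and label numbers hypothetical).  Given

        setcc:  (set (reg 100) (lt:SI (reg 101) (const_int 0)))
        jump:   (set (pc) (if_then_else (ne (reg 100) (const_int 0))
                                        (label_ref 23)
                                        (pc)))

   with FROM = (reg 101) and SRC = (const_int -1), substituting the
   setcc condition and then the constant folds the condition to true,
   so the jump simplifies to the unconditional (set (pc) (label_ref 23))
   and a barrier is emitted after it.  */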
static int
constprop_register (insn, from, to, alter_jumps)
     rtx insn;
     rtx from;
     rtx to;
     int alter_jumps;
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
          && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
        return 1;
    }

  /* Handle normal insns next.  */
  if (GET_CODE (insn) == INSN
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}
/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  rtx note;
  int changed = 0;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
         We do this because ...  */
      if (regno >= max_gcse_regno)
        continue;

      /* If the register has already been set in this block, there's
         nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
        continue;

      /* Find an assignment that sets reg_used and is available
         at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
        continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
        abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (CONSTANT_P (src))
        {
          if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
            {
              changed = 1;
              const_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
                  fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
                  print_rtl (gcse_file, src);
                  fprintf (gcse_file, "\n");
                }
            }
        }
      else if (GET_CODE (src) == REG
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && REGNO (src) != regno)
        {
          if (try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              copy_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
                           regno, INSN_UID (insn));
                  fprintf (gcse_file, " with reg %d\n", REGNO (src));
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
              /* FIXME: If it turns out that the insn isn't deletable,
                 then we may have unnecessarily extended register lifetimes
                 and made things worse.  */
            }
        }
    }

  return changed;
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (x, insn, alter_jumps, libcall_sp)
     rtx x;
     rtx insn;
     int alter_jumps;
     rtx *libcall_sp;
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (GET_CODE (x) == REG
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
          || (GET_CODE (PATTERN (insn)) != USE
              && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
        return false;
      for (l = val->locs; l; l = l->next)
        {
          rtx this_rtx = l->loc;
          rtx note;

          if (CONSTANT_P (this_rtx)
              && GET_CODE (this_rtx) != CONSTANT_P_RTX)
            newcnst = this_rtx;
          if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
              /* Don't copy propagate if it has an attached REG_EQUIV note.
                 At this point only function parameters should have
                 REG_EQUIV notes; if the argument slot is used somewhere
                 explicitly, the address of the parameter has been taken,
                 so we should not extend the lifetime of the pseudo.  */
              && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
                  || GET_CODE (XEXP (note, 0)) != MEM))
            newreg = this_rtx;
        }
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
        {
          /* If we find a case where we can't make the retval REG_EQUAL notes
             match the new register, we either have to abandon this replacement
             or fix delete_trivially_dead_insns to preserve the setting insn,
             or make it delete the REG_EQUAL note, and fix up all passes that
             require the REG_EQUAL note there.  */
          if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
            abort ();
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
                       REGNO (x));
              fprintf (gcse_file, "insn %d with constant ",
                       INSN_UID (insn));
              print_rtl (gcse_file, newcnst);
              fprintf (gcse_file, "\n");
            }
          const_prop_count++;
          return true;
        }
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
        {
          adjust_libcall_notes (x, newreg, insn, libcall_sp);
          if (gcse_file != NULL)
            {
              fprintf (gcse_file,
                       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
                       REGNO (x), INSN_UID (insn));
              fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
            }
          copy_prop_count++;
          return true;
        }
    }
  return false;
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (oldreg, newval, insn, libcall_sp)
     rtx oldreg, newval, insn, *libcall_sp;
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
        continue;

      if (REG_P (newval))
        {
          if (reg_set_between_p (newval, PREV_INSN (insn), end))
            {
              do
                {
                  note = find_reg_equal_equiv_note (end);
                  if (! note)
                    continue;
                  if (reg_mentioned_p (newval, XEXP (note, 0)))
                    return false;
                }
              while ((end = *libcall_sp++));
              return true;
            }
        }
      XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}
#define MAX_NESTED_LIBCALLS 9

static void
local_cprop_pass (alter_jumps)
     int alter_jumps;
{
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
  bool changed = false;

  cselib_init ();
  libcall_stack[MAX_NESTED_LIBCALLS] = NULL_RTX;
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        {
          rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

          if (note)
            {
              if (libcall_sp == libcall_stack)
                abort ();
              *--libcall_sp = XEXP (note, 0);
            }
          note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
          if (note)
            libcall_sp++;
          note = find_reg_equal_equiv_note (insn);
          do
            {
              reg_use_count = 0;
              note_uses (&PATTERN (insn), find_used_regs, NULL);
              if (note)
                find_used_regs (&XEXP (note, 0), NULL);

              for (reg_used = &reg_use_table[0]; reg_use_count > 0;
                   reg_used++, reg_use_count--)
                if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
                                    libcall_sp))
                  {
                    changed = true;
                    break;
                  }
            }
          while (reg_use_count);
        }
      cselib_process_insn (insn);
    }
  cselib_finish ();
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && alter_jumps)
    delete_unreachable_blocks ();
}
4428 /* Forward propagate copies. This includes copies and constants. Return
4429 nonzero if a change was made. */
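/* For illustration only (a hand-written sketch, not pass output): given

     (set (reg 100) (const_int 0))
     ...
     (set (reg 102) (plus:SI (reg 101) (reg 100)))

   constant propagation rewrites the use of (reg 100) in the PLUS as
   (const_int 0); copy propagation performs the analogous rewrite when
   (reg 100) was set from another register. */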
4439 /* Note we start at block 1. */
4440 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4442 if (gcse_file != NULL)
4443 fprintf (gcse_file, "\n");
4448 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4450 /* Reset tables used to keep track of what's still valid [since the
4451 start of the block]. */
4452 reset_opr_set_tables ();
4454 for (insn = bb->head;
4455 insn != NULL && insn != NEXT_INSN (bb->end);
4456 insn = NEXT_INSN (insn))
4459 changed |= cprop_insn (insn, alter_jumps);
4461 /* Keep track of everything modified by this insn. */
4462 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4463 call mark_oprs_set if we turned the insn into a NOTE. */
4464 if (GET_CODE (insn) != NOTE)
4465 mark_oprs_set (insn);
4469 if (gcse_file != NULL)
4470 fprintf (gcse_file, "\n");
4475 /* Perform one copy/constant propagation pass.
4476 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4477 propagation into conditional jumps. If BYPASS_JUMPS is true,
4478 perform conditional jump bypassing optimizations. */
4481 one_cprop_pass (pass, cprop_jumps, bypass_jumps)
4488 const_prop_count = 0;
4489 copy_prop_count = 0;
4491 local_cprop_pass (cprop_jumps);
4493 alloc_hash_table (max_cuid, &set_hash_table, 1);
4494 compute_hash_table (&set_hash_table);
4496 dump_hash_table (gcse_file, "SET", &set_hash_table);
4497 if (set_hash_table.n_elems > 0)
4499 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4500 compute_cprop_data ();
4501 changed = cprop (cprop_jumps);
4503 changed |= bypass_conditional_jumps ();
4507 free_hash_table (&set_hash_table);
4511 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4512 current_function_name, pass, bytes_used);
4513 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4514 const_prop_count, copy_prop_count);
4516 /* Global analysis may get into infinite loops for unreachable blocks. */
4517 if (changed && cprop_jumps)
4518 delete_unreachable_blocks ();
4523 /* Bypass conditional jumps. */
4525 /* Find a set of REGNO to a constant that is available at the end of basic
4526 block BB. Returns NULL if no such set is found. Based heavily upon
4527 find_avail_set. */
4529 static struct expr *
4530 find_bypass_set (regno, bb)
4534 struct expr *result = 0;
4539 struct expr *set = lookup_set (regno, NULL_RTX, &set_hash_table);
4543 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4545 set = next_set (regno, set);
4551 if (GET_CODE (set->expr) != SET)
4554 src = SET_SRC (set->expr);
4555 if (CONSTANT_P (src))
4558 if (GET_CODE (src) != REG)
4561 regno = REGNO (src);
4567 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4568 basic block BB which has more than one predecessor. If not NULL, SETCC
4569 is the first instruction of BB, which is immediately followed by JUMP_INSN
4570 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4571 Returns nonzero if a change was made. */
4574 bypass_block (bb, setcc, jump)
4582 insn = (setcc != NULL) ? setcc : jump;
4584 /* Determine set of register uses in INSN. */
4586 note_uses (&PATTERN (insn), find_used_regs, NULL);
4587 note = find_reg_equal_equiv_note (insn);
4589 find_used_regs (&XEXP (note, 0), NULL);
4592 for (e = bb->pred; e; e = enext)
4594 enext = e->pred_next;
4595 for (i = 0; i < reg_use_count; i++)
4597 struct reg_use *reg_used = &reg_use_table[i];
4598 unsigned int regno = REGNO (reg_used->reg_rtx);
4599 basic_block dest, old_dest;
4603 if (regno >= max_gcse_regno)
4606 set = find_bypass_set (regno, e->src->index);
4611 src = SET_SRC (pc_set (jump));
4614 src = simplify_replace_rtx (src,
4615 SET_DEST (PATTERN (setcc)),
4616 SET_SRC (PATTERN (setcc)));
4618 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4619 SET_SRC (set->expr));
4622 dest = FALLTHRU_EDGE (bb)->dest;
4623 else if (GET_CODE (new) == LABEL_REF)
4624 dest = BRANCH_EDGE (bb)->dest;
4628 /* Once basic block indices are stable, we should be able
4629 to use redirect_edge_and_branch_force instead. */
4631 if (dest != NULL && dest != old_dest
4632 && redirect_edge_and_branch (e, dest))
4634 /* Copy the register setter to the redirected edge.
4635 Don't copy CC0 setters, as CC0 is dead after jump. */
4638 rtx pat = PATTERN (setcc);
4639 if (!CC0_P (SET_DEST (pat)))
4640 insert_insn_on_edge (copy_insn (pat), e);
4643 if (gcse_file != NULL)
4645 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4646 regno, INSN_UID (jump));
4647 print_rtl (gcse_file, SET_SRC (set->expr));
4648 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4649 e->src->index, old_dest->index, dest->index);
4659 /* Find basic blocks with more than one predecessor that only contain a
4660 single conditional jump. If the result of the comparison is known at
4661 compile-time from any incoming edge, redirect that edge to the
4662 appropriate target. Returns nonzero if a change was made. */
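/* A source-level sketch of the transformation (hypothetical example):

     if (a)  x = 0;  else  x = 1;
     if (x)  goto L1;

   The second test sits in a block with two predecessors that contains
   only a conditional jump. On the edge leaving the "x = 0" arm the
   condition is known to be false, so that edge can be redirected
   straight to the fall-through target, bypassing the jump. */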
4665 bypass_conditional_jumps ()
4673 /* Note we start at block 1. */
4674 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4678 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4679 EXIT_BLOCK_PTR, next_bb)
4681 /* Check for more than one predecessor. */
4682 if (bb->pred && bb->pred->pred_next)
4685 for (insn = bb->head;
4686 insn != NULL && insn != NEXT_INSN (bb->end);
4687 insn = NEXT_INSN (insn))
4688 if (GET_CODE (insn) == INSN)
4692 if (GET_CODE (PATTERN (insn)) != SET)
4695 dest = SET_DEST (PATTERN (insn));
4696 if (REG_P (dest) || CC0_P (dest))
4701 else if (GET_CODE (insn) == JUMP_INSN)
4703 if (any_condjump_p (insn) && onlyjump_p (insn))
4704 changed |= bypass_block (bb, setcc, insn);
4707 else if (INSN_P (insn))
4712 /* If we bypassed any register setting insns, we inserted a
4713 copy on the redirected edge. These need to be committed. */
4715 commit_edge_insertions();
4720 /* Compute PRE+LCM working variables. */
4722 /* Local properties of expressions. */
4723 /* Nonzero for expressions that are transparent in the block. */
4724 static sbitmap *transp;
4726 /* Nonzero for expressions that are transparent at the end of the block.
4727 This is only zero for expressions killed by an abnormal critical edge
4728 created by a call. */
4729 static sbitmap *transpout;
4731 /* Nonzero for expressions that are computed (available) in the block. */
4732 static sbitmap *comp;
4734 /* Nonzero for expressions that are locally anticipatable in the block. */
4735 static sbitmap *antloc;
4737 /* Nonzero for expressions where this block is an optimal computation
4738 point. */
4739 static sbitmap *pre_optimal;
4741 /* Nonzero for expressions which are redundant in a particular block. */
4742 static sbitmap *pre_redundant;
4744 /* Nonzero for expressions which should be inserted on a specific edge. */
4745 static sbitmap *pre_insert_map;
4747 /* Nonzero for expressions which should be deleted in a specific block. */
4748 static sbitmap *pre_delete_map;
4750 /* Contains the edge_list returned by pre_edge_lcm. */
4751 static struct edge_list *edge_list;
4753 /* Redundant insns. */
4754 static sbitmap pre_redundant_insns;
4756 /* Allocate vars used for PRE analysis. */
4759 alloc_pre_mem (n_blocks, n_exprs)
4760 int n_blocks, n_exprs;
4762 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4763 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4764 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4767 pre_redundant = NULL;
4768 pre_insert_map = NULL;
4769 pre_delete_map = NULL;
4772 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4774 /* pre_insert and pre_delete are allocated later. */
4777 /* Free vars used for PRE analysis. */
4782 sbitmap_vector_free (transp);
4783 sbitmap_vector_free (comp);
4785 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4788 sbitmap_vector_free (pre_optimal);
4790 sbitmap_vector_free (pre_redundant);
4792 sbitmap_vector_free (pre_insert_map);
4794 sbitmap_vector_free (pre_delete_map);
4796 sbitmap_vector_free (ae_in);
4798 sbitmap_vector_free (ae_out);
4800 transp = comp = NULL;
4801 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4802 ae_in = ae_out = NULL;
4805 /* Top level routine to do the dataflow analysis needed by PRE. */
4810 sbitmap trapping_expr;
4814 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4815 sbitmap_vector_zero (ae_kill, last_basic_block);
4817 /* Collect expressions which might trap. */
4818 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4819 sbitmap_zero (trapping_expr);
4820 for (ui = 0; ui < expr_hash_table.size; ui++)
4823 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4824 if (may_trap_p (e->expr))
4825 SET_BIT (trapping_expr, e->bitmap_index);
4828 /* Compute ae_kill for each basic block using:
4830 ~(TRANSP | COMP)
4832 This is significantly faster than compute_ae_kill. */
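/* A tiny worked example with three expressions (hypothetical values):
   TRANSP = 011 and COMP = 100 give AE_KILL = ~(011 | 100) = 000, while
   TRANSP = 001 and COMP = 000 give AE_KILL = 110, killing the two
   expressions that are neither transparent nor computed in the block. */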
4838 /* If the current block is the destination of an abnormal edge, we
4839 kill all trapping expressions because we won't be able to properly
4840 place the instruction on the edge. So make them neither
4841 anticipatable nor transparent. This is fairly conservative. */
4842 for (e = bb->pred; e ; e = e->pred_next)
4843 if (e->flags & EDGE_ABNORMAL)
4845 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
4846 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
4850 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
4851 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
4854 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
4855 ae_kill, &pre_insert_map, &pre_delete_map);
4856 sbitmap_vector_free (antloc);
4858 sbitmap_vector_free (ae_kill);
4860 sbitmap_free (trapping_expr);
4865 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
4866 the start of block BB.
4868 VISITED is a pointer to a working buffer for tracking which BB's have
4869 been visited. It is NULL for the top-level call.
4871 We treat reaching expressions that go through blocks containing the same
4872 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4873 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4874 2 as not reaching. The intent is to improve the probability of finding
4875 only one reaching expression and to reduce register lifetimes by picking
4876 the closest such expression. */
4879 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4880 basic_block occr_bb;
4887 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
4889 basic_block pred_bb = pred->src;
4891 if (pred->src == ENTRY_BLOCK_PTR
4892 /* Has this predecessor already been visited? */
4893 || visited[pred_bb->index])
4894 ;/* Nothing to do. */
4896 /* Does this predecessor generate this expression? */
4897 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
4899 /* Is this the occurrence we're looking for?
4900 Note that there's only one generating occurrence per block
4901 so we just need to check the block number. */
4902 if (occr_bb == pred_bb)
4905 visited[pred_bb->index] = 1;
4907 /* Ignore this predecessor if it kills the expression. */
4908 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
4909 visited[pred_bb->index] = 1;
4911 /* Neither gen nor kill. */
4914 visited[pred_bb->index] = 1;
4915 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4920 /* All paths have been checked. */
4924 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4925 memory allocated for that function is returned. */
4928 pre_expr_reaches_here_p (occr_bb, expr, bb)
4929 basic_block occr_bb;
4934 char *visited = (char *) xcalloc (last_basic_block, 1);
4936 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4943 /* Given an expr, generate RTL which we can insert at the end of a BB,
4944 or on an edge. Set the block number of any insns generated to
4945 the value of BB. */
4948 process_insert_insn (expr)
4951 rtx reg = expr->reaching_reg;
4952 rtx exp = copy_rtx (expr->expr);
4957 /* If the expression is something that's an operand, like a constant,
4958 just copy it to a register. */
4959 if (general_operand (exp, GET_MODE (reg)))
4960 emit_move_insn (reg, exp);
4962 /* Otherwise, make a new insn to compute this expression and make sure the
4963 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4964 expression to make sure we don't have any sharing issues. */
4965 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4974 /* Add EXPR to the end of basic block BB.
4976 This is used by both PRE and code hoisting.
4978 For PRE, we want to verify that the expr is either transparent
4979 or locally anticipatable in the target block. This check makes
4980 no sense for code hoisting. */
4983 insert_insn_end_bb (expr, bb, pre)
4990 rtx reg = expr->reaching_reg;
4991 int regno = REGNO (reg);
4994 pat = process_insert_insn (expr);
4995 if (pat == NULL_RTX || ! INSN_P (pat))
4999 while (NEXT_INSN (pat_end) != NULL_RTX)
5000 pat_end = NEXT_INSN (pat_end);
5002 /* If the last insn is a jump, insert EXPR in front [taking care to
5003 handle cc0, etc. properly]. Similarly we need to take care of trapping
5004 instructions in the presence of non-call exceptions. */
5006 if (GET_CODE (insn) == JUMP_INSN
5007 || (GET_CODE (insn) == INSN
5008 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5013 /* It should always be the case that we can put these instructions
5014 anywhere in the basic block when performing PRE optimizations.
5015 Check this. */
5016 if (GET_CODE (insn) == INSN && pre
5017 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5018 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5021 /* If this is a jump table, then we can't insert stuff here. Since
5022 we know the previous real insn must be the tablejump, we insert
5023 the new instruction just before the tablejump. */
5024 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5025 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5026 insn = prev_real_insn (insn);
5029 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5030 if cc0 isn't set. */
5031 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5033 insn = XEXP (note, 0);
5036 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5037 if (maybe_cc0_setter
5038 && INSN_P (maybe_cc0_setter)
5039 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5040 insn = maybe_cc0_setter;
5043 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5044 new_insn = emit_insn_before (pat, insn);
5047 /* Likewise if the last insn is a call, as will happen in the presence
5048 of exception handling. */
5049 else if (GET_CODE (insn) == CALL_INSN
5050 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5052 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5053 we search backward and place the instructions before the first
5054 parameter is loaded. Do this for everyone for consistency and a
5055 presumption that we'll get better code elsewhere as well.
5057 It should always be the case that we can put these instructions
5058 anywhere in the basic block when performing PRE optimizations.
5059 Check this. */
5061 if (pre
5062 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5063 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5066 /* Since different machines initialize their parameter registers
5067 in different orders, assume nothing. Collect the set of all
5068 parameter registers. */
5069 insn = find_first_parameter_load (insn, bb->head);
5071 /* If we found all the parameter loads, then we want to insert
5072 before the first parameter load.
5074 If we did not find all the parameter loads, then we might have
5075 stopped on the head of the block, which could be a CODE_LABEL.
5076 If we inserted before the CODE_LABEL, then we would be putting
5077 the insn in the wrong basic block. In that case, put the insn
5078 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5079 while (GET_CODE (insn) == CODE_LABEL
5080 || NOTE_INSN_BASIC_BLOCK_P (insn))
5081 insn = NEXT_INSN (insn);
5083 new_insn = emit_insn_before (pat, insn);
5086 new_insn = emit_insn_after (pat, insn);
5092 add_label_notes (PATTERN (pat), new_insn);
5093 note_stores (PATTERN (pat), record_set_info, pat);
5097 pat = NEXT_INSN (pat);
5100 gcse_create_count++;
5104 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5105 bb->index, INSN_UID (new_insn));
5106 fprintf (gcse_file, "copying expression %d to reg %d\n",
5107 expr->bitmap_index, regno);
5111 /* Insert partially redundant expressions on edges in the CFG to make
5112 the expressions fully redundant. */
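/* E.g. (hand-written sketch):

     if (cond)
       y = a + b;
     z = a + b;

   "a + b" is only partially redundant at the second computation;
   evaluating it on the edge taken when COND is false makes it fully
   redundant, after which the second computation can be replaced by a
   copy from the reaching register. */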
5115 pre_edge_insert (edge_list, index_map)
5116 struct edge_list *edge_list;
5117 struct expr **index_map;
5119 int e, i, j, num_edges, set_size, did_insert = 0;
5122 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5123 if it reaches any of the deleted expressions. */
5125 set_size = pre_insert_map[0]->size;
5126 num_edges = NUM_EDGES (edge_list);
5127 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5128 sbitmap_vector_zero (inserted, num_edges);
5130 for (e = 0; e < num_edges; e++)
5133 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5135 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5137 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5139 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5140 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5142 struct expr *expr = index_map[j];
5145 /* Now look at each deleted occurrence of this expression. */
5146 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5148 if (! occr->deleted_p)
5151 /* Insert this expression on this edge if it would
5152 reach the deleted occurrence in BB. */
5153 if (!TEST_BIT (inserted[e], j))
5156 edge eg = INDEX_EDGE (edge_list, e);
5158 /* We can't insert anything on an abnormal and
5159 critical edge, so we insert the insn at the end of
5160 the previous block. There are several alternatives
5161 detailed in Morgan's book, p. 277 (sec. 10.5), for
5162 handling this situation. This one is easiest for
5163 now. */
5165 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5166 insert_insn_end_bb (index_map[j], bb, 0);
5169 insn = process_insert_insn (index_map[j]);
5170 insert_insn_on_edge (insn, eg);
5175 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5177 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5178 fprintf (gcse_file, "copy expression %d\n",
5179 expr->bitmap_index);
5182 update_ld_motion_stores (expr);
5183 SET_BIT (inserted[e], j);
5185 gcse_create_count++;
5192 sbitmap_vector_free (inserted);
5196 /* Copy the result of INSN to REG. INDX is the expression number. */
5199 pre_insert_copy_insn (expr, insn)
5203 rtx reg = expr->reaching_reg;
5204 int regno = REGNO (reg);
5205 int indx = expr->bitmap_index;
5206 rtx set = single_set (insn);
5212 new_insn = emit_insn_after (gen_move_insn (reg, SET_DEST (set)), insn);
5214 /* Keep register set table up to date. */
5215 record_one_set (regno, new_insn);
5217 gcse_create_count++;
5221 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5222 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5223 INSN_UID (insn), regno);
5224 update_ld_motion_stores (expr);
5227 /* Copy available expressions that reach the redundant expression
5228 to `reaching_reg'. */
5231 pre_insert_copies ()
5238 /* For each available expression in the table, copy the result to
5239 `reaching_reg' if the expression reaches a deleted one.
5241 ??? The current algorithm is rather brute force.
5242 Need to do some profiling. */
5244 for (i = 0; i < expr_hash_table.size; i++)
5245 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5247 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5248 we don't want to insert a copy here because the expression may not
5249 really be redundant. So only insert an insn if the expression was
5250 deleted. This test also avoids further processing if the
5251 expression wasn't deleted anywhere. */
5252 if (expr->reaching_reg == NULL)
5255 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5257 if (! occr->deleted_p)
5260 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5262 rtx insn = avail->insn;
5264 /* No need to handle this one if handled already. */
5265 if (avail->copied_p)
5268 /* Don't handle this one if it's a redundant one. */
5269 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5272 /* Or if the expression doesn't reach the deleted one. */
5273 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5275 BLOCK_FOR_INSN (occr->insn)))
5278 /* Copy the result of avail to reaching_reg. */
5279 pre_insert_copy_insn (expr, insn);
5280 avail->copied_p = 1;
5286 /* Emit move from SRC to DEST noting the equivalence with expression computed
5287 in INSN. */
5289 gcse_emit_move_after (src, dest, insn)
5290 rtx src, dest, insn;
5293 rtx set = single_set (insn), set2;
5297 /* This should never fail since we're creating a reg->reg copy
5298 we've verified to be valid. */
5300 new = emit_insn_after (gen_move_insn (dest, src), insn);
5302 /* Note the equivalence for local CSE pass. */
5303 set2 = single_set (new);
5304 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5306 if ((note = find_reg_equal_equiv_note (insn)))
5307 eqv = XEXP (note, 0);
5309 eqv = SET_SRC (set);
5311 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5316 /* Delete redundant computations.
5317 Deletion is done by changing the insn to copy the `reaching_reg' of
5318 the expression into the result of the SET. It is left to later passes
5319 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5321 Returns nonzero if a change is made. */
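/* E.g. (sketch): a redundant

     (set (reg 105) (plus:SI (reg 103) (reg 104)))

   becomes

     (set (reg 105) (reg 130))

   where (reg 130) is the `reaching_reg' pseudo holding the value of
   the expression computed on every path to this point. */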
5332 for (i = 0; i < expr_hash_table.size; i++)
5333 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5335 int indx = expr->bitmap_index;
5337 /* We only need to search antic_occr since we require
5338 ANTLOC != 0. */
5340 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5342 rtx insn = occr->insn;
5344 basic_block bb = BLOCK_FOR_INSN (insn);
5346 if (TEST_BIT (pre_delete_map[bb->index], indx))
5348 set = single_set (insn);
5352 /* Create a pseudo-reg to store the result of reaching
5353 expressions into. Get the mode for the new pseudo from
5354 the mode of the original destination pseudo. */
5355 if (expr->reaching_reg == NULL)
5357 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5359 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5361 occr->deleted_p = 1;
5362 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5369 "PRE: redundant insn %d (expression %d) in ",
5370 INSN_UID (insn), indx);
5371 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5372 bb->index, REGNO (expr->reaching_reg));
5381 /* Perform GCSE optimizations using PRE.
5382 This is called by one_pre_gcse_pass after all the dataflow analysis
5385 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5386 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5387 Compiler Design and Implementation.
5389 ??? A new pseudo reg is created to hold the reaching expression. The nice
5390 thing about the classical approach is that it would try to use an existing
5391 reg. If the register can't be adequately optimized [i.e. we introduce
5392 reload problems], one could add a pass here to propagate the new register
5393 through the block.
5395 ??? We don't handle single sets in PARALLELs because we're [currently] not
5396 able to copy the rest of the parallel when we insert copies to create full
5397 redundancies from partial redundancies. However, there's no reason why we
5398 can't handle PARALLELs in the cases where there are no partial
5399 redundancies. */
5405 int did_insert, changed;
5406 struct expr **index_map;
5409 /* Compute a mapping from expression number (`bitmap_index') to
5410 hash table entry. */
5412 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5413 for (i = 0; i < expr_hash_table.size; i++)
5414 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5415 index_map[expr->bitmap_index] = expr;
5417 /* Reset bitmap used to track which insns are redundant. */
5418 pre_redundant_insns = sbitmap_alloc (max_cuid);
5419 sbitmap_zero (pre_redundant_insns);
5421 /* Delete the redundant insns first so that
5422 - we know what register to use for the new insns and for the other
5423 ones with reaching expressions
5424 - we know which insns are redundant when we go to create copies */
5426 changed = pre_delete ();
5428 did_insert = pre_edge_insert (edge_list, index_map);
5430 /* In other places with reaching expressions, copy the expression to the
5431 specially allocated pseudo-reg that reaches the redundant expr. */
5432 pre_insert_copies ();
5435 commit_edge_insertions ();
5440 sbitmap_free (pre_redundant_insns);
5444 /* Top level routine to perform one PRE GCSE pass.
5446 Return nonzero if a change was made. */
5449 one_pre_gcse_pass (pass)
5454 gcse_subst_count = 0;
5455 gcse_create_count = 0;
5457 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5458 add_noreturn_fake_exit_edges ();
5460 compute_ld_motion_mems ();
5462 compute_hash_table (&expr_hash_table);
5463 trim_ld_motion_mems ();
5465 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5467 if (expr_hash_table.n_elems > 0)
5469 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5470 compute_pre_data ();
5471 changed |= pre_gcse ();
5472 free_edge_list (edge_list);
5477 remove_fake_edges ();
5478 free_hash_table (&expr_hash_table);
5482 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5483 current_function_name, pass, bytes_used);
5484 fprintf (gcse_file, "%d substs, %d insns created\n",
5485 gcse_subst_count, gcse_create_count);
5491 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5492 If notes are added to an insn which references a CODE_LABEL, the
5493 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5494 because the following loop optimization pass requires them. */
5496 /* ??? This is very similar to the loop.c add_label_notes function. We
5497 could probably share code here. */
5499 /* ??? If there was a jump optimization pass after gcse and before loop,
5500 then we would not need to do this here, because jump would add the
5501 necessary REG_LABEL notes. */
5504 add_label_notes (x, insn)
5508 enum rtx_code code = GET_CODE (x);
5512 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5514 /* This code used to ignore labels that referred to dispatch tables to
5515 avoid flow generating (slightly) worse code.
5517 We no longer ignore such label references (see LABEL_REF handling in
5518 mark_jump_label for additional information). */
5520 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5522 if (LABEL_P (XEXP (x, 0)))
5523 LABEL_NUSES (XEXP (x, 0))++;
5527 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5530 add_label_notes (XEXP (x, i), insn);
5531 else if (fmt[i] == 'E')
5532 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5533 add_label_notes (XVECEXP (x, i, j), insn);
5537 /* Compute transparent outgoing information for each block.
5539 An expression is transparent to an edge unless it is killed by
5540 the edge itself. This can only happen with abnormal control flow,
5541 when the edge is traversed through a call. This happens with
5542 non-local labels and exceptions.
5544 This would not be necessary if we split the edge. While this is
5545 normally impossible for abnormal critical edges, with some effort
5546 it should be possible with exception handling, since we still have
5547 control over which handler should be invoked. But due to increased
5548 EH table sizes, this may not be worthwhile. */
5551 compute_transpout ()
5557 sbitmap_vector_ones (transpout, last_basic_block);
5561 /* Note that flow inserted a nop at the end of basic blocks that
5562 end in call instructions for reasons other than abnormal
5563 control flow. */
5564 if (GET_CODE (bb->end) != CALL_INSN)
5567 for (i = 0; i < expr_hash_table.size; i++)
5568 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5569 if (GET_CODE (expr->expr) == MEM)
5571 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5572 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5575 /* ??? Optimally, we would use interprocedural alias
5576 analysis to determine if this mem is actually killed
5577 by this call. */
5578 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5583 /* Removal of useless null pointer checks */
5585 /* Called via note_stores. X is set by SETTER. If X is a register we must
5586 invalidate nonnull_local and set nonnull_killed. DATA is really a
5587 `null_pointer_info *'.
5589 We ignore hard registers. */
5592 invalidate_nonnull_info (x, setter, data)
5594 rtx setter ATTRIBUTE_UNUSED;
5598 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5600 while (GET_CODE (x) == SUBREG)
5603 /* Ignore anything that is not a register or is a hard register. */
5604 if (GET_CODE (x) != REG
5605 || REGNO (x) < npi->min_reg
5606 || REGNO (x) >= npi->max_reg)
5609 regno = REGNO (x) - npi->min_reg;
5611 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5612 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5615 /* Do null-pointer check elimination for the registers indicated in
5616 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5617 they are not our responsibility to free. */
5620 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5622 unsigned int *block_reg;
5623 sbitmap *nonnull_avin;
5624 sbitmap *nonnull_avout;
5625 struct null_pointer_info *npi;
5627 basic_block bb, current_block;
5628 sbitmap *nonnull_local = npi->nonnull_local;
5629 sbitmap *nonnull_killed = npi->nonnull_killed;
5630 int something_changed = 0;
5632 /* Compute local properties, nonnull and killed. A register will have
5633 the nonnull property if at the end of the current block its value is
5634 known to be nonnull. The killed property indicates that somewhere in
5635 the block any information we had about the register is killed.
5637 Note that a register can have both properties in a single block. That
5638 indicates that it's killed, then later in the block a new value is
5639 computed. */
5640 sbitmap_vector_zero (nonnull_local, last_basic_block);
5641 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5643 FOR_EACH_BB (current_block)
5645 rtx insn, stop_insn;
5647 /* Set the current block for invalidate_nonnull_info. */
5648 npi->current_block = current_block;
5650 /* Scan each insn in the basic block looking for memory references and
5651 register sets. */
5652 stop_insn = NEXT_INSN (current_block->end);
5653 for (insn = current_block->head;
5655 insn = NEXT_INSN (insn))
5660 /* Ignore anything that is not a normal insn. */
5661 if (! INSN_P (insn))
5664 /* Basically ignore anything that is not a simple SET. We do have
5665 to make sure to invalidate nonnull_local and set nonnull_killed
5666 for such insns though. */
5667 set = single_set (insn);
5670 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5674 /* See if we've got a usable memory load. We handle it first
5675 in case it uses its address register as a dest (which kills
5676 the nonnull property). */
5677 if (GET_CODE (SET_SRC (set)) == MEM
5678 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5679 && REGNO (reg) >= npi->min_reg
5680 && REGNO (reg) < npi->max_reg)
5681 SET_BIT (nonnull_local[current_block->index],
5682 REGNO (reg) - npi->min_reg);
5684 /* Now invalidate stuff clobbered by this insn. */
5685 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5687 /* And handle stores, we do these last since any sets in INSN can
5688 not kill the nonnull property if it is derived from a MEM
5689 appearing in a SET_DEST. */
5690 if (GET_CODE (SET_DEST (set)) == MEM
5691 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5692 && REGNO (reg) >= npi->min_reg
5693 && REGNO (reg) < npi->max_reg)
5694 SET_BIT (nonnull_local[current_block->index],
5695 REGNO (reg) - npi->min_reg);
5699 /* Now compute global properties based on the local properties. This
5700 is a classic global availability algorithm. */
5701 compute_available (nonnull_local, nonnull_killed,
5702 nonnull_avout, nonnull_avin);
5704 /* Now look at each bb and see if it ends with a compare of a value
5705 against zero. */
5708 rtx last_insn = bb->end;
5709 rtx condition, earliest;
5710 int compare_and_branch;
5712 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5713 since BLOCK_REG[BB] is zero if this block did not end with a
5714 comparison against zero, this condition works. */
5715 if (block_reg[bb->index] < npi->min_reg
5716 || block_reg[bb->index] >= npi->max_reg)
5719 /* LAST_INSN is a conditional jump. Get its condition. */
5720 condition = get_condition (last_insn, &earliest);
5722 /* If we can't determine the condition then skip. */
5726 /* Is the register known to have a nonzero value? */
5727 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5730 /* Try to compute whether the compare/branch at the loop end is one or
5731 two instructions. */
5732 if (earliest == last_insn)
5733 compare_and_branch = 1;
5734 else if (earliest == prev_nonnote_insn (last_insn))
5735 compare_and_branch = 2;
5739 /* We know the register in this comparison is nonnull at exit from
5740 this block. We can optimize this comparison. */
5741 if (GET_CODE (condition) == NE)
5745 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5747 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5748 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5749 emit_barrier_after (new_jump);
5752 something_changed = 1;
5753 delete_insn (last_insn);
5754 if (compare_and_branch == 2)
5755 delete_insn (earliest);
5756 purge_dead_edges (bb);
5758 /* Don't check this block again. (Note that BLOCK_END is
5759 invalid here; we deleted the last instruction in the
5760 block.) */
5761 block_reg[bb->index] = 0;
5764 return something_changed;
5767 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5768 at compile time.
5770 This is conceptually similar to global constant/copy propagation and
5771 classic global CSE (it even uses the same dataflow equations as cprop).
5773 If a register is used as a memory address with the form (mem (reg)), then we
5774 know that REG can not be zero at that point in the program. Any instruction
5775 which sets REG "kills" this property.
5777 So, if every path leading to a conditional branch has an available memory
5778 reference of that form, then we know the register can not have the value
5779 zero at the conditional branch.
5781 So we merely need to compute the local properties and propagate that data
5782 around the cfg, then optimize where possible.
5784 We run this pass two times. Once before CSE, then again after CSE. This
5785 has proven to be the most profitable approach. It is rare for new
5786 optimization opportunities of this nature to appear after the first CSE
5787 pass.
5789 This could probably be integrated with global cprop with a little work. */
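/* A minimal source-level sketch of what gets removed (hypothetical
   example):

     int deref_then_check (int *p)
     {
       int x = *p;    -- (mem (reg P)) proves P nonnull here
       if (p == 0)    -- always false on every path to this test,
	 return -1;   -- so the comparison and branch are deleted
       return x;
     }

   Every path to the branch contains the load through P, so the EQ
   test against zero can be eliminated. */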
5792 delete_null_pointer_checks (f)
5793 rtx f ATTRIBUTE_UNUSED;
5795 sbitmap *nonnull_avin, *nonnull_avout;
5796 unsigned int *block_reg;
5801 struct null_pointer_info npi;
5802 int something_changed = 0;
5804 /* If we have only a single block, then there's nothing to do. */
5805 if (n_basic_blocks <= 1)
5808 /* Trying to perform global optimizations on flow graphs which have
5809 a high connectivity will take a long time and is unlikely to be
5810 particularly useful.
5812 In normal circumstances a cfg should have about twice as many edges
5813 as blocks. But we do not want to punish small functions which have
5814 a couple switch statements. So we require a relatively large number
5815 of basic blocks and the ratio of edges to blocks to be high. */
5816 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5819 /* We need four bitmaps, each with a bit for each register in each
5820 basic block. */
5821 max_reg = max_reg_num ();
5822 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5824 /* Allocate bitmaps to hold local and global properties. */
5825 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5826 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5827 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5828 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5830 /* Go through the basic blocks, seeing whether or not each block
5831 ends with a conditional branch whose condition is a comparison
5832 against zero. Record the register compared in BLOCK_REG. */
5833 block_reg = (unsigned int *) xcalloc (last_basic_block, sizeof (int));
5836 rtx last_insn = bb->end;
5837 rtx condition, earliest, reg;
5839 /* We only want conditional branches. */
5840 if (GET_CODE (last_insn) != JUMP_INSN
5841 || !any_condjump_p (last_insn)
5842 || !onlyjump_p (last_insn))
5845 /* LAST_INSN is a conditional jump. Get its condition. */
5846 condition = get_condition (last_insn, &earliest);
5848 /* If we were unable to get the condition, or it is not an equality
5849 comparison against zero then there's nothing we can do. */
5851 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5852 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5853 || (XEXP (condition, 1)
5854 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5857 /* We must be checking a register against zero. */
5858 reg = XEXP (condition, 0);
5859 if (GET_CODE (reg) != REG)
5862 block_reg[bb->index] = REGNO (reg);
5865 /* Go through the algorithm for each block of registers. */
5866 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5869 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5870 something_changed |= delete_null_pointer_checks_1 (block_reg,
5876 /* Free the table of registers compared at the end of every block. */
5880 sbitmap_vector_free (npi.nonnull_local);
5881 sbitmap_vector_free (npi.nonnull_killed);
5882 sbitmap_vector_free (nonnull_avin);
5883 sbitmap_vector_free (nonnull_avout);
5885 return something_changed;
5888 /* Code Hoisting variables and subroutines. */
5890 /* Very busy expressions. */
5891 static sbitmap *hoist_vbein;
5892 static sbitmap *hoist_vbeout;
5894 /* Hoistable expressions. */
5895 static sbitmap *hoist_exprs;
5897 /* Dominator bitmaps. */
5898 dominance_info dominators;
5900 /* ??? We could compute post dominators and run this algorithm in
5901 reverse to perform tail merging; doing so would probably be
5902 more effective than the tail merging code in jump.c.
5904 It's unclear if tail merging could be run in parallel with
5905 code hoisting. It would be nice. */
5907 /* Allocate vars used for code hoisting analysis. */
5910 alloc_code_hoist_mem (n_blocks, n_exprs)
5911 int n_blocks, n_exprs;
5913 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5914 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5915 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5917 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5918 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5919 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5920 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5923 /* Free vars used for code hoisting analysis. */
5926 free_code_hoist_mem ()
5928 sbitmap_vector_free (antloc);
5929 sbitmap_vector_free (transp);
5930 sbitmap_vector_free (comp);
5932 sbitmap_vector_free (hoist_vbein);
5933 sbitmap_vector_free (hoist_vbeout);
5934 sbitmap_vector_free (hoist_exprs);
5935 sbitmap_vector_free (transpout);
5937 free_dominance_info (dominators);
5940 /* Compute the very busy expressions at entry/exit from each block.
5942 An expression is very busy if all paths from a given point
5943 compute the expression. */
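/* Informally, the equations solved below are (a sketch in terms of
   the bitmaps used in this file):

     VBEIN(bb)  = ANTLOC(bb) | (VBEOUT(bb) & TRANSP(bb))
     VBEOUT(bb) = intersection of VBEIN over all successors of bb
		  (the empty set for the exit block)

   iterated until no bit changes. */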
5946 compute_code_hoist_vbeinout ()
5948 int changed, passes;
5951 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
5952 sbitmap_vector_zero (hoist_vbein, last_basic_block);
5961 /* We scan the blocks in the reverse order to speed up
5962 the convergence. */
5963 FOR_EACH_BB_REVERSE (bb)
5965 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
5966 hoist_vbeout[bb->index], transp[bb->index]);
5967 if (bb->next_bb != EXIT_BLOCK_PTR)
5968 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
5975 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5978 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5981 compute_code_hoist_data ()
5983 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5984 compute_transpout ();
5985 compute_code_hoist_vbeinout ();
5986 dominators = calculate_dominance_info (CDI_DOMINATORS);
5988 fprintf (gcse_file, "\n");
5991 /* Determine if the expression identified by EXPR_INDEX would
5992 reach BB unimpaired if it was placed at the end of EXPR_BB.
5994 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5995 to me that the expression must either be computed or transparent in
5996 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5997 would allow the expression to be hoisted out of loops, even if
5998 the expression wasn't a loop invariant.
6000 Contrast this to reachability for PRE where an expression is
6001 considered reachable if *any* path reaches instead of *all*
6002 paths. */
6005 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
6006 basic_block expr_bb;
6012 int visited_allocated_locally = 0;
6015 if (visited == NULL)
6017 visited_allocated_locally = 1;
6018 visited = xcalloc (last_basic_block, 1);
6021 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6023 basic_block pred_bb = pred->src;
6025 if (pred->src == ENTRY_BLOCK_PTR)
6027 else if (pred_bb == expr_bb)
6029 else if (visited[pred_bb->index])
6032 /* Does this predecessor generate this expression? */
6033 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6035 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6041 visited[pred_bb->index] = 1;
6042 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6047 if (visited_allocated_locally)
6050 return (pred == NULL);
6053 /* Actually perform code hoisting. */
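/* E.g. (hand-written sketch):

     if (cond)
       x = a + b;
     else
       y = a + b;

   "a + b" is very busy at the end of the dominating block and
   anticipatable in both arms, so it can be hoisted into the dominator
   and computed once instead of twice. */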
6058 basic_block bb, dominated;
6060 unsigned int domby_len;
6062 struct expr **index_map;
6065 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6067 /* Compute a mapping from expression number (`bitmap_index') to
6068 hash table entry. */
6070 index_map = (struct expr **) xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6071 for (i = 0; i < expr_hash_table.size; i++)
6072 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6073 index_map[expr->bitmap_index] = expr;
6075 /* Walk over each basic block looking for potentially hoistable
6076 expressions; nothing gets hoisted from the entry block. */
6080 int insn_inserted_p;
6082 domby_len = get_dominated_by (dominators, bb, &domby);
6083 /* Examine each expression that is very busy at the exit of this
6084 block. These are the potentially hoistable expressions. */
6085 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6089 if (TEST_BIT (hoist_vbeout[bb->index], i)
6090 && TEST_BIT (transpout[bb->index], i))
6092 /* We've found a potentially hoistable expression, now
6093 we look at every block BB dominates to see if it
6094 computes the expression. */
6095 for (j = 0; j < domby_len; j++)
6097 dominated = domby[j];
6098 /* Ignore self dominance. */
6099 if (bb == dominated)
6101 /* We've found a dominated block, now see if it computes
6102 the busy expression and whether or not moving that
6103 expression to the "beginning" of that block is safe. */
6104 if (!TEST_BIT (antloc[dominated->index], i))
6107 /* Note if the expression would reach the dominated block
6108 unimpaired if it was placed at the end of BB.
6110 Keep track of how many times this expression is hoistable
6111 from a dominated block into BB. */
6112 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6116 /* If we found more than one hoistable occurrence of this
6117 expression, then note it in the bitmap of expressions to
6118 hoist. It makes no sense to hoist things which are computed
6119 in only one BB, and doing so tends to pessimize register
6120 allocation. One could increase this value to try harder
6121 to avoid any possible code expansion due to register
6122 allocation issues; however experiments have shown that
6123 the vast majority of hoistable expressions are only movable
6124 from two successors, so raising this threshold is likely
6125 to nullify any benefit we get from code hoisting. */
6128 SET_BIT (hoist_exprs[bb->index], i);
6133 /* If we found nothing to hoist, then quit now. */
6140 /* Loop over all the hoistable expressions. */
6141 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6143 /* We want to insert the expression into BB only once, so
6144 note when we've inserted it. */
6145 insn_inserted_p = 0;
6147 /* These tests should be the same as the tests above. */
6148 if (TEST_BIT (hoist_vbeout[bb->index], i))
6150 /* We've found a potentially hoistable expression, now
6151 we look at every block BB dominates to see if it
6152 computes the expression. */
6153 for (j = 0; j < domby_len; j++)
6155 dominated = domby[j];
6156 /* Ignore self dominance. */
6157 if (bb == dominated)
6160 /* We've found a dominated block, now see if it computes
6161 the busy expression and whether or not moving that
6162 expression to the "beginning" of that block is safe. */
6163 if (!TEST_BIT (antloc[dominated->index], i))
6166 /* The expression is computed in the dominated block and
6167 it would be safe to compute it at the start of the
6168 dominated block. Now we have to determine if the
6169 expression would reach the dominated block if it was
6170 placed at the end of BB. */
6171 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6173 struct expr *expr = index_map[i];
6174 struct occr *occr = expr->antic_occr;
6178 /* Find the right occurrence of this expression. */
6179 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6182 /* Should never happen. */
6188 set = single_set (insn);
6192 /* Create a pseudo-reg to store the result of reaching
6193 expressions into. Get the mode for the new pseudo
6194 from the mode of the original destination pseudo. */
6195 if (expr->reaching_reg == NULL)
6197 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6199 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6201 occr->deleted_p = 1;
6202 if (!insn_inserted_p)
6204 insert_insn_end_bb (index_map[i], bb, 0);
6205 insn_inserted_p = 1;
6217 /* Top level routine to perform one code hoisting (aka unification) pass
6219 Return nonzero if a change was made. */
6222 one_code_hoisting_pass ()
6226 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6227 compute_hash_table (&expr_hash_table);
6229 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6231 if (expr_hash_table.n_elems > 0)
6233 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6234 compute_code_hoist_data ();
6236 free_code_hoist_mem ();
6239 free_hash_table (&expr_hash_table);
6244 /* Here we provide the things required to do store motion towards
6245 the exit. In order for this to be effective, gcse also needed to
6246 be taught how to move a load when it is killed only by a store to itself.
6248 int i;
6249 float a[10];
6251 void foo(float scale)
6252 {
6253 for (i=0; i<10; i++)
6254 a[i] *= scale;
6255 }
6257 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6258 the load out since it's live around the loop, and stored at the bottom
6259 of the loop.
6261 The 'Load Motion' referred to and implemented in this file is
6262 an enhancement to gcse which when using edge based lcm, recognizes
6263 this situation and allows gcse to move the load out of the loop.
6265 Once gcse has hoisted the load, store motion can then push this
6266 load towards the exit, and we end up with no loads or stores of 'i'
6267 in the loop. */
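/* The combined effect on the example above can be sketched by hand as

     void foo (float scale)
     {
       int i_tmp;			-- stands for the reaching register
       for (i_tmp = 0; i_tmp < 10; i_tmp++)
	 a[i_tmp] *= scale;
       i = i_tmp;			-- single store pushed to the exit
     }

   though the actual transformation happens on RTL, not at the source
   level. */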
6269 /* This will search the ldst list for a matching expression. If it
6270 doesn't find one, we create one and initialize it. */
6272 static struct ls_expr *
6276 struct ls_expr * ptr;
6278 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6279 if (expr_equiv_p (ptr->pattern, x))
6284 ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));
6286 ptr->next = pre_ldst_mems;
6289 ptr->loads = NULL_RTX;
6290 ptr->stores = NULL_RTX;
6291 ptr->reaching_reg = NULL_RTX;
6294 ptr->hash_index = 0;
6295 pre_ldst_mems = ptr;
6301 /* Free up an individual ldst entry. */
6304 free_ldst_entry (ptr)
6305 struct ls_expr * ptr;
6307 free_INSN_LIST_list (& ptr->loads);
6308 free_INSN_LIST_list (& ptr->stores);
6313 /* Free up all memory associated with the ldst list. */
6318 while (pre_ldst_mems)
6320 struct ls_expr * tmp = pre_ldst_mems;
6322 pre_ldst_mems = pre_ldst_mems->next;
6324 free_ldst_entry (tmp);
6327 pre_ldst_mems = NULL;
6330 /* Dump debugging info about the ldst list. */
6333 print_ldst_list (file)
6336 struct ls_expr * ptr;
6338 fprintf (file, "LDST list: \n");
6340 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6342 fprintf (file, " Pattern (%3d): ", ptr->index);
6344 print_rtl (file, ptr->pattern);
6346 fprintf (file, "\n Loads : ");
6349 print_rtl (file, ptr->loads);
6351 fprintf (file, "(nil)");
6353 fprintf (file, "\n Stores : ");
6356 print_rtl (file, ptr->stores);
6358 fprintf (file, "(nil)");
6360 fprintf (file, "\n\n");
6363 fprintf (file, "\n");
6366 /* Return the entry in the list of ldst only expressions matching X, or NULL. */
6368 static struct ls_expr *
6369 find_rtx_in_ldst (x)
6372 struct ls_expr * ptr;
6374 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6375 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6381 /* Assign each element of the list of mems a monotonically increasing value. */
6386 struct ls_expr * ptr;
6389 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6395 /* Return first item in the list. */
6397 static inline struct ls_expr *
6400 return pre_ldst_mems;
6403 /* Return the next item in the list after the specified one. */
6405 static inline struct ls_expr *
6407 struct ls_expr * ptr;
6412 /* Load Motion for loads which only kill themselves. */
6414 /* Return true if x is a simple MEM operation, with no registers or
6415 side effects. These are the types of loads we consider for the
6416 ld_motion list, otherwise we let the usual aliasing take care of it. */
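/* For example, (mem:SI (symbol_ref "i")) qualifies as simple, while a
   volatile MEM, a BLKmode MEM, or (mem:SI (reg 100)) -- whose address
   can vary -- does not (illustrative RTL, not taken from a dump). */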
6422 if (GET_CODE (x) != MEM)
6425 if (MEM_VOLATILE_P (x))
6428 if (GET_MODE (x) == BLKmode)
6431 if (!rtx_varies_p (XEXP (x, 0), 0))
6437 /* Make sure there isn't a buried reference in this pattern anywhere.
6438 If there is, invalidate the entry for it since we're not capable
6439 of fixing it up just yet. We have to be sure we know about ALL
6440 loads since the aliasing code will allow all entries in the
6441 ld_motion list to not-alias itself. If we miss a load, we will get
6442 the wrong value since gcse might common it and we won't know to
6443 fix it up. */
6446 invalidate_any_buried_refs (x)
6451 struct ls_expr * ptr;
6453 /* Invalidate it in the list. */
6454 if (GET_CODE (x) == MEM && simple_mem (x))
6456 ptr = ldst_entry (x);
6460 /* Recursively process the insn. */
6461 fmt = GET_RTX_FORMAT (GET_CODE (x));
6463 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6466 invalidate_any_buried_refs (XEXP (x, i));
6467 else if (fmt[i] == 'E')
6468 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6469 invalidate_any_buried_refs (XVECEXP (x, i, j));
6473 /* Find all the 'simple' MEMs which are used in LOADs and STORES. 'Simple'
6474 means MEM loads from and stores to symbols, with no
6475 side effects and no registers in the expression. If there are any
6476 uses/defs which don't match these criteria, the entry is invalidated and
6477 trimmed out later. */
6480 compute_ld_motion_mems ()
6482 struct ls_expr * ptr;
6486 pre_ldst_mems = NULL;
6490 for (insn = bb->head;
6491 insn && insn != NEXT_INSN (bb->end);
6492 insn = NEXT_INSN (insn))
6494 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6496 if (GET_CODE (PATTERN (insn)) == SET)
6498 rtx src = SET_SRC (PATTERN (insn));
6499 rtx dest = SET_DEST (PATTERN (insn));
6501 /* Check for a simple LOAD... */
6502 if (GET_CODE (src) == MEM && simple_mem (src))
6504 ptr = ldst_entry (src);
6505 if (GET_CODE (dest) == REG)
6506 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6512 /* Make sure there isn't a buried load somewhere. */
6513 invalidate_any_buried_refs (src);
6516 /* Check for stores. Don't worry about aliased ones, they
6517 will block any movement we might do later. We only care
6518 about this exact pattern since this is the only
6519 circumstance in which we will ignore the aliasing info. */
6520 if (GET_CODE (dest) == MEM && simple_mem (dest))
6522 ptr = ldst_entry (dest);
6524 if (GET_CODE (src) != MEM
6525 && GET_CODE (src) != ASM_OPERANDS)
6526 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6532 invalidate_any_buried_refs (PATTERN (insn));
6538 /* Remove any references that have been either invalidated or are not in the
6539 expression list for pre gcse. */
6542 trim_ld_motion_mems ()
6544 struct ls_expr * last = NULL;
6545 struct ls_expr * ptr = first_ls_expr ();
6549 int del = ptr->invalid;
6550 struct expr * expr = NULL;
6552 /* Delete if entry has been made invalid. */
6558 /* Delete if we cannot find this mem in the expression list. */
6559 for (i = 0; i < expr_hash_table.size && del; i++)
6561 for (expr = expr_hash_table.table[i];
6563 expr = expr->next_same_hash)
6564 if (expr_equiv_p (expr->expr, ptr->pattern))
6576 last->next = ptr->next;
6577 free_ldst_entry (ptr);
6582 pre_ldst_mems = pre_ldst_mems->next;
6583 free_ldst_entry (ptr);
6584 ptr = pre_ldst_mems;
6589 /* Set the expression field if we are keeping it. */
6596 /* Show the world what we've found. */
6597 if (gcse_file && pre_ldst_mems != NULL)
6598 print_ldst_list (gcse_file);
6601 /* This routine will take an expression which we are replacing with
6602 a reaching register, and update any stores that are needed if
6603 that expression is in the ld_motion list. Stores are updated by
6604 copying their SRC to the reaching register, and then storing
6605 the reaching register into the store location. This keeps the
6606 correct value in the reaching register for the loads. */
6609 update_ld_motion_stores (expr)
6612 struct ls_expr * mem_ptr;
6614 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
6616 /* We can try to find just the REACHED stores, but it shouldn't
6617 matter to set the reaching reg everywhere... some might be
6618 dead and should be eliminated later. */
6620 /* We replace SET mem = expr with
6621 SET reg = expr
6622 SET mem = reg , where reg is the
6623 reaching reg used in the load. */
6624 rtx list = mem_ptr->stores;
6626 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6628 rtx insn = XEXP (list, 0);
6629 rtx pat = PATTERN (insn);
6630 rtx src = SET_SRC (pat);
6631 rtx reg = expr->reaching_reg;
6634 /* If we've already copied it, continue. */
6635 if (expr->reaching_reg == src)
6640 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6641 print_rtl (gcse_file, expr->reaching_reg);
6642 fprintf (gcse_file, ":\n ");
6643 print_inline_rtx (gcse_file, insn, 8);
6644 fprintf (gcse_file, "\n");
6647 copy = gen_move_insn ( reg, SET_SRC (pat));
6648 new = emit_insn_before (copy, insn);
6649 record_one_set (REGNO (reg), new);
6650 SET_SRC (pat) = reg;
6652 /* un-recognize this pattern since it's probably different now. */
6653 INSN_CODE (insn) = -1;
6654 gcse_create_count++;
6659 /* Store motion code. */
6661 /* This is used to communicate the target bitvector we want to use in the
6662 reg_set_info routine when called via the note_stores mechanism. */
6663 static sbitmap * regvec;
6665 /* Used in computing the reverse edge graph bit vectors. */
6666 static sbitmap * st_antloc;
6668 /* Global holding the number of store expressions we are dealing with. */
6669 static int num_stores;
6671 /* Checks to see if we need to mark a register set. Called from note_stores. */
6674 reg_set_info (dest, setter, data)
6675 rtx dest, setter ATTRIBUTE_UNUSED;
6676 void * data ATTRIBUTE_UNUSED;
6678 if (GET_CODE (dest) == SUBREG)
6679 dest = SUBREG_REG (dest);
6681 if (GET_CODE (dest) == REG)
6682 SET_BIT (*regvec, REGNO (dest));
6685 /* Return nonzero if the register operands of expression X are killed
6686 anywhere in basic block BB. */
6689 store_ops_ok (x, bb)
6697 /* Repeat is used to turn tail-recursion into iteration. */
6703 code = GET_CODE (x);
6707 /* If a reg has changed after us in this
6708 block, the operand has been killed. */
6709 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
6737 i = GET_RTX_LENGTH (code) - 1;
6738 fmt = GET_RTX_FORMAT (code);
6744 rtx tem = XEXP (x, i);
6746 /* If we are about to do the last recursive call
6747 needed at this level, change it into iteration.
6748 This function is called enough to be worth it. */
6755 if (! store_ops_ok (tem, bb))
6758 else if (fmt[i] == 'E')
6762 for (j = 0; j < XVECLEN (x, i); j++)
6764 if (! store_ops_ok (XVECEXP (x, i, j), bb))
6773 /* Determine whether INSN is a MEM store pattern that we will consider moving.  */
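/* For instance (an illustrative example): a store "x = y + 1" to a
   file-scope "int x" has the pattern
       (set (mem:SI (symbol_ref ("x"))) (plus:SI (reg:SI 100) (const_int 1)))
   and is accepted, while a store through a varying pointer "*p = y",
   a volatile MEM, or a BLKmode MEM (e.g. a struct copy) is rejected.  */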
6776 find_moveable_store (insn)
6779 struct ls_expr * ptr;
6780 rtx dest = PATTERN (insn);
6782 if (GET_CODE (dest) != SET
6783 || GET_CODE (SET_SRC (dest)) == ASM_OPERANDS)
6786 dest = SET_DEST (dest);
6788 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
6789 || GET_MODE (dest) == BLKmode)
6792 if (GET_CODE (XEXP (dest, 0)) != SYMBOL_REF)
6795 if (rtx_varies_p (XEXP (dest, 0), 0))
6798 ptr = ldst_entry (dest);
6799 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6802 /* Build the store table: record which registers are set in each basic
6803    block and collect the moveable stores.  Return the number of distinct
     store expressions found.  */
6806 compute_store_table ()
6813 max_gcse_regno = max_reg_num ();
6815 reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
6817 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
6820 /* Find all the stores we care about. */
6823 regvec = & (reg_set_in_block[bb->index]);
6824 for (insn = bb->end;
6825 	   insn && insn != PREV_INSN (bb->head);
6826 insn = PREV_INSN (insn))
6828 /* Ignore anything that is not a normal insn. */
6829 if (! INSN_P (insn))
6832 if (GET_CODE (insn) == CALL_INSN)
6834 bool clobbers_all = false;
6835 #ifdef NON_SAVING_SETJMP
6836 if (NON_SAVING_SETJMP
6837 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
6838 	    clobbers_all = true;
6839 #endif
6841 	  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6842 	    if (clobbers_all
6843 		|| TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
6844 	      SET_BIT (reg_set_in_block[bb->index], regno);
6847 pat = PATTERN (insn);
6848 note_stores (pat, reg_set_info, NULL);
6850 /* Now that we've marked regs, look for stores. */
6851 if (GET_CODE (pat) == SET)
6852 find_moveable_store (insn);
6856 ret = enumerate_ldsts ();
6860 fprintf (gcse_file, "Store Motion Expressions.\n");
6861 print_ldst_list (gcse_file);
6867 /* Check to see if the load X is aliased with STORE_PATTERN. */
6870 load_kills_store (x, store_pattern)
6871 rtx x, store_pattern;
6873 if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
6878 /* Go through the entire insn X, looking for any loads which might alias
6879 STORE_PATTERN. Return 1 if found. */
6882 find_loads (x, store_pattern)
6883 rtx x, store_pattern;
6892 if (GET_CODE (x) == SET)
6895 if (GET_CODE (x) == MEM)
6897 if (load_kills_store (x, store_pattern))
6901 /* Recursively process the insn. */
6902 fmt = GET_RTX_FORMAT (GET_CODE (x));
6904 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
6907 ret |= find_loads (XEXP (x, i), store_pattern);
6908 else if (fmt[i] == 'E')
6909 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6910 ret |= find_loads (XVECEXP (x, i, j), store_pattern);
6915 /* Check if INSN kills the store pattern X (is aliased with it).
6916    Return 1 if it does. */
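/* Illustration: sinking "MEM[a] = r1" past "r2 = MEM[b]" is only safe
   if a and b cannot overlap.  find_loads above treats every MEM found
   in INSN as a potential load and asks true_dependence whether it may
   alias the store pattern; calls that are not const are assumed to
   read memory as well.  */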
6919 store_killed_in_insn (x, insn)
6922 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
6925 if (GET_CODE (insn) == CALL_INSN)
6927 /* A normal or pure call might read from pattern,
6928 but a const call will not. */
6929 return ! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn);
6932 if (GET_CODE (PATTERN (insn)) == SET)
6934 rtx pat = PATTERN (insn);
6935 /* Check for memory stores to aliased objects. */
6936 if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
6937 	/* Pretend it's a load and check for aliasing.  */
6938 if (find_loads (SET_DEST (pat), x))
6940 return find_loads (SET_SRC (pat), x);
6943 return find_loads (PATTERN (insn), x);
6946 /* Returns 1 if the expression X is loaded or clobbered on or after INSN
6947 within basic block BB. */
6950 store_killed_after (x, insn, bb)
6959 /* Check if the register operands of the store are OK in this block.
6960 Note that if registers are changed ANYWHERE in the block, we'll
6961 decide we can't move it, regardless of whether it changed above
6962 or below the store. This could be improved by checking the register
6963 operands while looking for aliasing in each insn. */
6964 if (!store_ops_ok (XEXP (x, 0), bb))
6967 for ( ; insn && insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
6968 if (store_killed_in_insn (x, insn))
6974 /* Returns 1 if the expression X is loaded or clobbered on or before INSN
6975 within basic block BB. */
6977 store_killed_before (x, insn, bb)
6981 rtx first = bb->head;
6984 return store_killed_in_insn (x, insn);
6986 /* Check if the register operands of the store are OK in this block.
6987 Note that if registers are changed ANYWHERE in the block, we'll
6988 decide we can't move it, regardless of whether it changed above
6989 or below the store. This could be improved by checking the register
6990 operands while looking for aliasing in each insn. */
6991 if (!store_ops_ok (XEXP (x, 0), bb))
6994 for ( ; insn && insn != PREV_INSN (first); insn = PREV_INSN (insn))
6995 if (store_killed_in_insn (x, insn))
7001 #define ANTIC_STORE_LIST(x) ((x)->loads)
7002 #define AVAIL_STORE_LIST(x) ((x)->stores)
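/* Note that struct ls_expr has no dedicated fields for store motion:
   these macros reuse its load and store lists to hold the
   anticipatable and available store insns, respectively.  */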
7004 /* Given the table of available store insns at the end of blocks,
7005 determine which ones are not killed by aliasing, and generate
7006    the appropriate vectors for the gen and kill sets. */
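/* Roughly, for a store S in block B the vectors built below encode:
     ae_gen[B]    - S reaches the end of B, i.e. nothing after S in B
		    kills it (no aliased load/store, no clobber of its
		    address registers);
     st_antloc[B] - nothing before S in B kills it.
   For example, a load from the same location after S leaves S
   anticipatable in B but not available out of B.  */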
7008 build_store_vectors ()
7012 struct ls_expr * ptr;
7014 /* Build the gen_vector. This is any store in the table which is not killed
7015 by aliasing later in its block. */
7016 ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7017 sbitmap_vector_zero (ae_gen, last_basic_block);
7019 st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7020 sbitmap_vector_zero (st_antloc, last_basic_block);
7022 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7024       /* Put all the stores into either the antic list, or the avail list,
7025 	  or both.  */
7026 rtx store_list = ptr->stores;
7027 ptr->stores = NULL_RTX;
7029 for (st = store_list; st != NULL; st = XEXP (st, 1))
7031 insn = XEXP (st, 0);
7032 bb = BLOCK_FOR_INSN (insn);
7034 if (!store_killed_after (ptr->pattern, insn, bb))
7036 	      /* If we've already seen an available expression in this block,
7037 		 we can delete the one we saw already (it occurs earlier in
7038 		 the block) and replace it with this one.  We'll copy the
7039 		 old SRC expression to an unused register in case there
7040 		 are any side effects. */
7041 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7043 /* Find previous store. */
7045 for (st = AVAIL_STORE_LIST (ptr); st ; st = XEXP (st, 1))
7046 if (BLOCK_FOR_INSN (XEXP (st, 0)) == bb)
7050 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7052 fprintf (gcse_file, "Removing redundant store:\n");
7053 replace_store_insn (r, XEXP (st, 0), bb);
7054 XEXP (st, 0) = insn;
7058 SET_BIT (ae_gen[bb->index], ptr->index);
7059 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7060 AVAIL_STORE_LIST (ptr));
7063 if (!store_killed_before (ptr->pattern, insn, bb))
7065 SET_BIT (st_antloc[BLOCK_NUM (insn)], ptr->index);
7066 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (insn,
7067 ANTIC_STORE_LIST (ptr));
7071 /* Free the original list of store insns. */
7072 free_INSN_LIST_list (&store_list);
7075 ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7076 sbitmap_vector_zero (ae_kill, last_basic_block);
7078 transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
7079 sbitmap_vector_zero (transp, last_basic_block);
7081 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7084 if (store_killed_after (ptr->pattern, b->head, b))
7086 /* The anticipatable expression is not killed if it's gen'd. */
7088 We leave this check out for now. If we have a code sequence
7089      in a block which looks like:
7090 	       ST MEMa = x
7091 	       L     y = MEMa
7092 	       ST MEMa = z
7093      We should flag this as having an ANTIC expression, NOT
7094      transparent, NOT killed, and AVAIL.
7095      Unfortunately, since we haven't re-written all loads to
7096      use the reaching reg, we'll end up doing an incorrect
7097      load in the middle here if we push the store down.  It happens in
7098 	      gcc.c-torture/execute/960311-1.c with -O3.
7099 If we always kill it in this case, we'll sometimes do
7100 unnecessary work, but it shouldn't actually hurt anything.
7101 if (!TEST_BIT (ae_gen[b], ptr->index)). */
7102 SET_BIT (ae_kill[b->index], ptr->index);
7105 SET_BIT (transp[b->index], ptr->index);
7108 /* Any block with no exits calls some non-returning function, so
7109 we better mark the store killed here, or we might not store to
7110 it at all. If we knew it was abort, we wouldn't have to store,
7111 but we don't know that for sure. */
7114 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7115 print_ldst_list (gcse_file);
7116 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7117 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7118 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7119 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7123 /* Insert an instruction at the beginning of a basic block, and update
7124 the BLOCK_HEAD if needed. */
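/* E.g. if the block begins "CODE_LABEL; NOTE_INSN_BASIC_BLOCK note;
   insn1", the new insn is emitted after the note, so the label and the
   basic block note remain the first things in the block.  */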
7127 insert_insn_start_bb (insn, bb)
7131 /* Insert at start of successor block. */
7132 rtx prev = PREV_INSN (bb->head);
7133 rtx before = bb->head;
7136 if (GET_CODE (before) != CODE_LABEL
7137 && (GET_CODE (before) != NOTE
7138 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7141 if (prev == bb->end)
7143 before = NEXT_INSN (before);
7146 insn = emit_insn_after (insn, prev);
7150 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7152 print_inline_rtx (gcse_file, insn, 6);
7153 fprintf (gcse_file, "\n");
7157 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7158 the memory reference, and E is the edge to insert it on. Returns nonzero
7159 if an edge insertion was performed. */
7162 insert_store (expr, e)
7163 struct ls_expr * expr;
7170   /* We did all the deletes before this insert, so if we didn't delete a
7171 store, then we haven't set the reaching reg yet either. */
7172 if (expr->reaching_reg == NULL_RTX)
7175 reg = expr->reaching_reg;
7176 insn = gen_move_insn (expr->pattern, reg);
7178   /* If we are inserting this expression on ALL predecessor edges of a BB,
7179      insert it at the start of the BB instead, and reset the insert bits
7180      on the edges so we don't also insert it on each edge separately. */
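/* For example (illustrative): if block B has exactly two incoming
   edges (A1,B) and (A2,B) and pre_insert_map requests this store on
   both of them, one copy at the start of B is equivalent to, and
   cheaper than, two separate edge insertions.  */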
7182 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7184 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7185 if (index == EDGE_INDEX_NO_EDGE)
7187 if (! TEST_BIT (pre_insert_map[index], expr->index))
7191 /* If tmp is NULL, we found an insertion on every edge, blank the
7192 insertion vector for these edges, and insert at the start of the BB. */
7193 if (!tmp && bb != EXIT_BLOCK_PTR)
7195 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7197 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7198 RESET_BIT (pre_insert_map[index], expr->index);
7200 insert_insn_start_bb (insn, bb);
7204   /* We can't insert on this edge, so we'll insert at the head of the
7205      successor block.  See Morgan, sec 10.5. */
7206 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7208 insert_insn_start_bb (insn, bb);
7212 insert_insn_on_edge (insn, e);
7216 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7217 e->src->index, e->dest->index);
7218 print_inline_rtx (gcse_file, insn, 6);
7219 fprintf (gcse_file, "\n");
7225 /* This routine will replace a store with a SET to a specified register. */
7228 replace_store_insn (reg, del, bb)
7234 insn = gen_move_insn (reg, SET_SRC (PATTERN (del)));
7235 insn = emit_insn_after (insn, del);
7240 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7241 print_inline_rtx (gcse_file, del, 6);
7242 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7243 print_inline_rtx (gcse_file, insn, 6);
7244 fprintf (gcse_file, "\n");
7251 /* Delete a store, but copy the value that would have been stored into
7252 the reaching_reg for later storing. */
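/* In effect (a sketch): the store "MEM = src" is rewritten in place as
   "reaching_reg = src", and the actual "MEM = reaching_reg" is emitted
   later by insert_store on the edges chosen by LCM.  */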
7255 delete_store (expr, bb)
7256 struct ls_expr * expr;
7261 if (expr->reaching_reg == NULL_RTX)
7262 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7265 /* If there is more than 1 store, the earlier ones will be dead,
7266 but it doesn't hurt to replace them here. */
7267 reg = expr->reaching_reg;
7269 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7272 if (BLOCK_FOR_INSN (del) == bb)
7274 /* We know there is only one since we deleted redundant
7275 ones during the available computation. */
7276 replace_store_insn (reg, del, bb);
7282 /* Free memory used by store motion. */
7285 free_store_memory ()
7290 sbitmap_vector_free (ae_gen);
7292 sbitmap_vector_free (ae_kill);
7294 sbitmap_vector_free (transp);
7296 sbitmap_vector_free (st_antloc);
7298 sbitmap_vector_free (pre_insert_map);
7300 sbitmap_vector_free (pre_delete_map);
7301 if (reg_set_in_block)
7302 sbitmap_vector_free (reg_set_in_block);
7304 ae_gen = ae_kill = transp = st_antloc = NULL;
7305 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7308 /* Perform store motion. Much like gcse, except we move expressions the
7309 other way by looking at the flowgraph in reverse. */
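/* In outline, the pass below proceeds as follows:
     1. compute_store_table  - collect the candidate store expressions;
     2. build_store_vectors  - compute the antic, avail, kill and
	transparency vectors for each basic block;
     3. pre_edge_rev_lcm     - run lazy code motion on the reverse
	flowgraph;
     4. delete_store and insert_store, driven by pre_delete_map and
	pre_insert_map, then commit_edge_insertions.  */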
7316 struct ls_expr * ptr;
7317 int update_flow = 0;
7321 fprintf (gcse_file, "before store motion\n");
7322 print_rtl (gcse_file, get_insns ());
7326 init_alias_analysis ();
7328 /* Find all the stores that are live to the end of their block. */
7329 num_stores = compute_store_table ();
7330 if (num_stores == 0)
7332 sbitmap_vector_free (reg_set_in_block);
7333 end_alias_analysis ();
7337   /* Now compute what's actually available to move. */
7338 add_noreturn_fake_exit_edges ();
7339 build_store_vectors ();
7341 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7342 st_antloc, ae_kill, &pre_insert_map,
7345 /* Now we want to insert the new stores which are going to be needed. */
7346 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7349 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7350 delete_store (ptr, bb);
7352 for (x = 0; x < NUM_EDGES (edge_list); x++)
7353 if (TEST_BIT (pre_insert_map[x], ptr->index))
7354 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7358 commit_edge_insertions ();
7360 free_store_memory ();
7361 free_edge_list (edge_list);
7362 remove_fake_edges ();
7363 end_alias_analysis ();
7367 /* Entry point for the jump bypassing optimization pass. */
7375 /* We do not construct an accurate cfg in functions which call
7376 setjmp, so just punt to be safe. */
7377 if (current_function_calls_setjmp)
7380 /* For calling dump_foo fns from gdb. */
7381 debug_stderr = stderr;
7384 /* Identify the basic block information for this function, including
7385 successors and predecessors. */
7386 max_gcse_regno = max_reg_num ();
7389 dump_flow_info (file);
7391 /* Return if there's nothing to do. */
7392 if (n_basic_blocks <= 1)
7395 /* Trying to perform global optimizations on flow graphs which have
7396 a high connectivity will take a long time and is unlikely to be
7397 particularly useful.
7399 In normal circumstances a cfg should have about twice as many edges
7400 as blocks. But we do not want to punish small functions which have
7401    a couple of switch statements.  So we require a relatively large number
7402 of basic blocks and the ratio of edges to blocks to be high. */
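/* For example, a function with 2000 basic blocks and 50000 edges has
   an edge/block ratio of 25, so the test below fires and the pass is
   skipped; a typical function with ~2 edges per block always passes,
   however large it is.  */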
7403 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
7405 if (warn_disabled_optimization)
7406 warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
7407 n_basic_blocks, n_edges / n_basic_blocks);
7411 /* If allocating memory for the cprop bitmap would take up too much
7412 storage it's better just to disable the optimization. */
7414 * SBITMAP_SET_SIZE (max_gcse_regno)
7415 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7417 if (warn_disabled_optimization)
7418 warning ("GCSE disabled: %d basic blocks and %d registers",
7419 n_basic_blocks, max_gcse_regno);
7424 /* See what modes support reg/reg copy operations. */
7425 if (! can_copy_init_p)
7427 compute_can_copy ();
7428 can_copy_init_p = 1;
7431 gcc_obstack_init (&gcse_obstack);
7434 /* We need alias. */
7435 init_alias_analysis ();
7437 /* Record where pseudo-registers are set. This data is kept accurate
7438 during each pass. ??? We could also record hard-reg information here
7439    [since it's unchanging], however it is currently done during hash table
7440    computation.
7442 It may be tempting to compute MEM set information here too, but MEM sets
7443 will be subject to code motion one day and thus we need to compute
7444 information about memory sets when we build the hash tables. */
7446 alloc_reg_set_mem (max_gcse_regno);
7447 compute_sets (get_insns ());
7449 max_gcse_regno = max_reg_num ();
7450 alloc_gcse_mem (get_insns ());
7451 changed = one_cprop_pass (1, 1, 1);
7456 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7457 current_function_name, n_basic_blocks);
7458 fprintf (file, "%d bytes\n\n", bytes_used);
7461 obstack_free (&gcse_obstack, NULL);
7462 free_reg_set_mem ();
7464 /* We are finished with alias. */
7465 end_alias_analysis ();
7466 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7471 #include "gt-gcse.h"