/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.  See the
     illustrative sketch following this list.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
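/* An illustrative sketch of the store/load TODO item above (hypothetical
   RTL, not produced by this file):

       (set (reg 100) (mem (reg 101)))   ; load
       ...
       (set (mem (reg 101)) (reg 100))   ; store writes back the loaded value

   Because the store's source is the destination of the load, memory is
   unchanged by the store, so it need not be treated as killing the load
   for the purposes of load motion.  */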
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   communications of the acm, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Maximum number of passes to perform.  */
#define MAX_PASSES 1
/* Propagate flow information through back edges and thus enable PRE to
   move loop-invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note that much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).  An illustrative sketch follows.
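
   For instance (a source-level sketch only; the pass works on RTL, and
   this is not output of the pass): if `a + b' is computed on only one of
   two paths into a point that computes it again, it is partially
   redundant there:

       if (cond)                        if (cond)
         x = a + b;                       t = a + b, x = t;
       y = a + b;          ==>          else
                                          t = a + b;
                                        y = t;

   PRE inserts the computation on the path where it was missing and
   reuses a single pseudo-reg (t) for both, making the second computation
   fully redundant and deletable.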
   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  Macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Element I is a list of I's predecessors/successors.  */
static int_list_ptr *s_preds;
static int_list_ptr *s_succs;

/* Element I is the number of predecessors/successors of basic block I.  */
static int *num_preds;
static int *num_succs;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static int expr_hash_table_size;
/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;
/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
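/* For example (an illustrative sketch, not code used by the pass): after
   alloc_gcse_mem has built the mappings, real insns can be walked in a
   dense, monotonically increasing order with

     for (i = 0; i < max_cuid; i++)
       insn = CUID_INSN (i);

   and CUID_INSN (INSN_CUID (insn)) == insn holds for every real insn.  */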
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;
/* Maximum number of assignments for copy propagation found.  */
static int n_sets;
/* Table of registers that are modified.
   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses], so `reg_set_table'
   could be turned into an array of bitmaps
   [however perhaps it may be useful to keep the data as is].
   One advantage of recording things this way is that `reg_set_table' is
   fairly sparse with respect to pseudo regs but for hard regs could be
   fairly dense [relatively speaking].
   And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set {
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;
static reg_set **reg_set_table;
/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;
/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
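/* An illustrative sketch (not code in this file) of how a consumer walks
   this table: to visit each place pseudo REGNO is set, iterate over its
   list rather than over all basic blocks:

     reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ...  /* BLOCK_NUM (r->insn) is a block in which REGNO is set.  */

   compute_transp does essentially this when resetting transparency
   bits.  */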
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;
/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

extern char *current_function_name;
extern int current_function_calls_setjmp;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* A bitmap of all ones for implementing the algorithm for available
   expressions and reaching definitions.  */
/* ??? Available expression bitmaps have a different size than reaching
   definition bitmaps.  This should be the larger of the two, however, it
   is not currently used for reaching definitions.  */
static sbitmap u_bitmap;

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid - for reaching definitions
       n_exprs - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */
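/* So, for instance, testing whether expression E is killed in block BB
   is (an illustrative sketch):

     TEST_BIT (ae_kill[bb], e->bitmap_index)

   and the reaching-definition bitmaps are indexed by the cuid of the
   defining insn in the same way.  */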
/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
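/* The availability dataflow problem solved over these bitmaps is the
   classic one (a sketch; compute_available iterates this to a fixed
   point):

     ae_in[b]  = intersection of ae_out[p] over all predecessors p of b
     ae_out[b] = ae_gen[b] | (ae_in[b] & ~ae_kill[b])  */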
static void compute_can_copy PROTO ((void));

static char *gmalloc PROTO ((unsigned int));
static char *grealloc PROTO ((char *, unsigned int));
static char *gcse_alloc PROTO ((unsigned long));
static void alloc_gcse_mem PROTO ((rtx));
static void free_gcse_mem PROTO ((void));
static void alloc_reg_set_mem PROTO ((int));
static void free_reg_set_mem PROTO ((void));
static void record_one_set PROTO ((int, rtx));
static void record_set_info PROTO ((rtx, rtx));
static void compute_sets PROTO ((rtx));

static void hash_scan_insn PROTO ((rtx, int, int));
static void hash_scan_set PROTO ((rtx, rtx, int));
static void hash_scan_clobber PROTO ((rtx, rtx));
static void hash_scan_call PROTO ((rtx, rtx));
static int want_to_gcse_p PROTO ((rtx));
static int oprs_unchanged_p PROTO ((rtx, rtx, int));
static int oprs_anticipatable_p PROTO ((rtx, rtx));
static int oprs_available_p PROTO ((rtx, rtx));
static void insert_expr_in_table PROTO ((rtx, enum machine_mode,
					 rtx, int, int));
static void insert_set_in_table PROTO ((rtx, rtx));
static unsigned int hash_expr PROTO ((rtx, enum machine_mode,
				      int *, int));
static unsigned int hash_expr_1 PROTO ((rtx, enum machine_mode, int *));
static unsigned int hash_set PROTO ((int, int));
static int expr_equiv_p PROTO ((rtx, rtx));
static void record_last_reg_set_info PROTO ((rtx, int));
static void record_last_mem_set_info PROTO ((rtx));
static void record_last_set_info PROTO ((rtx, rtx));
static void compute_hash_table PROTO ((int));
static void alloc_set_hash_table PROTO ((int));
static void free_set_hash_table PROTO ((void));
static void compute_set_hash_table PROTO ((void));
static void alloc_expr_hash_table PROTO ((int));
static void free_expr_hash_table PROTO ((void));
static void compute_expr_hash_table PROTO ((void));
static void dump_hash_table PROTO ((FILE *, const char *, struct expr **,
				    int, int));
static struct expr *lookup_expr PROTO ((rtx));
static struct expr *lookup_set PROTO ((int, rtx));
static struct expr *next_set PROTO ((int, struct expr *));
static void reset_opr_set_tables PROTO ((void));
static int oprs_not_set_p PROTO ((rtx, rtx));
static void mark_call PROTO ((rtx));
static void mark_set PROTO ((rtx, rtx));
static void mark_clobber PROTO ((rtx, rtx));
static void mark_oprs_set PROTO ((rtx));

static void alloc_cprop_mem PROTO ((int, int));
static void free_cprop_mem PROTO ((void));
static void compute_transp PROTO ((rtx, int, sbitmap *, int));
static void compute_transpout PROTO ((void));
static void compute_local_properties PROTO ((sbitmap *, sbitmap *,
					     sbitmap *, int));
static void compute_cprop_avinout PROTO ((void));
static void compute_cprop_data PROTO ((void));
static void find_used_regs PROTO ((rtx));
static int try_replace_reg PROTO ((rtx, rtx, rtx));
static struct expr *find_avail_set PROTO ((int, rtx));
static int cprop_insn PROTO ((rtx, int));
static int cprop PROTO ((int));
static int one_cprop_pass PROTO ((int, int));

static void alloc_pre_mem PROTO ((int, int));
static void free_pre_mem PROTO ((void));
static void compute_pre_data PROTO ((void));
static int pre_expr_reaches_here_p PROTO ((int, struct expr *,
					   int, int, char *));
static void insert_insn_end_bb PROTO ((struct expr *, int, int));
static void pre_insert PROTO ((struct expr **));
static void pre_insert_copy_insn PROTO ((struct expr *, rtx));
static void pre_insert_copies PROTO ((void));
static int pre_delete PROTO ((void));
static int pre_gcse PROTO ((void));
static int one_pre_gcse_pass PROTO ((int));

static void add_label_notes PROTO ((rtx, rtx));

static void alloc_rd_mem PROTO ((int, int));
static void free_rd_mem PROTO ((void));
static void handle_rd_kill_set PROTO ((rtx, int, int));
static void compute_kill_rd PROTO ((void));
static void compute_rd PROTO ((void));
static void alloc_avail_expr_mem PROTO ((int, int));
static void free_avail_expr_mem PROTO ((void));
static void compute_ae_gen PROTO ((void));
static int expr_killed_p PROTO ((rtx, int));
static void compute_ae_kill PROTO ((void));
static void compute_available PROTO ((void));
static int expr_reaches_here_p PROTO ((struct occr *, struct expr *,
				       int, int));
static rtx computing_insn PROTO ((struct expr *, rtx));
static int def_reaches_here_p PROTO ((rtx, rtx));
static int can_disregard_other_sets PROTO ((struct reg_set **, rtx, int));
static int handle_avail_expr PROTO ((rtx, struct expr *));
static int classic_gcse PROTO ((void));
static int one_classic_gcse_pass PROTO ((int));
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();
  find_basic_blocks (f, max_gcse_regno, file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);

  /* Allocate and compute predecessors/successors.  */

  s_preds = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  s_succs = (int_list_ptr *) alloca (n_basic_blocks * sizeof (int_list_ptr));
  num_preds = (int *) alloca (n_basic_blocks * sizeof (int));
  num_succs = (int *) alloca (n_basic_blocks * sizeof (int));
  bytes_used = 4 * n_basic_blocks * sizeof (int_list_ptr);
  compute_preds_succs (s_preds, s_succs, num_preds, num_succs);

  if (file)
    dump_bb_data (file, s_preds, s_succs, 0);

  /* Record where pseudo-registers are set.
     This data is kept accurate during each pass.
     ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during
     hash table computation.

     It may be tempting to compute MEM set information here too, but MEM
     sets will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	changed |= one_pre_gcse_pass (pass + 1);

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      free_gcse_mem ();

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}
      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  /* Free our obstack.  */
  obstack_free (&gcse_obstack, NULL_PTR);
  /* Free reg_set_table.  */
  free_reg_set_mem ();
  /* Free storage used to record predecessor/successor data.  */
  free_bb_mem ();
  /* Free storage allocated by find_basic_blocks.  */
  free_basic_block_vars (0);
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  char *free_point = (char *) oballoc (1);

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_CC :
#ifdef AVOID_CCMODE_COPIES
	  can_copy_p[i] = 0;
#else
	  reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	  insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	  if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	    can_copy_p[i] = 1;
#endif
	  break;
	default :
	  can_copy_p[i] = 1;
	  break;
	}
    }
  end_sequence ();

  /* Free the objects we just allocated.  */
  obfree (free_point);
}
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	INSN_CUID (insn) = i++;
      else
	INSN_CUID (insn) = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  CUID_INSN (i) = insn;
	  i++;
	}
    }

  /* Allocate vars to track sets of regs.  */

  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}
/* Compute the local properties of each recorded expression.
   Local properties are those that are defined by the block, irrespective
   of other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all
   compute basically the same information and thus can easily share
   the code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording
   local properties.  If NULL, then it is not necessary to compute
   or record that particular property.

   SETP controls which hash table to look at.  If zero, this routine
   looks at the expr hash table; if nonzero this routine looks at
   the set hash table.  */
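/* For example (an illustrative sketch, not output of the pass), in a
   block containing

       r5 = r1 + r2
       r1 = ...
       r6 = r1 + r2

   the expression `r1 + r2' is locally anticipatable (the first
   computation precedes any modification of its operands) and locally
   available (no operand is modified after the last computation), but it
   is not transparent, since r1 is set within the block.  */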
static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    sbitmap_vector_ones (transp, n_basic_blocks);
  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  struct occr *occr;
	  int indx = expr->bitmap_index;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */

	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */

	  if (antloc)
	    {
	      for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (antloc[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->deleted_p = 0;
		}
	    }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    {
	      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (comp[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->copied_p = 0;
		}
	    }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info, *reg_info_ptr1, *reg_info_ptr2;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;
      reg_set_table = (struct reg_set **)
	grealloc ((char *) reg_set_table,
		  new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
	     (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = NULL;
  if (reg_set_table[regno] == NULL)
    reg_set_table[regno] = new_reg_info;
  else
    {
      reg_info_ptr1 = reg_info_ptr2 = reg_set_table[regno];
      /* ??? One could keep a "last" pointer to speed this up.  */
      while (reg_info_ptr1 != NULL)
	{
	  reg_info_ptr2 = reg_info_ptr1;
	  reg_info_ptr1 = reg_info_ptr1->next;
	}
      reg_info_ptr2->next = new_reg_info;
    }
}
/* For communication between next two functions (via note_stores).  */
static rtx record_set_insn;

/* Called from compute_sets via note_stores to handle one
   SET or CLOBBER in an insn.  */

static void
record_set_info (dest, setter)
     rtx dest, setter ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      if (REGNO (dest) >= FIRST_PSEUDO_REGISTER)
	record_one_set (REGNO (dest), record_set_insn);
    }
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.
   See the comments for `reg_set_table' for further docs.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn = f;

  while (insn)
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  record_set_insn = insn;
	  note_stores (PATTERN (insn), record_set_info);
	}
      insn = NEXT_INSN (insn);
    }
}
/* Hash table support.  */

#define NEVER_SET -1

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.
   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;
/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  return 1;
}
/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p)
	{
	  if (mem_last_set != NEVER_SET
	      && mem_last_set >= INSN_CUID (insn))
	    return 0;
	}
      else
	{
	  if (mem_first_set != NEVER_SET
	      && mem_first_set < INSN_CUID (insn))
	    return 0;
	}
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  if (! oprs_unchanged_p (tem, insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
		return 0;
	    }
	}
    }

  return 1;
}
/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.
   MODE is only used if X is a CONST_INT.
   A boolean indicating if a volatile operand is found or if the expression
   contains something we don't want to insert in the table is stored in
   DO_NOT_RECORD_P.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);
	hash += ((unsigned) REG << 7) + regno;
	return hash;
      }

    case CONST_INT:
      {
	unsigned HOST_WIDE_INT tem = INTVAL (x);
	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
	return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  {
	    unsigned tem = XINT (x, i);
	    hash += tem;
	  }
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += ((unsigned) LABEL_REF << 7) + CODE_LABEL_NUMBER (XEXP (x, 0));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	unsigned char *p = (unsigned char *) XSTR (x, 0);
	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */
	hash += ((unsigned) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      hash += (unsigned) MEM;
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += hash_expr_1 (tem, 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }
      else if (fmt[i] == 's')
	{
	  register unsigned char *p = (unsigned char *) XSTR (x, i);
	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	{
	  register unsigned tem = XINT (x, i);
	  hash += tem;
	}
      else
	abort ();
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.
   This simplifies the PRE copy propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return non-zero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
	{
	  /* This is the first pattern that hashed to this index.  */
	  expr_hash_table[hash] = cur_expr;
	}
      else
	{
	  /* Add EXPR to end of this hash chain.  */
	  last_expr->next_same_hash = cur_expr;
	}
      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */

  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer the currently recorded one.  We want the first one in the
	     block and the block is scanned from start to end.  */
	  ; /* nothing to do */
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;
	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;
	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	}
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
	{
	  /* This is the first pattern that hashed to this index.  */
	  set_hash_table[hash] = cur_expr;
	}
      else
	{
	  /* Add EXPR to end of this hash chain.  */
	  last_expr->next_same_hash = cur_expr;
	}
      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      /* ??? Should this go in a different obstack?  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */

  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);
      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;
      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}
/* Scan pattern PAT of INSN and add an entry to the hash table.
   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.  */

static void
hash_scan_set (pat, insn, set_p)
     rtx pat, insn;
     int set_p;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn);

  if (GET_CODE (dest) == REG)
    {
      int regno = REGNO (dest);
      rtx tmp;

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! set_p
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p [GET_MODE (dest)]
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn.  */
	  int antic_p = ! optimize_size && oprs_anticipatable_p (src, insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  int avail_p = oprs_available_p (src, insn);
	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
	}
      /* Record sets for constant/copy propagation.  */
      else if (set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((GET_CODE (src) == REG
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p [GET_MODE (dest)])
		   /* ??? CONST_INT:wip */
		   || GET_CODE (src) == CONST_INT
		   || GET_CODE (src) == CONST_DOUBLE)
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BLOCK_END (BLOCK_NUM (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn);
    }
}

static void
hash_scan_clobber (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (insn, set_p, in_libcall_block)
     rtx insn;
     int set_p;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET && ! in_libcall_block)
    hash_scan_set (pat, insn, set_p);
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx x = XVECEXP (pat, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      if (GET_CODE (SET_SRC (x)) == CALL)
		hash_scan_call (SET_SRC (x), insn);
	    }
	  else if (GET_CODE (x) == CLOBBER)
	    hash_scan_clobber (x, insn);
	  else if (GET_CODE (x) == CALL)
	    hash_scan_call (x, insn);
	}
    }
  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn);
}
static void
dump_hash_table (file, name, table, table_size, total_size)
     FILE *file;
     const char *name;
     struct expr **table;
     int table_size, total_size;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table = (struct expr **) alloca (total_size * sizeof (struct expr *));
  unsigned int *hash_val = (unsigned int *) alloca (total_size * sizeof (unsigned int));

  bzero ((char *) flat_table, total_size * sizeof (struct expr *));
  for (i = 0; i < table_size; i++)
    {
      struct expr *expr;

      for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  flat_table[expr->bitmap_index] = expr;
	  hash_val[expr->bitmap_index] = i;
	}
    }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table_size, total_size);

  for (i = 0; i < total_size; i++)
    {
      struct expr *expr = flat_table[i];

      fprintf (file, "Index %d (hash value %d)\n  ",
	       expr->bitmap_index, hash_val[i]);
      print_rtl (file, expr->expr);
      fprintf (file, "\n");
    }

  fprintf (file, "\n");
}
/* Record register first/last/block set information for REGNO in INSN.
   reg_first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".
   reg_last_set records the last place in the block where the register
   is set and is used to compute "availability".
   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (insn, regno)
     rtx insn;
     int regno;
{
  if (reg_first_set[regno] == NEVER_SET)
    reg_first_set[regno] = INSN_CUID (insn);
  reg_last_set[regno] = INSN_CUID (insn);
  SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
}

/* Record memory first/last/block set information for INSN.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  if (mem_first_set == NEVER_SET)
    mem_first_set = INSN_CUID (insn);
  mem_last_set = INSN_CUID (insn);
  mem_set_in_block[BLOCK_NUM (insn)] = 1;
}
/* Used for communicating between next two routines.  */
static rtx last_set_insn;

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  */

static void
record_last_set_info (dest, setter)
     rtx dest, setter ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
2029 /* Top level function to create an expression or assignment hash table.
2031 Expression entries are placed in the hash table if
2032 - they are of the form (set (pseudo-reg) src),
2033 - src is something we want to perform GCSE on,
2034 - none of the operands are subsequently modified in the block
2036 Assignment entries are placed in the hash table if
2037 - they are of the form (set (pseudo-reg) src),
2038 - src is something we want to perform const/copy propagation on,
2039 - none of the operands or target are subsequently modified in the block
2040 Currently src must be a pseudo-reg or a const_int.
2042 F is the first insn.
2043 SET_P is non-zero for computing the assignment hash table. */
2046 compute_hash_table (set_p)
2051 /* While we compute the hash table we also compute a bit array of which
2052 registers are set in which blocks.
2053 We also compute which blocks set memory, in the absence of aliasing
2054 support [which is TODO].
2055 ??? This isn't needed during const/copy propagation, but it's cheap to
2057 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
2058 bzero ((char *) mem_set_in_block, n_basic_blocks);
2060 /* Some working arrays used to track first and last set in each block. */
2061 /* ??? One could use alloca here, but at some size a threshold is crossed
2062 beyond which one should use malloc. Are we at that threshold here? */
2063 reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2064 reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2066 for (bb = 0; bb < n_basic_blocks; bb++)
2070 int in_libcall_block;
2073 /* First pass over the instructions records information used to
2074 determine when registers and memory are first and last set.
2075 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
2076 could be moved to compute_sets since they currently don't change. */
2078 for (i = 0; i < max_gcse_regno; i++)
2079 reg_first_set[i] = reg_last_set[i] = NEVER_SET;
2080 mem_first_set = NEVER_SET;
2081 mem_last_set = NEVER_SET;
2083 for (insn = BLOCK_HEAD (bb);
2084 insn && insn != NEXT_INSN (BLOCK_END (bb));
2085 insn = NEXT_INSN (insn))
2087 #ifdef NON_SAVING_SETJMP
2088 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
2089 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2091 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2092 record_last_reg_set_info (insn, regno);
2097 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
2100 if (GET_CODE (insn) == CALL_INSN)
2102 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2103 if ((call_used_regs[regno]
2104 && regno != STACK_POINTER_REGNUM
2105 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2106 && regno != HARD_FRAME_POINTER_REGNUM
2108 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2109 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2111 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2112 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2115 && regno != FRAME_POINTER_REGNUM)
2116 || global_regs[regno])
2117 record_last_reg_set_info (insn, regno);
2118 if (! CONST_CALL_P (insn))
2119 record_last_mem_set_info (insn);
2122 last_set_insn = insn;
2123 note_stores (PATTERN (insn), record_last_set_info);
2126 /* The next pass builds the hash table. */
2128 for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
2129 insn && insn != NEXT_INSN (BLOCK_END (bb));
2130 insn = NEXT_INSN (insn))
2132 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2134 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2135 in_libcall_block = 1;
2136 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
2137 in_libcall_block = 0;
2138 hash_scan_insn (insn, set_p, in_libcall_block);
2143 free (reg_first_set);
2144 free (reg_last_set);
2145 /* Catch bugs early. */
2146 reg_first_set = reg_last_set = 0;
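/* Illustrative sketch, not part of the pass: the NEVER_SET bookkeeping
   above boils down to one forward scan per block.  Everything below
   (struct insn_rec, N_REGS, the encoding of "this insn sets register R")
   is invented for illustration only.  */
#if 0
#define NEVER_SET -1
#define N_REGS 8

struct insn_rec { int uid; int set_regno; /* -1 if no register set */ };

/* Record the first and last insn that sets each register in one block.
   reg_first_set[r] == NEVER_SET afterwards means R is never set here.  */
static void
scan_block (const struct insn_rec *insns, int n_insns,
	    int reg_first_set[N_REGS], int reg_last_set[N_REGS])
{
  int i;

  for (i = 0; i < N_REGS; i++)
    reg_first_set[i] = reg_last_set[i] = NEVER_SET;

  for (i = 0; i < n_insns; i++)
    {
      int r = insns[i].set_regno;

      if (r < 0)
	continue;
      if (reg_first_set[r] == NEVER_SET)
	reg_first_set[r] = insns[i].uid;
      reg_last_set[r] = insns[i].uid;
    }
}
#endif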
2149 /* Allocate space for the set hash table.
2150 N_INSNS is the number of instructions in the function.
2151 It is used to determine the number of buckets to use. */
2154 alloc_set_hash_table (n_insns)
2159 set_hash_table_size = n_insns / 4;
2160 if (set_hash_table_size < 11)
2161 set_hash_table_size = 11;
2162 /* Attempt to maintain efficient use of hash table.
2163 Making it an odd number is simplest for now.
2164 ??? Later take some measurements. */
2165 set_hash_table_size |= 1;
2166 n = set_hash_table_size * sizeof (struct expr *);
2167 set_hash_table = (struct expr **) gmalloc (n);
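/* Illustrative sketch, not part of the pass: the sizing rule above as a
   pure function.  E.g. n_insns == 1000 gives 1000/4 = 250, then |1 makes
   251 buckets; anything under 44 insns hits the 11-bucket floor.  */
#if 0
static int
set_table_size (int n_insns)
{
  int size = n_insns / 4;

  if (size < 11)
    size = 11;
  return size | 1;		/* Keep the bucket count odd.  */
}
#endif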
2170 /* Free things allocated by alloc_set_hash_table. */
2173 free_set_hash_table ()
2175 free (set_hash_table);
2178 /* Compute the hash table for doing copy/const propagation. */
2181 compute_set_hash_table ()
2183 /* Initialize count of number of entries in hash table. */
2184 n_sets = 0;
2185 bzero ((char *) set_hash_table, set_hash_table_size * sizeof (struct expr *));
2187 compute_hash_table (1);
2190 /* Allocate space for the expression hash table.
2191 N_INSNS is the number of instructions in the function.
2192 It is used to determine the number of buckets to use. */
2195 alloc_expr_hash_table (n_insns)
2200 expr_hash_table_size = n_insns / 2;
2201 /* Make sure the amount is usable. */
2202 if (expr_hash_table_size < 11)
2203 expr_hash_table_size = 11;
2204 /* Attempt to maintain efficient use of hash table.
2205 Making it an odd number is simplest for now.
2206 ??? Later take some measurements. */
2207 expr_hash_table_size |= 1;
2208 n = expr_hash_table_size * sizeof (struct expr *);
2209 expr_hash_table = (struct expr **) gmalloc (n);
2212 /* Free things allocated by alloc_expr_hash_table. */
2215 free_expr_hash_table ()
2217 free (expr_hash_table);
2220 /* Compute the hash table for doing GCSE. */
2223 compute_expr_hash_table ()
2225 /* Initialize count of number of entries in hash table. */
2226 n_exprs = 0;
2227 bzero ((char *) expr_hash_table, expr_hash_table_size * sizeof (struct expr *));
2229 compute_hash_table (0);
2232 /* Expression tracking support. */
2234 /* Lookup pattern PAT in the expression table.
2235 The result is a pointer to the table entry, or NULL if not found. */
2237 static struct expr *
2241 int do_not_record_p;
2242 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2243 expr_hash_table_size);
2246 if (do_not_record_p)
2247 return NULL;
2249 expr = expr_hash_table[hash];
2251 while (expr && ! expr_equiv_p (expr->expr, pat))
2252 expr = expr->next_same_hash;
2254 return expr;
2257 /* Lookup REGNO in the set table.
2258 If PAT is non-NULL look for the entry that matches it, otherwise return
2259 the first entry for REGNO.
2260 The result is a pointer to the table entry, or NULL if not found. */
2262 static struct expr *
2263 lookup_set (regno, pat)
2267 unsigned int hash = hash_set (regno, set_hash_table_size);
2270 expr = set_hash_table[hash];
2272 if (pat)
2273 {
2274 while (expr && ! expr_equiv_p (expr->expr, pat))
2275 expr = expr->next_same_hash;
2276 }
2277 else
2278 {
2279 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2280 expr = expr->next_same_hash;
2281 }
2283 return expr;
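/* Illustrative sketch, not part of the pass: both lookup routines above
   are the standard scan of one chain in a separately chained hash table.
   `struct entry' is an invented stand-in for `struct expr'.  */
#if 0
struct entry { int key; struct entry *next_same_hash; };

static struct entry *
chain_lookup (struct entry **table, unsigned int n_buckets, int key)
{
  struct entry *e = table[(unsigned int) key % n_buckets];

  while (e && e->key != key)
    e = e->next_same_hash;
  return e;			/* NULL if not found.  */
}
#endif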
2286 /* Return the next entry for REGNO in list EXPR. */
2288 static struct expr *
2289 next_set (regno, expr)
2293 do
2294 expr = expr->next_same_hash;
2295 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2299 /* Reset tables used to keep track of what's still available [since the
2300 start of the block]. */
2303 reset_opr_set_tables ()
2305 /* Maintain a bitmap of which regs have been set since beginning of
2306 the block. */
2307 sbitmap_zero (reg_set_bitmap);
2308 /* Also keep a record of the last instruction to modify memory.
2309 For now this is very trivial: we only record whether any memory
2310 location has been modified. */
2314 /* Return non-zero if the operands of X are not set before INSN in
2315 INSN's basic block. */
2318 oprs_not_set_p (x, insn)
2325 /* repeat is used to turn tail-recursion into iteration. */
2331 code = GET_CODE (x);
2346 if (mem_last_set != 0)
2347 return 0;
2352 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2358 fmt = GET_RTX_FORMAT (code);
2359 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2364 /* If we are about to do the last recursive call
2365 needed at this level, change it into iteration.
2366 This function is called often enough to make this worthwhile. */
2372 not_set_p = oprs_not_set_p (XEXP (x, i), insn);
2376 else if (fmt[i] == 'E')
2379 for (j = 0; j < XVECLEN (x, i); j++)
2381 int not_set_p = oprs_not_set_p (XVECEXP (x, i, j), insn);
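/* Illustrative sketch, not part of the pass: the `repeat' label used by
   oprs_not_set_p (and several walkers below) turns the final recursive
   call into iteration, so a long right-leaning chain of operands costs
   O(1) stack.  The tree type is invented; it assumes every node has
   either two kids or none.  */
#if 0
struct node { int leaf_value; struct node *kid[2]; };

static int
sum_tree (struct node *x)
{
  int total = 0;

 repeat:
  if (x->kid[0] == 0 && x->kid[1] == 0)
    return total + x->leaf_value;

  total += sum_tree (x->kid[0]);	/* Genuine recursion here ...  */
  x = x->kid[1];			/* ... iteration on the last kid.  */
  goto repeat;
}
#endif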
2391 /* Mark things set by a CALL. */
2397 mem_last_set = INSN_CUID (insn);
2400 /* Mark things set by a SET. */
2403 mark_set (pat, insn)
2406 rtx dest = SET_DEST (pat);
2408 while (GET_CODE (dest) == SUBREG
2409 || GET_CODE (dest) == ZERO_EXTRACT
2410 || GET_CODE (dest) == SIGN_EXTRACT
2411 || GET_CODE (dest) == STRICT_LOW_PART)
2412 dest = XEXP (dest, 0);
2414 if (GET_CODE (dest) == REG)
2415 SET_BIT (reg_set_bitmap, REGNO (dest));
2416 else if (GET_CODE (dest) == MEM)
2417 mem_last_set = INSN_CUID (insn);
2419 if (GET_CODE (SET_SRC (pat)) == CALL)
2420 mark_call (insn);
2423 /* Record things set by a CLOBBER. */
2426 mark_clobber (pat, insn)
2429 rtx clob = XEXP (pat, 0);
2431 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2432 clob = XEXP (clob, 0);
2434 if (GET_CODE (clob) == REG)
2435 SET_BIT (reg_set_bitmap, REGNO (clob));
2437 mem_last_set = INSN_CUID (insn);
2440 /* Record things set by INSN.
2441 This data is used by oprs_not_set_p. */
2444 mark_oprs_set (insn)
2447 rtx pat = PATTERN (insn);
2449 if (GET_CODE (pat) == SET)
2450 mark_set (pat, insn);
2451 else if (GET_CODE (pat) == PARALLEL)
2455 for (i = 0; i < XVECLEN (pat, 0); i++)
2457 rtx x = XVECEXP (pat, 0, i);
2459 if (GET_CODE (x) == SET)
2460 mark_set (x, insn);
2461 else if (GET_CODE (x) == CLOBBER)
2462 mark_clobber (x, insn);
2463 else if (GET_CODE (x) == CALL)
2464 mark_call (insn);
2467 else if (GET_CODE (pat) == CLOBBER)
2468 mark_clobber (pat, insn);
2469 else if (GET_CODE (pat) == CALL)
2470 mark_call (insn);
2474 /* Classic GCSE reaching definition support. */
2476 /* Allocate reaching def variables. */
2479 alloc_rd_mem (n_blocks, n_insns)
2480 int n_blocks, n_insns;
2482 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2483 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2485 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2486 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2488 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2489 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2491 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2492 sbitmap_vector_zero (rd_out, n_basic_blocks);
2495 /* Free reaching def variables. */
2502 free (reaching_defs);
2506 /* Add INSN to the kills of BB.
2507 REGNO, set in BB, is killed by INSN. */
2510 handle_rd_kill_set (insn, regno, bb)
2514 struct reg_set *this_reg = reg_set_table[regno];
2518 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2519 SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
2520 this_reg = this_reg->next;
2524 /* Compute the set of kill's for reaching definitions. */
2532 For each set bit in `gen' of the block (i.e each insn which
2533 generates a definition in the block)
2534 Call the reg set by the insn corresponding to that bit regx
2535 Look at the linked list starting at reg_set_table[regx]
2536 For each setting of regx in the linked list, which is not in
2538 Set the bit in `kill' corresponding to that insn
2541 for (bb = 0; bb < n_basic_blocks; bb++)
2543 for (cuid = 0; cuid < max_cuid; cuid++)
2545 if (TEST_BIT (rd_gen[bb], cuid))
2547 rtx insn = CUID_INSN (cuid);
2548 rtx pat = PATTERN (insn);
2550 if (GET_CODE (insn) == CALL_INSN)
2554 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2556 if ((call_used_regs[regno]
2557 && regno != STACK_POINTER_REGNUM
2558 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2559 && regno != HARD_FRAME_POINTER_REGNUM
2560 #endif
2561 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2562 && ! (regno == ARG_POINTER_REGNUM
2563 && fixed_regs[regno])
2564 #endif
2565 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2566 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2567 #endif
2568 && regno != FRAME_POINTER_REGNUM)
2569 || global_regs[regno])
2570 handle_rd_kill_set (insn, regno, bb);
2574 if (GET_CODE (pat) == PARALLEL)
2578 /* We work backwards because ... */
2579 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2581 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2582 if ((code == SET || code == CLOBBER)
2583 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2584 handle_rd_kill_set (insn,
2585 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2589 else if (GET_CODE (pat) == SET)
2591 if (GET_CODE (SET_DEST (pat)) == REG)
2593 /* Each setting of this register outside of this block
2594 must be marked in the set of kills in this block. */
2595 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2598 /* FIXME: CLOBBER? */
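/* Illustrative sketch, not part of the pass: the kill computation above
   reduced to its core.  For every definition of register R generated in
   block BB, every *other* definition of R that lives outside BB is
   killed by BB.  All names and the <=64-definition limit are invented
   for illustration.  */
#if 0
typedef unsigned long long defset;	/* One bit per definition.  */

/* def_regno[d] is the register defined by definition d;
   def_block[d] is the block containing definition d.  */
static void
compute_block_kill (int n_defs, const int *def_regno, const int *def_block,
		    int bb, defset gen, defset *kill)
{
  int d, d2;

  *kill = 0;
  for (d = 0; d < n_defs; d++)
    if (gen & ((defset) 1 << d))
      for (d2 = 0; d2 < n_defs; d2++)
	if (d2 != d
	    && def_regno[d2] == def_regno[d]
	    && def_block[d2] != bb)
	  *kill |= (defset) 1 << d2;
}
#endif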
2604 /* Compute the reaching definitions as in
2605 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2606 Chapter 10. It is the same algorithm as used for computing available
2607 expressions but applied to the gens and kills of reaching definitions. */
2612 int bb, changed, passes;
2614 for (bb = 0; bb < n_basic_blocks; bb++)
2615 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
2622 for (bb = 0; bb < n_basic_blocks; bb++)
2624 sbitmap_union_of_predecessors (reaching_defs[bb], rd_out,
2626 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
2627 reaching_defs[bb], rd_kill[bb]);
2633 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
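/* Illustrative sketch, not part of the pass: the iteration above is the
   textbook forward dataflow solve with union confluence,
   out[b] = gen[b] | (in[b] & ~kill[b]),  in[b] = union of out[preds].
   Word-sized sets stand in for sbitmaps; the CFG encoding (predecessor
   lists as arrays) is invented for illustration.  */
#if 0
typedef unsigned long long defset;

static void
solve_reaching_defs (int n_blocks, const int n_preds[], int *const preds[],
		     const defset gen[], const defset kill[],
		     defset in[], defset out[])
{
  int bb, p, changed;

  for (bb = 0; bb < n_blocks; bb++)
    {
      in[bb] = 0;
      out[bb] = gen[bb];
    }

  do
    {
      changed = 0;
      for (bb = 0; bb < n_blocks; bb++)
	{
	  defset new_in = 0, new_out;

	  for (p = 0; p < n_preds[bb]; p++)
	    new_in |= out[preds[bb][p]];
	  new_out = gen[bb] | (new_in & ~kill[bb]);

	  if (new_in != in[bb] || new_out != out[bb])
	    changed = 1;
	  in[bb] = new_in;
	  out[bb] = new_out;
	}
    }
  while (changed);
}
#endif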
2636 /* Classic GCSE available expression support. */
2638 /* Allocate memory for available expression computation. */
2641 alloc_avail_expr_mem (n_blocks, n_exprs)
2642 int n_blocks, n_exprs;
2644 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2645 sbitmap_vector_zero (ae_kill, n_basic_blocks);
2647 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2648 sbitmap_vector_zero (ae_gen, n_basic_blocks);
2650 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2651 sbitmap_vector_zero (ae_in, n_basic_blocks);
2653 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2654 sbitmap_vector_zero (ae_out, n_basic_blocks);
2656 u_bitmap = (sbitmap) sbitmap_alloc (n_exprs);
2657 sbitmap_ones (u_bitmap);
2661 free_avail_expr_mem ()
2670 /* Compute the set of available expressions generated in each basic block. */
2677 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2678 This is all we have to do because an expression is not recorded if it
2679 is not available, and the only expressions we want to work with are the
2680 ones that are recorded. */
2682 for (i = 0; i < expr_hash_table_size; i++)
2684 struct expr *expr = expr_hash_table[i];
2685 while (expr != NULL)
2687 struct occr *occr = expr->avail_occr;
2688 while (occr != NULL)
2690 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
2693 expr = expr->next_same_hash;
2698 /* Return non-zero if expression X is killed in BB. */
2701 expr_killed_p (x, bb)
2709 /* repeat is used to turn tail-recursion into iteration. */
2715 code = GET_CODE (x);
2719 return TEST_BIT (reg_set_in_block[bb], REGNO (x));
2722 if (mem_set_in_block[bb])
2723 return 1;
2742 i = GET_RTX_LENGTH (code) - 1;
2743 fmt = GET_RTX_FORMAT (code);
2748 rtx tem = XEXP (x, i);
2750 /* If we are about to do the last recursive call
2751 needed at this level, change it into iteration.
2752 This function is called often enough to make this worthwhile. */
2758 if (expr_killed_p (tem, bb))
2761 else if (fmt[i] == 'E')
2764 for (j = 0; j < XVECLEN (x, i); j++)
2766 if (expr_killed_p (XVECEXP (x, i, j), bb))
2775 /* Compute the set of available expressions killed in each basic block. */
2782 for (bb = 0; bb < n_basic_blocks; bb++)
2784 for (i = 0; i < expr_hash_table_size; i++)
2786 struct expr *expr = expr_hash_table[i];
2788 for ( ; expr != NULL; expr = expr->next_same_hash)
2790 /* Skip EXPR if generated in this block. */
2791 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
2794 if (expr_killed_p (expr->expr, bb))
2795 SET_BIT (ae_kill[bb], expr->bitmap_index);
2801 /* Compute available expressions.
2803 Implement the algorithm to find available expressions
2804 as given in the Aho Sethi Ullman book, pages 627-631. */
2807 compute_available ()
2809 int bb, changed, passes;
2811 sbitmap_zero (ae_in[0]);
2813 sbitmap_copy (ae_out[0] /*dst*/, ae_gen[0] /*src*/);
2815 for (bb = 1; bb < n_basic_blocks; bb++)
2816 sbitmap_difference (ae_out[bb], u_bitmap, ae_kill[bb]);
2823 for (bb = 1; bb < n_basic_blocks; bb++)
2825 sbitmap_intersect_of_predecessors (ae_in[bb], ae_out, bb, s_preds);
2826 changed |= sbitmap_union_of_diff (ae_out[bb], ae_gen[bb],
2827 ae_in[bb], ae_kill[bb]);
2833 fprintf (gcse_file, "avail expr computation: %d passes\n", passes);
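/* Illustrative sketch, not part of the pass: available expressions uses
   the same solver shape as compute_rd, but with *intersection* at block
   entry and an optimistic start, out[b] = U & ~kill[b] for every
   non-entry block.  Word-sized sets stand in for sbitmaps; it assumes
   every non-entry block has at least one predecessor.  */
#if 0
typedef unsigned long long exprset;

static void
solve_available (int n_blocks, const int n_preds[], int *const preds[],
		 const exprset gen[], const exprset kill[],
		 exprset in[], exprset out[])
{
  const exprset universe = ~(exprset) 0;
  int bb, p, changed;

  in[0] = 0;
  out[0] = gen[0];
  for (bb = 1; bb < n_blocks; bb++)
    out[bb] = universe & ~kill[bb];

  do
    {
      changed = 0;
      for (bb = 1; bb < n_blocks; bb++)
	{
	  exprset new_in = universe, new_out;

	  for (p = 0; p < n_preds[bb]; p++)
	    new_in &= out[preds[bb][p]];
	  new_out = gen[bb] | (new_in & ~kill[bb]);

	  if (new_in != in[bb] || new_out != out[bb])
	    changed = 1;
	  in[bb] = new_in;
	  out[bb] = new_out;
	}
    }
  while (changed);
}
#endif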
2836 /* Actually perform the Classic GCSE optimizations. */
2838 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
2840 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
2841 as a positive reach. We want to do this when there are two computations
2842 of the expression in the block.
2844 VISITED is a pointer to a working buffer for tracking which BB's have
2845 been visited. It is NULL for the top-level call.
2847 We treat reaching expressions that go through blocks containing the same
2848 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
2849 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
2850 2 as not reaching. The intent is to improve the probability of finding
2851 only one reaching expression and to reduce register lifetimes by picking
2852 the closest such expression. */
2855 expr_reaches_here_p (occr, expr, bb, check_self_loop, visited)
2859 int check_self_loop;
2864 if (visited == NULL)
2866 visited = (char *) alloca (n_basic_blocks);
2867 bzero (visited, n_basic_blocks);
2870 for (pred = s_preds[bb]; pred != NULL; pred = pred->next)
2872 int pred_bb = INT_LIST_VAL (pred);
2874 if (visited[pred_bb])
2876 /* This predecessor has already been visited.
2880 else if (pred_bb == bb)
2882 /* BB loops on itself. */
2883 if (check_self_loop
2884 && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
2885 && BLOCK_NUM (occr->insn) == pred_bb)
2886 return 1;
2887 visited[pred_bb] = 1;
2889 /* Ignore this predecessor if it kills the expression. */
2890 else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
2891 visited[pred_bb] = 1;
2892 /* Does this predecessor generate this expression? */
2893 else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
2895 /* Is this the occurrence we're looking for?
2896 Note that there's only one generating occurrence per block
2897 so we just need to check the block number. */
2898 if (BLOCK_NUM (occr->insn) == pred_bb)
2899 return 1;
2900 visited[pred_bb] = 1;
2902 /* Neither gen nor kill. */
2905 visited[pred_bb] = 1;
2906 if (expr_reaches_here_p (occr, expr, pred_bb, check_self_loop, visited))
2911 /* All paths have been checked. */
2912 return 0;
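/* Illustrative sketch, not part of the pass: the walk above without the
   expression-specific details.  It asks whether the value generated in
   SRC_BB can reach BB along some path on which no other block kills or
   regenerates it, using the same visited[] discipline (the caller zeroes
   VISITED).  The CFG encoding is invented for illustration.  */
#if 0
static int
value_reaches_p (int bb, int src_bb, const char *gen, const char *kill,
		 const int n_preds[], int *const preds[], char *visited)
{
  int p;

  for (p = 0; p < n_preds[bb]; p++)
    {
      int pred = preds[bb][p];

      if (visited[pred])
	continue;			/* Subgraph already checked.  */
      visited[pred] = 1;
      if (kill[pred])
	continue;			/* This path is blocked.  */
      if (gen[pred])
	{
	  if (pred == src_bb)
	    return 1;			/* The occurrence we wanted.  */
	  continue;			/* Another occurrence shadows it.  */
	}
      if (value_reaches_p (pred, src_bb, gen, kill, n_preds, preds, visited))
	return 1;
    }
  return 0;
}
#endif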
2915 /* Return the instruction that computes EXPR that reaches INSN's basic block.
2916 If there is more than one such instruction, return NULL.
2918 Called only by handle_avail_expr. */
2921 computing_insn (expr, insn)
2925 int bb = BLOCK_NUM (insn);
2927 if (expr->avail_occr->next == NULL)
2929 if (BLOCK_NUM (expr->avail_occr->insn) == bb)
2931 /* The available expression is actually itself
2932 (i.e. a loop in the flow graph) so do nothing. */
2935 /* (FIXME) Case where we found a pattern that was created by
2936 a substitution that took place. */
2937 return expr->avail_occr->insn;
2941 /* Pattern is computed more than once.
2942 Search backwards from this insn to see how many of these
2943 computations actually reach this insn. */
2945 rtx insn_computes_expr = NULL;
2948 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
2950 if (BLOCK_NUM (occr->insn) == bb)
2952 /* The expression is generated in this block.
2953 The only time we care about this is when the expression
2954 is generated later in the block [and thus there's a loop].
2955 We let the normal cse pass handle the other cases. */
2956 if (INSN_CUID (insn) < INSN_CUID (occr->insn))
2958 if (expr_reaches_here_p (occr, expr, bb, 1, NULL))
2963 insn_computes_expr = occr->insn;
2967 else /* Computation of the pattern outside this block. */
2969 if (expr_reaches_here_p (occr, expr, bb, 0, NULL))
2974 insn_computes_expr = occr->insn;
2979 if (insn_computes_expr == NULL)
2981 return insn_computes_expr;
2985 /* Return non-zero if the definition in DEF_INSN can reach INSN.
2986 Only called by can_disregard_other_sets. */
2989 def_reaches_here_p (insn, def_insn)
2994 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
2995 return 1;
2997 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
2999 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3001 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3002 return 1;
3003 if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3004 reg = XEXP (PATTERN (def_insn), 0);
3005 else if (GET_CODE (PATTERN (def_insn)) == SET)
3006 reg = SET_DEST (PATTERN (def_insn));
3007 else
3008 abort ();
3009 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3018 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.
3019 The value returned is the number of definitions that reach INSN.
3020 Returning a value of zero means that [maybe] more than one definition
3021 reaches INSN and the caller can't perform whatever optimization it is
3022 trying; i.e. it is always safe to return zero. */
3025 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3026 struct reg_set **addr_this_reg;
3030 int number_of_reaching_defs = 0;
3031 struct reg_set *this_reg = *addr_this_reg;
3035 if (def_reaches_here_p (insn, this_reg->insn))
3037 number_of_reaching_defs++;
3038 /* Ignore parallels for now. */
3039 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3042 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3043 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3044 SET_SRC (PATTERN (insn)))))
3046 /* A setting of the reg to a different value reaches INSN. */
3049 if (number_of_reaching_defs > 1)
3051 /* If in this setting the value the register is being
3052 set to is equal to the previous value the register
3053 was set to and this setting reaches the insn we are
3054 trying to do the substitution on then we are ok. */
3056 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3057 return 0;
3058 if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3059 SET_SRC (PATTERN (insn))))
3060 return 0;
3062 *addr_this_reg = this_reg;
3065 /* prev_this_reg = this_reg; */
3066 this_reg = this_reg->next;
3069 return number_of_reaching_defs;
3072 /* Expression computed by insn is available and the substitution is legal,
3073 so try to perform the substitution.
3075 The result is non-zero if any changes were made. */
3078 handle_avail_expr (insn, expr)
3082 rtx pat, insn_computes_expr;
3084 struct reg_set *this_reg;
3085 int found_setting, use_src;
3088 /* We only handle the case where one computation of the expression
3089 reaches this instruction. */
3090 insn_computes_expr = computing_insn (expr, insn);
3091 if (insn_computes_expr == NULL)
3097 /* At this point we know only one computation of EXPR outside of this
3098 block reaches this insn. Now try to find a register that the
3099 expression is computed into. */
3101 if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
3103 /* This is the case when the available expression that reaches
3104 here has already been handled as an available expression. */
3105 int regnum_for_replacing = REGNO (SET_SRC (PATTERN (insn_computes_expr)));
3106 /* If the register was created by GCSE we can't use `reg_set_table';
3107 however, we know it's set only once. */
3108 if (regnum_for_replacing >= max_gcse_regno
3109 /* If the register the expression is computed into is set only once,
3110 or only one set reaches this insn, we can use it. */
3111 || (((this_reg = reg_set_table[regnum_for_replacing]),
3112 this_reg->next == NULL)
3113 || can_disregard_other_sets (&this_reg, insn, 0)))
3122 int regnum_for_replacing = REGNO (SET_DEST (PATTERN (insn_computes_expr)));
3123 /* This shouldn't happen. */
3124 if (regnum_for_replacing >= max_gcse_regno)
3125 abort ();
3126 this_reg = reg_set_table[regnum_for_replacing];
3127 /* If the register the expression is computed into is set only once,
3128 or only one set reaches this insn, use it. */
3129 if (this_reg->next == NULL
3130 || can_disregard_other_sets (&this_reg, insn, 0))
3136 pat = PATTERN (insn);
3138 to = SET_SRC (PATTERN (insn_computes_expr));
3140 to = SET_DEST (PATTERN (insn_computes_expr));
3141 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3143 /* We should be able to ignore the return code from validate_change but
3144 to play it safe we check. */
3148 if (gcse_file != NULL)
3150 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d %s insn %d\n",
3151 INSN_UID (insn), REGNO (to),
3152 use_src ? "from" : "set in",
3153 INSN_UID (insn_computes_expr));
3158 /* The register that the expr is computed into is set more than once. */
3159 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3161 /* Insert an insn after INSNX that copies the reg (call it REGB) set
3162 in INSNX into a new pseudo register; call this new register REGN.
3163 From INSNX until the end of the basic block, or until REGB is next
3164 set, replace all uses of REGB with REGN. */
3167 to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr))));
3169 /* Generate the new insn. */
3170 /* ??? If the change fails, we return 0, even though we created
3171 an insn. I think this is ok. */
3172 new_insn
3173 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3174 SET_DEST (PATTERN (insn_computes_expr))),
3175 insn_computes_expr);
3176 /* Keep block number table up to date. */
3177 set_block_num (new_insn, BLOCK_NUM (insn_computes_expr));
3178 /* Keep register set table up to date. */
3179 record_one_set (REGNO (to), new_insn);
3181 gcse_create_count++;
3182 if (gcse_file != NULL)
3184 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d, computed in insn %d,\n",
3185 INSN_UID (NEXT_INSN (insn_computes_expr)),
3186 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))),
3187 INSN_UID (insn_computes_expr));
3188 fprintf (gcse_file, " into newly allocated reg %d\n", REGNO (to));
3191 pat = PATTERN (insn);
3193 /* Do register replacement for INSN. */
3194 changed = validate_change (insn, &SET_SRC (pat),
3195 SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr))),
3198 /* We should be able to ignore the return code from validate_change but
3199 to play it safe we check. */
3203 if (gcse_file != NULL)
3205 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d set in insn %d\n",
3206 INSN_UID (insn),
3207 REGNO (SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr)))),
3208 INSN_UID (insn_computes_expr));
3217 /* Perform classic GCSE.
3218 This is called by one_classic_gcse_pass after all the dataflow analysis
3221 The result is non-zero if a change was made. */
3229 /* Note we start at block 1. */
3232 for (bb = 1; bb < n_basic_blocks; bb++)
3234 /* Reset tables used to keep track of what's still valid [since the
3235 start of the block]. */
3236 reset_opr_set_tables ();
3238 for (insn = BLOCK_HEAD (bb);
3239 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3240 insn = NEXT_INSN (insn))
3242 /* Is insn of form (set (pseudo-reg) ...)? */
3244 if (GET_CODE (insn) == INSN
3245 && GET_CODE (PATTERN (insn)) == SET
3246 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3247 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3249 rtx pat = PATTERN (insn);
3250 rtx src = SET_SRC (pat);
3253 if (want_to_gcse_p (src)
3254 /* Is the expression recorded? */
3255 && ((expr = lookup_expr (src)) != NULL)
3256 /* Is the expression available [at the start of the
3258 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3259 /* Are the operands unchanged since the start of the
3261 && oprs_not_set_p (src, insn))
3262 changed |= handle_avail_expr (insn, expr);
3265 /* Keep track of everything modified by this insn. */
3266 /* ??? Need to be careful w.r.t. mods done to INSN. */
3267 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3268 mark_oprs_set (insn);
3275 /* Top level routine to perform one classic GCSE pass.
3277 Return non-zero if a change was made. */
3280 one_classic_gcse_pass (pass)
3285 gcse_subst_count = 0;
3286 gcse_create_count = 0;
3288 alloc_expr_hash_table (max_cuid);
3289 alloc_rd_mem (n_basic_blocks, max_cuid);
3290 compute_expr_hash_table ();
3292 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3293 expr_hash_table_size, n_exprs);
3298 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3301 compute_available ();
3302 changed = classic_gcse ();
3303 free_avail_expr_mem ();
3306 free_expr_hash_table ();
3310 fprintf (gcse_file, "\n");
3311 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
3312 current_function_name, pass,
3313 bytes_used, gcse_subst_count, gcse_create_count);
3319 /* Compute copy/constant propagation working variables. */
3321 /* Local properties of assignments. */
3323 static sbitmap *cprop_pavloc;
3324 static sbitmap *cprop_absaltered;
3326 /* Global properties of assignments (computed from the local properties). */
3328 static sbitmap *cprop_avin;
3329 static sbitmap *cprop_avout;
3331 /* Allocate vars used for copy/const propagation.
3332 N_BLOCKS is the number of basic blocks.
3333 N_SETS is the number of sets. */
3336 alloc_cprop_mem (n_blocks, n_sets)
3337 int n_blocks, n_sets;
3339 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3340 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3342 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3343 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3346 /* Free vars used by copy/const propagation. */
3351 free (cprop_pavloc);
3352 free (cprop_absaltered);
3357 /* For each block, compute whether X is transparent.
3358 X is either an expression or an assignment [though we don't care which,
3359 for this context an assignment is treated as an expression].
3360 For each block where an element of X is modified, set (SET_P == 1) or reset
3361 (SET_P == 0) the INDX bit in BMAP. */
3364 compute_transp (x, indx, bmap, set_p)
3374 /* repeat is used to turn tail-recursion into iteration. */
3380 code = GET_CODE (x);
3386 int regno = REGNO (x);
3390 if (regno < FIRST_PSEUDO_REGISTER)
3392 for (bb = 0; bb < n_basic_blocks; bb++)
3393 if (TEST_BIT (reg_set_in_block[bb], regno))
3394 SET_BIT (bmap[bb], indx);
3398 for (r = reg_set_table[regno]; r != NULL; r = r->next)
3400 bb = BLOCK_NUM (r->insn);
3401 SET_BIT (bmap[bb], indx);
3407 if (regno < FIRST_PSEUDO_REGISTER)
3409 for (bb = 0; bb < n_basic_blocks; bb++)
3410 if (TEST_BIT (reg_set_in_block[bb], regno))
3411 RESET_BIT (bmap[bb], indx);
3415 for (r = reg_set_table[regno]; r != NULL; r = r->next)
3417 bb = BLOCK_NUM (r->insn);
3418 RESET_BIT (bmap[bb], indx);
3428 for (bb = 0; bb < n_basic_blocks; bb++)
3429 if (mem_set_in_block[bb])
3430 SET_BIT (bmap[bb], indx);
3434 for (bb = 0; bb < n_basic_blocks; bb++)
3435 if (mem_set_in_block[bb])
3436 RESET_BIT (bmap[bb], indx);
3456 i = GET_RTX_LENGTH (code) - 1;
3457 fmt = GET_RTX_FORMAT (code);
3462 rtx tem = XEXP (x, i);
3464 /* If we are about to do the last recursive call
3465 needed at this level, change it into iteration.
3466 This function is called often enough to make this worthwhile. */
3472 compute_transp (tem, indx, bmap, set_p);
3474 else if (fmt[i] == 'E')
3477 for (j = 0; j < XVECLEN (x, i); j++)
3478 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
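/* Illustrative sketch, not part of the pass: for pseudos the loop above
   just walks the register's definition list and marks each defining
   block opaque (or transparent again when SET_P is zero).  The list
   type mirrors `struct reg_set' in spirit; names are invented, and INDX
   is assumed to fit in one word.  */
#if 0
struct def_site { int block; struct def_site *next; };

static void
mark_opaque_blocks (const struct def_site *defs, int indx,
		    unsigned long long bmap[], int set_p)
{
  for (; defs != 0; defs = defs->next)
    {
      if (set_p)
	bmap[defs->block] |= (unsigned long long) 1 << indx;
      else
	bmap[defs->block] &= ~((unsigned long long) 1 << indx);
    }
}
#endif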
3483 /* Compute the available expressions at the start and end of each
3484 basic block for cprop. This particular dataflow equation is
3485 used often enough that we might want to generalize it and make it
3486 a subroutine for other global optimizations that need available
3487 in/out information. */
3489 compute_cprop_avinout ()
3491 int bb, changed, passes;
3493 sbitmap_zero (cprop_avin[0]);
3494 sbitmap_vector_ones (cprop_avout, n_basic_blocks);
3501 for (bb = 0; bb < n_basic_blocks; bb++)
3504 sbitmap_intersect_of_predecessors (cprop_avin[bb],
3505 cprop_avout, bb, s_preds);
3506 changed |= sbitmap_union_of_diff (cprop_avout[bb],
3509 cprop_absaltered[bb]);
3515 fprintf (gcse_file, "cprop avail expr computation: %d passes\n", passes);
3518 /* Top level routine to do the dataflow analysis needed by copy/const
3522 compute_cprop_data ()
3524 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
3525 compute_cprop_avinout ();
3528 /* Copy/constant propagation. */
3534 /* Maximum number of register uses in an insn that we handle. */
3537 /* Table of uses found in an insn.
3538 Allocated statically to avoid alloc/free complexity and overhead. */
3539 static struct reg_use reg_use_table[MAX_USES];
3541 /* Index into `reg_use_table' while building it. */
3542 static int reg_use_count;
3544 /* Set up a list of register numbers used in INSN.
3545 The found uses are stored in `reg_use_table'.
3546 `reg_use_count' is initialized to zero before entry, and
3547 contains the number of uses in the table upon exit.
3549 ??? If a register appears multiple times we will record it multiple
3550 times. This doesn't hurt anything but it will slow things down. */
3560 /* repeat is used to turn tail-recursion into iteration. */
3566 code = GET_CODE (x);
3570 if (reg_use_count == MAX_USES)
3571 return;
3572 reg_use_table[reg_use_count].reg_rtx = x;
3590 case ASM_INPUT: /*FIXME*/
3594 if (GET_CODE (SET_DEST (x)) == MEM)
3595 find_used_regs (SET_DEST (x));
3603 /* Recursively scan the operands of this expression. */
3605 fmt = GET_RTX_FORMAT (code);
3606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3610 /* If we are about to do the last recursive call
3611 needed at this level, change it into iteration.
3612 This function is called often enough to make this worthwhile. */
3618 find_used_regs (XEXP (x, i));
3620 else if (fmt[i] == 'E')
3623 for (j = 0; j < XVECLEN (x, i); j++)
3624 find_used_regs (XVECEXP (x, i, j));
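/* Illustrative sketch, not part of the pass: find_used_regs fills a
   fixed-size static table, i.e. a bounded push rather than dynamic
   allocation; once the table is full, further uses are simply not
   recorded.  Names are invented for illustration.  */
#if 0
#define MAX_USES 8

static int use_table[MAX_USES];
static int use_count;		/* Caller zeroes this before each insn.  */

static void
record_use (int regno)
{
  if (use_count == MAX_USES)
    return;			/* Table full: drop further uses.  */
  use_table[use_count++] = regno;
}
#endif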
3629 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3630 Returns non-zero if successful. */
3633 try_replace_reg (from, to, insn)
3636 /* If this fails we could try to simplify the result of the
3637 replacement and attempt to recognize the simplified insn.
3639 But we need a general simplify_rtx that doesn't have pass-specific
3640 state variables. I'm not aware of one at the moment. */
3641 return validate_replace_src (from, to, insn);
3644 /* Find a set of REGNO that is available on entry to INSN's block.
3645 Returns NULL if not found. */
3647 static struct expr *
3648 find_avail_set (regno, insn)
3652 struct expr *set = lookup_set (regno, NULL_RTX);
3654 while (set)
3655 {
3656 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3657 break;
3658 set = next_set (regno, set);
3659 }
3661 return set;
3664 /* Perform constant and copy propagation on INSN.
3665 The result is non-zero if a change was made. */
3668 cprop_insn (insn, alter_jumps)
3672 struct reg_use *reg_used;
3675 /* Only propagate into SETs. Note that a conditional jump is a
3676 SET with pc_rtx as the destination. */
3677 if ((GET_CODE (insn) != INSN
3678 && GET_CODE (insn) != JUMP_INSN)
3679 || GET_CODE (PATTERN (insn)) != SET)
3683 find_used_regs (PATTERN (insn));
3685 reg_used = &reg_use_table[0];
3686 for ( ; reg_use_count > 0; reg_used++, reg_use_count--)
3690 int regno = REGNO (reg_used->reg_rtx);
3692 /* Ignore registers created by GCSE.
3693 We do this because ... */
3694 if (regno >= max_gcse_regno)
3697 /* If the register has already been set in this block, there's
3698 nothing we can do. */
3699 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
3702 /* Find an assignment that sets reg_used and is available
3703 at the start of the block. */
3704 set = find_avail_set (regno, insn);
3709 /* ??? We might be able to handle PARALLELs. Later. */
3710 if (GET_CODE (pat) != SET)
3712 src = SET_SRC (pat);
3714 /* Constant propagation. */
3715 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE)
3717 /* Handle normal insns first. */
3718 if (GET_CODE (insn) == INSN
3719 && try_replace_reg (reg_used->reg_rtx, src, insn))
3723 if (gcse_file != NULL)
3725 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
3726 regno, INSN_UID (insn));
3727 print_rtl (gcse_file, src);
3728 fprintf (gcse_file, "\n");
3731 /* The original insn setting reg_used may or may not now be
3732 deletable. We leave the deletion to flow. */
3735 /* Try to propagate a CONST_INT into a conditional jump.
3736 We're pretty specific about what we will handle in this
3737 code; we can extend it as necessary over time.
3739 Right now the insn in question must look like
3741 (set (pc) (if_then_else ...))
3743 Note this does not currently handle machines which use cc0. */
3744 else if (alter_jumps
3745 && GET_CODE (insn) == JUMP_INSN && condjump_p (insn))
3747 /* We want a copy of the JUMP_INSN so we can modify it
3748 in-place as needed without affecting the original. */
3749 rtx copy = copy_rtx (insn);
3750 rtx set = PATTERN (copy);
3753 /* Replace the register with the appropriate constant. */
3754 replace_rtx (SET_SRC (set), reg_used->reg_rtx, src);
3756 temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)),
3757 GET_MODE (SET_SRC (set)),
3758 GET_MODE (XEXP (SET_SRC (set), 0)),
3759 XEXP (SET_SRC (set), 0),
3760 XEXP (SET_SRC (set), 1),
3761 XEXP (SET_SRC (set), 2));
3763 /* If no simplification can be made, then try the next
3764 register. */
3766 SET_SRC (set) = temp;
3770 /* That may have changed the structure of TEMP, so
3771 force it to be rerecognized if it has not turned
3772 into a nop or unconditional jump. */
3774 INSN_CODE (copy) = -1;
3775 if ((SET_DEST (set) == pc_rtx
3776 && (SET_SRC (set) == pc_rtx
3777 || GET_CODE (SET_SRC (set)) == LABEL_REF))
3778 || recog (PATTERN (copy), copy, NULL) >= 0)
3780 /* This has either become an unconditional jump
3781 or a nop-jump. We'd like to delete nop jumps
3782 here, but doing so confuses gcse. So we just
3783 make the replacement and let later passes
3784 sort things out. */
3785 PATTERN (insn) = set;
3786 INSN_CODE (insn) = -1;
3788 /* One less use of the label this insn used to jump to
3789 if we turned this into a NOP jump. */
3790 if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0)
3791 --LABEL_NUSES (JUMP_LABEL (insn));
3793 /* If this has turned into an unconditional jump,
3794 then put a barrier after it so that the unreachable
3795 code will be deleted. */
3796 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
3797 emit_barrier_after (insn);
3799 run_jump_opt_after_gcse = 1;
3803 if (gcse_file != NULL)
3805 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
3806 regno, INSN_UID (insn));
3807 print_rtl (gcse_file, src);
3808 fprintf (gcse_file, "\n");
3813 else if (GET_CODE (src) == REG
3814 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3815 && REGNO (src) != regno)
3817 /* We know the set is available.
3818 Now check that SET_SRC is ANTLOC (i.e. none of the source operands
3819 have changed since the start of the block). */
3820 if (oprs_not_set_p (src, insn))
3822 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3826 if (gcse_file != NULL)
3828 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d with reg %d\n",
3829 regno, INSN_UID (insn), REGNO (src));
3832 /* The original insn setting reg_used may or may not now be
3833 deletable. We leave the deletion to flow. */
3834 /* FIXME: If it turns out that the insn isn't deletable,
3835 then we may have unnecessarily extended register lifetimes
3836 and made things worse. */
3845 /* Forward propagate copies.
3846 This includes copies and constants.
3847 Return non-zero if a change was made. */
3856 /* Note we start at block 1. */
3859 for (bb = 1; bb < n_basic_blocks; bb++)
3861 /* Reset tables used to keep track of what's still valid [since the
3862 start of the block]. */
3863 reset_opr_set_tables ();
3865 for (insn = BLOCK_HEAD (bb);
3866 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3867 insn = NEXT_INSN (insn))
3869 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3871 changed |= cprop_insn (insn, alter_jumps);
3873 /* Keep track of everything modified by this insn. */
3874 /* ??? Need to be careful w.r.t. mods done to INSN. */
3875 mark_oprs_set (insn);
3880 if (gcse_file != NULL)
3881 fprintf (gcse_file, "\n");
3886 /* Perform one copy/constant propagation pass.
3887 F is the first insn in the function.
3888 PASS is the pass count. */
3891 one_cprop_pass (pass, alter_jumps)
3897 const_prop_count = 0;
3898 copy_prop_count = 0;
3900 alloc_set_hash_table (max_cuid);
3901 compute_set_hash_table ();
3903 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
3907 alloc_cprop_mem (n_basic_blocks, n_sets);
3908 compute_cprop_data ();
3909 changed = cprop (alter_jumps);
3912 free_set_hash_table ();
3916 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, %d const props, %d copy props\n",
3917 current_function_name, pass,
3918 bytes_used, const_prop_count, copy_prop_count);
3919 fprintf (gcse_file, "\n");
3925 /* Compute PRE+LCM working variables. */
3927 /* Local properties of expressions. */
3928 /* Nonzero for expressions that are transparent in the block. */
3929 static sbitmap *transp;
3931 /* Nonzero for expressions that are transparent at the end of the block.
3932 This is only zero for expressions killed by an abnormal critical edge
3933 created by a call. */
3934 static sbitmap *transpout;
3936 /* Nonzero for expressions that are computed (available) in the block. */
3937 static sbitmap *comp;
3939 /* Nonzero for expressions that are locally anticipatable in the block. */
3940 static sbitmap *antloc;
3942 /* Nonzero for expressions where this block is an optimal computation
3943 point. */
3944 static sbitmap *pre_optimal;
3946 /* Nonzero for expressions which are redundant in a particular block. */
3947 static sbitmap *pre_redundant;
3949 static sbitmap *temp_bitmap;
3951 /* Redundant insns. */
3952 static sbitmap pre_redundant_insns;
3954 /* Allocate vars used for PRE analysis. */
3957 alloc_pre_mem (n_blocks, n_exprs)
3958 int n_blocks, n_exprs;
3960 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3961 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3962 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3964 temp_bitmap = sbitmap_vector_alloc (n_blocks, n_exprs);
3965 pre_optimal = sbitmap_vector_alloc (n_blocks, n_exprs);
3966 pre_redundant = sbitmap_vector_alloc (n_blocks, n_exprs);
3967 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
3970 /* Free vars used for PRE analysis. */
3980 free (pre_redundant);
3984 /* Top level routine to do the dataflow analysis needed by PRE. */
3989 compute_local_properties (transp, comp, antloc, 0);
3990 compute_transpout ();
3991 pre_lcm (n_basic_blocks, n_exprs, s_preds, s_succs, transp,
3992 antloc, pre_redundant, pre_optimal);
3998 /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
3999 block BB.
4001 VISITED is a pointer to a working buffer for tracking which BB's have
4002 been visited. It is NULL for the top-level call.
4004 CHECK_PRE_COMP controls whether or not we check for a computation of
4005 EXPR in OCCR_BB.
4007 We treat reaching expressions that go through blocks containing the same
4008 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4009 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4010 2 as not reaching. The intent is to improve the probability of finding
4011 only one reaching expression and to reduce register lifetimes by picking
4012 the closest such expression. */
4015 pre_expr_reaches_here_p (occr_bb, expr, bb, check_pre_comp, visited)
4024 if (visited == NULL)
4026 visited = (char *) alloca (n_basic_blocks);
4027 bzero (visited, n_basic_blocks);
4030 for (pred = s_preds[bb]; pred != NULL; pred = pred->next)
4032 int pred_bb = INT_LIST_VAL (pred);
4034 if (pred_bb == ENTRY_BLOCK
4035 /* Has this predecessor already been visited? */
4036 || visited[pred_bb])
4038 /* Nothing to do. */
4040 /* Does this predecessor generate this expression? */
4041 else if ((!check_pre_comp && occr_bb == pred_bb)
4042 || TEST_BIT (comp[pred_bb], expr->bitmap_index))
4044 /* Is this the occurrence we're looking for?
4045 Note that there's only one generating occurrence per block
4046 so we just need to check the block number. */
4047 if (occr_bb == pred_bb)
4048 return 1;
4049 visited[pred_bb] = 1;
4051 /* Ignore this predecessor if it kills the expression. */
4052 else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
4053 visited[pred_bb] = 1;
4054 /* Neither gen nor kill. */
4057 visited[pred_bb] = 1;
4058 if (pre_expr_reaches_here_p (occr_bb, expr, pred_bb,
4059 check_pre_comp, visited))
4064 /* All paths have been checked. */
4065 return 0;
4068 /* Add EXPR to the end of basic block BB.
4070 This is used by both PRE and code hoisting.
4072 For PRE, we want to verify that the expr is either transparent
4073 or locally anticipatable in the target block. This check makes
4074 no sense for code hoisting. */
4077 insert_insn_end_bb (expr, bb, pre)
4082 rtx insn = BLOCK_END (bb);
4084 rtx reg = expr->reaching_reg;
4085 int regno = REGNO (reg);
4086 rtx pat, copied_expr;
4089 start_sequence ();
4090 copied_expr = copy_rtx (expr->expr);
4091 emit_move_insn (reg, copied_expr);
4092 first_new_insn = get_insns ();
4093 pat = gen_sequence ();
4094 end_sequence ();
4096 /* If the last insn is a jump, insert EXPR in front [taking care to
4097 handle cc0, etc. properly]. */
4099 if (GET_CODE (insn) == JUMP_INSN)
4105 /* If this is a jump table, then we can't insert stuff here. Since
4106 we know the previous real insn must be the tablejump, we insert
4107 the new instruction just before the tablejump. */
4108 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4109 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4110 insn = prev_real_insn (insn);
4113 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4114 if cc0 isn't set. */
4115 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4116 if (note)
4117 insn = XEXP (note, 0);
4120 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4121 if (maybe_cc0_setter
4122 && GET_RTX_CLASS (GET_CODE (maybe_cc0_setter)) == 'i'
4123 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4124 insn = maybe_cc0_setter;
4127 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4128 new_insn = emit_insn_before (pat, insn);
4129 if (BLOCK_HEAD (bb) == insn)
4130 BLOCK_HEAD (bb) = new_insn;
4132 /* Likewise if the last insn is a call, as will happen in the presence
4133 of exception handling. */
4134 else if (GET_CODE (insn) == CALL_INSN)
4136 HARD_REG_SET parm_regs;
4140 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4141 we search backward and place the instructions before the first
4142 parameter is loaded. Do this for everyone for consistency and a
4143 presumption that we'll get better code elsewhere as well. */
4145 /* It should always be the case that we can put these instructions
4146 anywhere in the basic block when performing PRE optimizations. */
4148 if (pre
4149 && !TEST_BIT (antloc[bb], expr->bitmap_index)
4150 && !TEST_BIT (transp[bb], expr->bitmap_index))
4151 abort ();
4153 /* Since different machines initialize their parameter registers
4154 in different orders, assume nothing. Collect the set of all
4155 parameter registers. */
4156 CLEAR_HARD_REG_SET (parm_regs);
4158 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
4159 if (GET_CODE (XEXP (p, 0)) == USE
4160 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
4162 int regno = REGNO (XEXP (XEXP (p, 0), 0));
4163 if (regno >= FIRST_PSEUDO_REGISTER)
4164 abort ();
4165 SET_HARD_REG_BIT (parm_regs, regno);
4166 nparm_regs++;
4169 /* Search backward for the first set of a register in this set. */
4170 while (nparm_regs && BLOCK_HEAD (bb) != insn)
4172 insn = PREV_INSN (insn);
4173 p = single_set (insn);
4174 if (p && GET_CODE (SET_DEST (p)) == REG
4175 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
4176 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
4178 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
4179 nparm_regs--;
4183 new_insn = emit_insn_before (pat, insn);
4184 if (BLOCK_HEAD (bb) == insn)
4185 BLOCK_HEAD (bb) = new_insn;
4189 new_insn = emit_insn_after (pat, insn);
4190 BLOCK_END (bb) = new_insn;
4193 /* Keep block number table up to date.
4194 Note, PAT could be a multiple insn sequence; we have to make
4195 sure that each insn in the sequence is handled. */
4196 if (GET_CODE (pat) == SEQUENCE)
4200 for (i = 0; i < XVECLEN (pat, 0); i++)
4202 rtx insn = XVECEXP (pat, 0, i);
4203 set_block_num (insn, bb);
4204 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4205 add_label_notes (PATTERN (insn), new_insn);
4206 record_set_insn = insn;
4207 note_stores (PATTERN (insn), record_set_info);
4212 add_label_notes (SET_SRC (pat), new_insn);
4213 set_block_num (new_insn, bb);
4214 /* Keep register set table up to date. */
4215 record_one_set (regno, new_insn);
4218 gcse_create_count++;
4222 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, copying expression %d to reg %d\n",
4223 bb, INSN_UID (new_insn), expr->bitmap_index, regno);
4227 /* Insert partially redundant expressions at the ends of appropriate basic
4228 blocks making them fully redundant. */
4231 pre_insert (index_map)
4232 struct expr **index_map;
4234 int bb, i, set_size;
4237 /* Compute INSERT = PRE_OPTIMAL & ~PRE_REDUNDANT.
4238 Where INSERT is nonzero, we add the expression at the end of the basic
4239 block if it reaches any of the deleted expressions. */
4241 set_size = pre_optimal[0]->size;
4242 inserted = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
4243 sbitmap_vector_zero (inserted, n_basic_blocks);
4245 for (bb = 0; bb < n_basic_blocks; bb++)
4249 /* This computes the set of potential insertions we need. */
4250 sbitmap_not (temp_bitmap[bb], pre_redundant[bb]);
4251 sbitmap_a_and_b (temp_bitmap[bb], temp_bitmap[bb], pre_optimal[bb]);
4253 /* TEMP_BITMAP[bb] now contains a bitmap of the expressions that we need
4254 to insert at the end of this basic block. */
4255 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4257 SBITMAP_ELT_TYPE insert = temp_bitmap[bb]->elms[i];
4260 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
4262 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4264 struct expr *expr = index_map[j];
4267 /* Now look at each deleted occurrence of this expression. */
4268 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4270 if (! occr->deleted_p)
4273 /* Insert this expression at the end of BB if it would
4274 reach the deleted occurrence. */
4275 if (!TEST_BIT (inserted[bb], j)
4276 && pre_expr_reaches_here_p (bb, expr,
4277 BLOCK_NUM (occr->insn), 0,
4280 SET_BIT (inserted[bb], j);
4281 insert_insn_end_bb (index_map[j], bb, 1);
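/* Illustrative sketch, not part of the pass: the loop above enumerates
   the set bits of INSERT = OPTIMAL & ~REDUNDANT one word at a time,
   peeling bits off with a right shift exactly as done here.  The word
   layout is invented for illustration.  */
#if 0
#define ELT_BITS ((int) (sizeof (unsigned long) * 8))

static void
for_each_set_bit (const unsigned long *elms, int n_words, int n_bits,
		  void (*fn) (int bit_index))
{
  int i, indx, j;

  for (i = indx = 0; i < n_words; i++, indx += ELT_BITS)
    {
      unsigned long word = elms[i];

      for (j = indx; word && j < n_bits; j++, word >>= 1)
	if (word & 1)
	  fn (j);
    }
}
#endif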
4290 /* Copy the result of INSN to REG.
4291 INDX is the expression number. */
4294 pre_insert_copy_insn (expr, insn)
4298 rtx reg = expr->reaching_reg;
4299 int regno = REGNO (reg);
4300 int indx = expr->bitmap_index;
4301 rtx set = single_set (insn);
4306 new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
4308 /* Keep block number table up to date. */
4309 set_block_num (new_insn, BLOCK_NUM (insn));
4310 /* Keep register set table up to date. */
4311 record_one_set (regno, new_insn);
4313 gcse_create_count++;
4317 fprintf (gcse_file, "PRE: bb %d, insn %d, copying expression %d in insn %d to reg %d\n",
4318 BLOCK_NUM (insn), INSN_UID (new_insn), indx, INSN_UID (insn), regno);
4322 /* Copy available expressions that reach the redundant expression
4323 to `reaching_reg'. */
4326 pre_insert_copies ()
4330 for (bb = 0; bb < n_basic_blocks; bb++)
4332 sbitmap_a_and_b (temp_bitmap[bb], pre_optimal[bb], pre_redundant[bb]);
4335 /* For each available expression in the table, copy the result to
4336 `reaching_reg' if the expression reaches a deleted one.
4338 ??? The current algorithm is rather brute force.
4339 Need to do some profiling. */
4341 for (i = 0; i < expr_hash_table_size; i++)
4345 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4349 /* If the basic block isn't reachable, PPOUT will be TRUE.
4350 However, we don't want to insert a copy here because the
4351 expression may not really be redundant. So only insert
4352 an insn if the expression was deleted.
4353 This test also avoids further processing if the expression
4354 wasn't deleted anywhere. */
4355 if (expr->reaching_reg == NULL)
4358 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4362 if (! occr->deleted_p)
4365 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4367 rtx insn = avail->insn;
4368 int bb = BLOCK_NUM (insn);
4370 if (!TEST_BIT (temp_bitmap[bb], expr->bitmap_index))
4373 /* No need to handle this one if handled already. */
4374 if (avail->copied_p)
4376 /* Don't handle this one if it's a redundant one. */
4377 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4379 /* Or if the expression doesn't reach the deleted one. */
4380 if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
4381 BLOCK_NUM (occr->insn),
4385 /* Copy the result of avail to reaching_reg. */
4386 pre_insert_copy_insn (expr, insn);
4387 avail->copied_p = 1;
4394 /* Delete redundant computations.
4395 Deletion is done by changing the insn to copy the `reaching_reg' of
4396 the expression into the result of the SET. It is left to later passes
4397 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4399 Returns non-zero if a change is made. */
4406 /* Compute the expressions which are redundant and need to be replaced by
4407 copies from the reaching reg to the target reg. */
4408 for (bb = 0; bb < n_basic_blocks; bb++)
4410 sbitmap_not (temp_bitmap[bb], pre_optimal[bb]);
4411 sbitmap_a_and_b (temp_bitmap[bb], temp_bitmap[bb], pre_redundant[bb]);
4415 for (i = 0; i < expr_hash_table_size; i++)
4419 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4422 int indx = expr->bitmap_index;
4424 /* We only need to search antic_occr since we require
4425 ANTLOC != 0. */
4427 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4429 rtx insn = occr->insn;
4431 int bb = BLOCK_NUM (insn);
4433 if (TEST_BIT (temp_bitmap[bb], indx))
4435 set = single_set (insn);
4439 /* Create a pseudo-reg to store the result of reaching
4440 expressions into. Get the mode for the new pseudo
4441 from the mode of the original destination pseudo. */
4442 if (expr->reaching_reg == NULL)
4443 expr->reaching_reg
4444 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4446 /* In theory this should never fail since we're creating
4449 However, on the x86 some of the movXX patterns actually
4450 contain clobbers of scratch regs. This may cause the
4451 insn created by validate_change to not match any pattern
4452 and thus cause validate_change to fail. */
4453 if (validate_change (insn, &SET_SRC (set),
4454 expr->reaching_reg, 0))
4456 occr->deleted_p = 1;
4457 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4465 "PRE: redundant insn %d (expression %d) in bb %d, reaching reg is %d\n",
4466 INSN_UID (insn), indx, bb, REGNO (expr->reaching_reg));
4476 /* Perform GCSE optimizations using PRE.
4477 This is called by one_pre_gcse_pass after all the dataflow analysis
4480 This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
4481 and lazy code motion from Knoop, Ruthing and Steffen as described in
4482 Advanced Compiler Design and Implementation.
4484 ??? A new pseudo reg is created to hold the reaching expression.
4485 The nice thing about the classical approach is that it would try to
4486 use an existing reg. If the register can't be adequately optimized
4487 [i.e. we introduce reload problems], one could add a pass here to
4488 propagate the new register through the block.
4490 ??? We don't handle single sets in PARALLELs because we're [currently]
4491 not able to copy the rest of the parallel when we insert copies to create
4492 full redundancies from partial redundancies. However, there's no reason
4493 why we can't handle PARALLELs in the cases where there are no partial
4494 redundancies. */
4501 struct expr **index_map;
4503 /* Compute a mapping from expression number (`bitmap_index') to
4504 hash table entry. */
4506 index_map = (struct expr **) alloca (n_exprs * sizeof (struct expr *));
4507 bzero ((char *) index_map, n_exprs * sizeof (struct expr *));
4508 for (i = 0; i < expr_hash_table_size; i++)
4512 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4513 index_map[expr->bitmap_index] = expr;
4516 /* Reset bitmap used to track which insns are redundant. */
4517 pre_redundant_insns = sbitmap_alloc (max_cuid);
4518 sbitmap_zero (pre_redundant_insns);
4520 /* Delete the redundant insns first so that
4521 - we know what register to use for the new insns and for the other
4522 ones with reaching expressions
4523 - we know which insns are redundant when we go to create copies */
4524 changed = pre_delete ();
4526 /* Insert insns in places that make partially redundant expressions
4528 pre_insert (index_map);
4530 /* In other places with reaching expressions, copy the expression to the
4531 specially allocated pseudo-reg that reaches the redundant expression. */
4532 pre_insert_copies ();
4534 free (pre_redundant_insns);
4539 /* Top level routine to perform one PRE GCSE pass.
4541 Return non-zero if a change was made. */
4544 one_pre_gcse_pass (pass)
4549 gcse_subst_count = 0;
4550 gcse_create_count = 0;
4552 alloc_expr_hash_table (max_cuid);
4553 compute_expr_hash_table ();
4555 dump_hash_table (gcse_file, "Expression", expr_hash_table,
4556 expr_hash_table_size, n_exprs);
4559 alloc_pre_mem (n_basic_blocks, n_exprs);
4560 compute_pre_data ();
4561 changed |= pre_gcse ();
4564 free_expr_hash_table ();
4568 fprintf (gcse_file, "\n");
4569 fprintf (gcse_file, "PRE GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
4570 current_function_name, pass,
4571 bytes_used, gcse_subst_count, gcse_create_count);
4577 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4578 We have to add REG_LABEL notes, because the following loop optimization
4579 pass requires them. */
4581 /* ??? This is very similar to the loop.c add_label_notes function. We
4582 could probably share code here. */
4584 /* ??? If there was a jump optimization pass after gcse and before loop,
4585 then we would not need to do this here, because jump would add the
4586 necessary REG_LABEL notes. */
4589 add_label_notes (x, insn)
4593 enum rtx_code code = GET_CODE (x);
4597 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4599 /* This code used to ignore labels that referred to dispatch tables to
4600 avoid flow generating (slightly) worse code.
4602 We no longer ignore such label references (see LABEL_REF handling in
4603 mark_jump_label for additional information). */
4604 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
4609 fmt = GET_RTX_FORMAT (code);
4610 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4613 add_label_notes (XEXP (x, i), insn);
4614 else if (fmt[i] == 'E')
4615 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4616 add_label_notes (XVECEXP (x, i, j), insn);
4620 /* Compute transparent outgoing information for each block.
4622 An expression is transparent to an edge unless it is killed by
4623 the edge itself. This can only happen with abnormal control flow,
4624 when the edge is traversed through a call. This happens with
4625 non-local labels and exceptions.
4627 This would not be necessary if we split the edge. While this is
4628 normally impossible for abnormal critical edges, with some effort
4629 it should be possible with exception handling, since we still have
4630 control over which handler should be invoked. But due to increased
4631 EH table sizes, this may not be worthwhile. */
4634 compute_transpout ()
4638 sbitmap_vector_ones (transpout, n_basic_blocks);
4640 for (bb = 0; bb < n_basic_blocks; ++bb)
4644 /* Note that flow inserted a nop at the end of basic blocks that
4645 end in call instructions for reasons other than abnormal
4646 control flow. */
4647 if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
4650 for (i = 0; i < expr_hash_table_size; i++)
4653 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
4654 if (GET_CODE (expr->expr) == MEM)
4656 rtx addr = XEXP (expr->expr, 0);
4658 if (GET_CODE (addr) == SYMBOL_REF
4659 && CONSTANT_POOL_ADDRESS_P (addr))
4662 /* ??? Optimally, we would use interprocedural alias
4663 analysis to determine if this mem is actually killed
4664 by this call. */
4665 RESET_BIT (transpout[bb], expr->bitmap_index);