/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
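/* As a schematic, source-level illustration (the pass itself works on RTL,
   not source): treating a loop-invariant computation as partially redundant
   lets PRE hoist it out of the loop,

	while (p)			t = a + b;
	  x = a + b;		==>	while (p)
					  x = t;

   which is the loop invariant code motion referred to above.  */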
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
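
   As a schematic illustration of why the cleanup pass helps: after GCSE
   replaces a redundant computation with a copy from a new pseudo,

	x = a + b;		t = a + b;  x = t;
	...		==>	...
	y = a + b;		y = t;

   the second copy/constant propagation pass can forward T into the uses
   of X and Y, leaving the copies dead.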
   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
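   (For example, `--param max-gcse-passes=2' raises the pass limit used by
   the main loop in gcse_main below.)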
   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
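
   Schematically (source level; the real transformation is on RTL), the
   insertion step turns a partially redundant expression into a fully
   redundant one, and the deletion step replaces it with a copy:

	if (c)			if (c)
	  x = a + b;		  { t = a + b;  x = t; }
	else		==>	else
	  x = 0;		  { x = 0;  t = a + b; }
	y = a + b;		y = t;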
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use { rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is an expression table or a copy propagation
     (set) table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
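/* For example (hypothetical UIDs): if the insn stream contains a real insn
   with UID 4, a NOTE with UID 9 and a real insn with UID 11, then INSN_CUID
   maps UID 4 to 0 and UID 11 to 1; the NOTE shares the cuid of the following
   real insn.  Since cuids are dense and monotonic over real insns,

	INSN_CUID (a) < INSN_CUID (b)

   tests insn order within the stream in O(1).  */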
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */
typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
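/* A sketch of how the table is typically consumed (NOTE_BLOCK_SET is a
   hypothetical consumer; compare compute_transp): iterating over the
   settings of one pseudo touches only the blocks that actually set it,

	reg_set *r;
	for (r = reg_set_table[regno]; r != NULL; r = r->next)
	  NOTE_BLOCK_SET (BLOCK_NUM (r->insn));

   rather than scanning every basic block.  */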
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself,
   i.e. loads and stores to a single location.
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr *expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr *next;		/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  int hash_index;		/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr *pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, of lists of insns which modify
   memory within the block.  */
static rtx *modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx *canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr *ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr *find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr *first_ls_expr (void);
static inline struct ls_expr *next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
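/* Usage sketch with hypothetical sizes: for two bitmap sets of
   last_basic_block rows by n_exprs columns, a caller would compute

	int width = get_bitmap_width (2, last_basic_block, n_exprs);

   and then solve the equations WIDTH expressions at a time, allocating
   sbitmap_vector_alloc (last_basic_block, width) per chunk.  */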
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
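/* For instance, PRE's dataflow setup consumes this as

	compute_local_properties (transp, comp, antloc, &expr_hash_table);

   and then feeds TRANSP/COMP/ANTLOC to the lazy code motion equations
   (see compute_pre_data).  */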
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;

static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG: case SUBREG: case CONST_INT: case CONST_DOUBLE:
    case CONST_VECTOR: case CALL:
    case CONSTANT_P_RTX:
      return 0;
    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC: case PRE_INC: case POST_DEC: case POST_INC:
    case PRE_MODIFY: case POST_MODIFY:
      return 0;

    case PC: case CC0: case CONST: case CONST_INT: case CONST_DOUBLE:
    case CONST_VECTOR: case SYMBOL_REF: case LABEL_REF:
    case ADDR_VEC: case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);
	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;
/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */
  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
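/* Usage note: as the comment above says, passing UID_LIMIT = max_uid + 1
   with AVAIL_P = 0 checks the whole block, e.g.

	load_killed_in_block_p (bb, max_uid + 1, x, 0)

   asks whether any store in BB can clobber the load X.  */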
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
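/* A lookup probes the bucket chain with expr_equiv_p, in the manner of
   lookup_expr (sketch):

	hash = hash_expr (x, mode, &do_not_record_p, table->size);
	for (expr = table->table[hash]; expr; expr = expr->next_same_hash)
	  if (expr_equiv_p (expr->expr, x))
	    break;
  */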
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
1549 hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
1556 /* Used to turn recursion into iteration. We can't rely on GCC's
1557 tail-recursion elimination since we need to keep accumulating values
1564 code = GET_CODE (x);
1568 hash += ((unsigned int) REG << 7) + REGNO (x);
1572 hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
1573 + (unsigned int) INTVAL (x));
1577 /* This is like the general case, except that it only counts
1578 the integers representing the constant. */
1579 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1580 if (GET_MODE (x) != VOIDmode)
1581 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
1582 hash += (unsigned int) XWINT (x, i);
1584 hash += ((unsigned int) CONST_DOUBLE_LOW (x)
1585 + (unsigned int) CONST_DOUBLE_HIGH (x));
1593 units = CONST_VECTOR_NUNITS (x);
1595 for (i = 0; i < units; ++i)
1597 elt = CONST_VECTOR_ELT (x, i);
1598 hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
1604 /* Assume there is only one rtx object for any given label. */
1606 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1607 differences and differences between each stage's debugging dumps. */
1608 hash += (((unsigned int) LABEL_REF << 7)
1609 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1614 /* Don't hash on the symbol's address to avoid bootstrap differences.
1615 Different hash values may cause expressions to be recorded in
1616 different orders and thus different registers to be used in the
1617 final assembler. This also avoids differences in the dump files
1618 between various stages. */
1620 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1623 h += (h << 7) + *p++; /* ??? revisit */
1625 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1630 if (MEM_VOLATILE_P (x))
1632 *do_not_record_p = 1;
1636 hash += (unsigned int) MEM;
1637 /* We used alias set for hashing, but this is not good, since the alias
1638 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1639 causing the profiles to fail to match. */
1650 case UNSPEC_VOLATILE:
1651 *do_not_record_p = 1;
1655 if (MEM_VOLATILE_P (x))
1657 *do_not_record_p = 1;
1662 /* We don't want to take the filename and line into account. */
1663 hash += (unsigned) code + (unsigned) GET_MODE (x)
1664 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
1665 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
1666 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
1668 if (ASM_OPERANDS_INPUT_LENGTH (x))
1670 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
1672 hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
1673 GET_MODE (ASM_OPERANDS_INPUT (x, i)),
1675 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1679 hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
1680 x = ASM_OPERANDS_INPUT (x, 0);
1681 mode = GET_MODE (x);
1691 hash += (unsigned) code + (unsigned) GET_MODE (x);
1692 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
1696 /* If we are about to do the last recursive call
1697 needed at this level, change it into iteration.
1698 This function is called enough to be worth it. */
1705 hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
1706 if (*do_not_record_p)
1710 else if (fmt[i] == 'E')
1711 for (j = 0; j < XVECLEN (x, i); j++)
1713 hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
1714 if (*do_not_record_p)
1718 else if (fmt[i] == 's')
1719 hash += hash_string_1 (XSTR (x, i));
1720 else if (fmt[i] == 'i')
1721 hash += (unsigned int) XINT (x, i);
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return nonzero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;

      /* A volatile mem should not be considered equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS: case MULT: case AND: case IOR: case XOR:
    case NE: case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
				ASM_OPERANDS_INPUT (y, i))
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
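/* For example, (plus:SI (reg 60) (reg 61)) is equivalent to
   (plus:SI (reg 61) (reg 60)) via the commutative case above, while
   (mult:SI ...) and (mult:HI ...) never match (pseudo numbers
   hypothetical).  */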
1889 /* Insert expression X in INSN in the hash TABLE.
1890 If it is already present, record it as the last occurrence in INSN's
1893 MODE is the mode of the value X is being stored into.
1894 It is only used if X is a CONST_INT.
1896 ANTIC_P is nonzero if X is an anticipatable expression.
1897 AVAIL_P is nonzero if X is an available expression. */
1900 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1901 int avail_p, struct hash_table *table)
1903 int found, do_not_record_p;
1905 struct expr *cur_expr, *last_expr = NULL;
1906 struct occr *antic_occr, *avail_occr;
1907 struct occr *last_occr = NULL;
1909 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1911 /* Do not insert expression in table if it contains volatile operands,
1912 or if hash_expr determines the expression is something we don't want
1913 to or can't handle. */
1914 if (do_not_record_p)
1917 cur_expr = table->table[hash];
1920 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1922 /* If the expression isn't found, save a pointer to the end of
1924 last_expr = cur_expr;
1925 cur_expr = cur_expr->next_same_hash;
1930 cur_expr = gcse_alloc (sizeof (struct expr));
1931 bytes_used += sizeof (struct expr);
1932 if (table->table[hash] == NULL)
1933 /* This is the first pattern that hashed to this index. */
1934 table->table[hash] = cur_expr;
1936 /* Add EXPR to end of this hash chain. */
1937 last_expr->next_same_hash = cur_expr;
1939 /* Set the fields of the expr element. */
1941 cur_expr->bitmap_index = table->n_elems++;
1942 cur_expr->next_same_hash = NULL;
1943 cur_expr->antic_occr = NULL;
1944 cur_expr->avail_occr = NULL;
1947 /* Now record the occurrence(s). */
1950 antic_occr = cur_expr->antic_occr;
1952 /* Search for another occurrence in the same basic block. */
1953 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1955 /* If an occurrence isn't found, save a pointer to the end of
1957 last_occr = antic_occr;
1958 antic_occr = antic_occr->next;
1962 /* Found another instance of the expression in the same basic block.
1963 Prefer the currently recorded one. We want the first one in the
1964 block and the block is scanned from start to end. */
1965 ; /* nothing to do */
1968 /* First occurrence of this expression in this basic block. */
1969 antic_occr = gcse_alloc (sizeof (struct occr));
1970 bytes_used += sizeof (struct occr);
1971 /* First occurrence of this expression in any block? */
1972 if (cur_expr->antic_occr == NULL)
1973 cur_expr->antic_occr = antic_occr;
1975 last_occr->next = antic_occr;
1977 antic_occr->insn = insn;
1978 antic_occr->next = NULL;
1984 avail_occr = cur_expr->avail_occr;
1986 /* Search for another occurrence in the same basic block. */
1987 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1989 /* If an occurrence isn't found, save a pointer to the end of
1991 last_occr = avail_occr;
1992 avail_occr = avail_occr->next;
1996 /* Found another instance of the expression in the same basic block.
1997 Prefer this occurrence to the currently recorded one. We want
1998 the last one in the block and the block is scanned from start
2000 avail_occr->insn = insn;
2003 /* First occurrence of this expression in this basic block. */
2004 avail_occr = gcse_alloc (sizeof (struct occr));
2005 bytes_used += sizeof (struct occr);
2007 /* First occurrence of this expression in any block? */
2008 if (cur_expr->avail_occr == NULL)
2009 cur_expr->avail_occr = avail_occr;
2011 last_occr->next = avail_occr;
2013 avail_occr->insn = insn;
2014 avail_occr->next = NULL;
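/* For illustration only (not part of the pass): a chain built by
   insert_expr_in_table can be walked like this, assuming the struct
   layouts used above:

       struct expr *e;
       struct occr *o;
       for (e = table->table[hash]; e != NULL; e = e->next_same_hash)
	 for (o = e->avail_occr; o != NULL; o = o->next)
	   ... BLOCK_NUM (o->insn) gives the block of that occurrence ...

   Note the invariant maintained above: per basic block there is at most
   one antic_occr (the first occurrence in the block) and at most one
   avail_occr (the last occurrence in the block).  */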
2019 /* Insert pattern X in INSN in the hash table.
2020 X is a SET of a reg to either another reg or a constant.
2021 If it is already present, record it as the last occurrence in INSN's basic block.
2025 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2029 struct expr *cur_expr, *last_expr = NULL;
2030 struct occr *cur_occr, *last_occr = NULL;
2032 if (GET_CODE (x) != SET
2033 || GET_CODE (SET_DEST (x)) != REG)
2036 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2038 cur_expr = table->table[hash];
2041 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2043 /* If the expression isn't found, save a pointer to the end of the list. */
2045 last_expr = cur_expr;
2046 cur_expr = cur_expr->next_same_hash;
2051 cur_expr = gcse_alloc (sizeof (struct expr));
2052 bytes_used += sizeof (struct expr);
2053 if (table->table[hash] == NULL)
2054 /* This is the first pattern that hashed to this index. */
2055 table->table[hash] = cur_expr;
2057 /* Add EXPR to end of this hash chain. */
2058 last_expr->next_same_hash = cur_expr;
2060 /* Set the fields of the expr element.
2061 We must copy X because it can be modified when copy propagation is
2062 performed on its operands. */
2063 cur_expr->expr = copy_rtx (x);
2064 cur_expr->bitmap_index = table->n_elems++;
2065 cur_expr->next_same_hash = NULL;
2066 cur_expr->antic_occr = NULL;
2067 cur_expr->avail_occr = NULL;
2070 /* Now record the occurrence. */
2071 cur_occr = cur_expr->avail_occr;
2073 /* Search for another occurrence in the same basic block. */
2074 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2076 /* If an occurrence isn't found, save a pointer to the end of the list. */
2078 last_occr = cur_occr;
2079 cur_occr = cur_occr->next;
2083 /* Found another instance of the expression in the same basic block.
2084 Prefer this occurrence to the currently recorded one. We want the
2085 last one in the block and the block is scanned from start to end. */
2086 cur_occr->insn = insn;
2089 /* First occurrence of this expression in this basic block. */
2090 cur_occr = gcse_alloc (sizeof (struct occr));
2091 bytes_used += sizeof (struct occr);
2093 /* First occurrence of this expression in any block? */
2094 if (cur_expr->avail_occr == NULL)
2095 cur_expr->avail_occr = cur_occr;
2097 last_occr->next = cur_occr;
2099 cur_occr->insn = insn;
2100 cur_occr->next = NULL;
2104 /* Determine whether the rtx X should be treated as a constant for
2105 the purposes of GCSE's constant propagation. */
2108 gcse_constant_p (rtx x)
2110 /* Consider a COMPARE of two integers constant. */
2111 if (GET_CODE (x) == COMPARE
2112 && GET_CODE (XEXP (x, 0)) == CONST_INT
2113 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2117 /* Consider a COMPARE of the same registers to be a constant
2118 if they are not floating point registers. */
2119 if (GET_CODE (x) == COMPARE
2120 && GET_CODE (XEXP (x, 0)) == REG
2121 && GET_CODE (XEXP (x, 1)) == REG
2122 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2123 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2124 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2127 if (GET_CODE (x) == CONSTANT_P_RTX)
2130 return CONSTANT_P (x);
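/* Illustrative examples: (compare (const_int 4) (const_int 7)) is
   treated as a constant, as is (compare (reg 100) (reg 100)) when
   reg 100 has an integral mode; the same COMPARE in SFmode is not,
   since with NaNs x == x need not hold.  */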
2133 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or expression entry). */
2137 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2139 rtx src = SET_SRC (pat);
2140 rtx dest = SET_DEST (pat);
2143 if (GET_CODE (src) == CALL)
2144 hash_scan_call (src, insn, table);
2146 else if (GET_CODE (dest) == REG)
2148 unsigned int regno = REGNO (dest);
2151 /* If this is a single set and we are doing constant propagation,
2152 see if a REG_NOTE shows this to be equivalent to a constant. */
2153 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2154 && gcse_constant_p (XEXP (note, 0)))
2155 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2157 /* Only record sets of pseudo-regs in the hash table. */
2159 && regno >= FIRST_PSEUDO_REGISTER
2160 /* Don't GCSE something if we can't do a reg/reg copy. */
2161 && can_copy_p (GET_MODE (dest))
2162 /* GCSE commonly inserts instructions after the insn. We can't
2163 do that easily for EH_REGION notes, so disable GCSE on these for now. */
2165 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2166 /* Is SET_SRC something we want to gcse? */
2167 && want_to_gcse_p (src)
2168 /* Don't CSE a nop. */
2169 && ! set_noop_p (pat)
2170 /* Don't GCSE if it has an attached REG_EQUIV note.
2171 At this point only function parameters should have
2172 REG_EQUIV notes, and if the argument slot is used somewhere
2173 explicitly, it means the address of the parameter has been taken,
2174 so we should not extend the lifetime of the pseudo. */
2175 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2176 || GET_CODE (XEXP (note, 0)) != MEM))
2178 /* An expression is not anticipatable if its operands are
2179 modified before this insn or if this is not the only SET in this insn. */
2181 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2182 /* An expression is not available if its operands are
2183 subsequently modified, including this insn. It's also not
2184 available if this is a branch, because we can't insert
2185 a set after the branch. */
2186 int avail_p = (oprs_available_p (src, insn)
2187 && ! JUMP_P (insn));
2189 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2192 /* Record sets for constant/copy propagation. */
2193 else if (table->set_p
2194 && regno >= FIRST_PSEUDO_REGISTER
2195 && ((GET_CODE (src) == REG
2196 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2197 && can_copy_p (GET_MODE (dest))
2198 && REGNO (src) != regno)
2199 || gcse_constant_p (src))
2200 /* A copy is not available if its src or dest is subsequently
2201 modified. Here we want to search from INSN+1 on, but
2202 oprs_available_p searches from INSN on. */
2203 && (insn == BLOCK_END (BLOCK_NUM (insn))
2204 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2205 && oprs_available_p (pat, tmp))))
2206 insert_set_in_table (pat, insn, table);
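/* Illustrative examples of what the two tables record: for the
   expression table, (set (reg 101) (plus (reg 100) (const_int 4)))
   enters the PLUS expression via insert_expr_in_table; for the set
   table, only whole copies and constant loads such as
   (set (reg 101) (reg 100)) and (set (reg 101) (const_int 4))
   are recorded, via insert_set_in_table above.  */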
2211 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2212 struct hash_table *table ATTRIBUTE_UNUSED)
2214 /* Currently nothing to do. */
2218 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2219 struct hash_table *table ATTRIBUTE_UNUSED)
2221 /* Currently nothing to do. */
2224 /* Process INSN and add hash table entries as appropriate.
2226 Only available expressions that set a single pseudo-reg are recorded.
2228 Single sets in a PARALLEL could be handled, but it's an extra complication
2229 that isn't dealt with right now. The trick is handling the CLOBBERs that
2230 are also in the PARALLEL. Later.
2232 If SET_P is nonzero, this is for the assignment hash table,
2233 otherwise it is for the expression hash table.
2234 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
2235 not record any expressions. */
2238 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2240 rtx pat = PATTERN (insn);
2243 if (in_libcall_block)
2246 /* Pick out the sets of INSN and for other forms of instructions record
2247 what's been modified. */
2249 if (GET_CODE (pat) == SET)
2250 hash_scan_set (pat, insn, table);
2251 else if (GET_CODE (pat) == PARALLEL)
2252 for (i = 0; i < XVECLEN (pat, 0); i++)
2254 rtx x = XVECEXP (pat, 0, i);
2256 if (GET_CODE (x) == SET)
2257 hash_scan_set (x, insn, table);
2258 else if (GET_CODE (x) == CLOBBER)
2259 hash_scan_clobber (x, insn, table);
2260 else if (GET_CODE (x) == CALL)
2261 hash_scan_call (x, insn, table);
2264 else if (GET_CODE (pat) == CLOBBER)
2265 hash_scan_clobber (pat, insn, table);
2266 else if (GET_CODE (pat) == CALL)
2267 hash_scan_call (pat, insn, table);
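/* E.g. an insn whose pattern is
     (parallel [(set (reg 100) ...) (clobber (reg 101))])
   has each element dispatched separately above; hash_scan_set still
   sees the inner SET, which is one reason antic_p in hash_scan_set
   also checks single_set (insn).  */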
2271 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2274 /* Flattened out table, so it's printed in proper order. */
2275 struct expr **flat_table;
2276 unsigned int *hash_val;
2279 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2280 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2282 for (i = 0; i < (int) table->size; i++)
2283 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2285 flat_table[expr->bitmap_index] = expr;
2286 hash_val[expr->bitmap_index] = i;
2289 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2290 name, table->size, table->n_elems);
2292 for (i = 0; i < (int) table->n_elems; i++)
2293 if (flat_table[i] != 0)
2295 expr = flat_table[i];
2296 fprintf (file, "Index %d (hash value %d)\n ",
2297 expr->bitmap_index, hash_val[i]);
2298 print_rtl (file, expr->expr);
2299 fprintf (file, "\n");
2302 fprintf (file, "\n");
2308 /* Record register first/last/block set information for REGNO in INSN.
2310 first_set records the first place in the block where the register
2311 is set and is used to compute "anticipatability".
2313 last_set records the last place in the block where the register
2314 is set and is used to compute "availability".
2316 last_bb records the block for which first_set and last_set are
2317 valid, as a quick test to invalidate them.
2319 reg_set_in_block records whether the register is set in the block
2320 and is used to compute "transparency". */
2323 record_last_reg_set_info (rtx insn, int regno)
2325 struct reg_avail_info *info = &reg_avail_info[regno];
2326 int cuid = INSN_CUID (insn);
2328 info->last_set = cuid;
2329 if (info->last_bb != current_bb)
2331 info->last_bb = current_bb;
2332 info->first_set = cuid;
2333 SET_BIT (reg_set_in_block[current_bb->index], regno);
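/* E.g. if reg 100 is set at cuids 5 and 9 within the current block,
   after both calls first_set is 5 (used for anticipatability),
   last_set is 9 (used for availability), and bit 100 of this block's
   reg_set_in_block is set (used for transparency).  */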
2338 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2339 Note we store a pair of elements in the list, so they have to be
2340 taken off pairwise. */
2343 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2346 rtx dest_addr, insn;
2349 while (GET_CODE (dest) == SUBREG
2350 || GET_CODE (dest) == ZERO_EXTRACT
2351 || GET_CODE (dest) == SIGN_EXTRACT
2352 || GET_CODE (dest) == STRICT_LOW_PART)
2353 dest = XEXP (dest, 0);
2355 /* If DEST is not a MEM, then it will not conflict with a load. Note
2356 that function calls are assumed to clobber memory, but are handled elsewhere. */
2359 if (GET_CODE (dest) != MEM)
2362 dest_addr = get_addr (XEXP (dest, 0));
2363 dest_addr = canon_rtx (dest_addr);
2364 insn = (rtx) v_insn;
2365 bb = BLOCK_NUM (insn);
2367 canon_modify_mem_list[bb] =
2368 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2369 canon_modify_mem_list[bb] =
2370 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2371 bitmap_set_bit (canon_modify_mem_list_set, bb);
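/* So after stores to MEMs A and then B in one block, the list reads
   (B, canon-addr-of-B, A, canon-addr-of-A), most recent pair first,
   which is why consumers must take the elements off two at a time.  */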
2374 /* Record memory modification information for INSN. We do not actually care
2375 about the memory location(s) that are set, or even how they are set (consider
2376 a CALL_INSN). We merely need to record which insns modify memory. */
2379 record_last_mem_set_info (rtx insn)
2381 int bb = BLOCK_NUM (insn);
2383 /* load_killed_in_block_p will handle the case of calls clobbering everything. */
2385 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2386 bitmap_set_bit (modify_mem_list_set, bb);
2388 if (GET_CODE (insn) == CALL_INSN)
2390 /* Note that traversals of this loop (other than for free-ing)
2391 will break after encountering a CALL_INSN. So, there's no
2392 need to insert a pair of items, as canon_list_insert does. */
2393 canon_modify_mem_list[bb] =
2394 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2395 bitmap_set_bit (canon_modify_mem_list_set, bb);
2398 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2401 /* Called from compute_hash_table via note_stores to handle one
2402 SET or CLOBBER in an insn. DATA is really the instruction in which
2403 the SET is taking place. */
2406 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2408 rtx last_set_insn = (rtx) data;
2410 if (GET_CODE (dest) == SUBREG)
2411 dest = SUBREG_REG (dest);
2413 if (GET_CODE (dest) == REG)
2414 record_last_reg_set_info (last_set_insn, REGNO (dest));
2415 else if (GET_CODE (dest) == MEM
2416 /* Ignore pushes, they clobber nothing. */
2417 && ! push_operand (dest, GET_MODE (dest)))
2418 record_last_mem_set_info (last_set_insn);
2421 /* Top level function to create an expression or assignment hash table.
2423 Expression entries are placed in the hash table if
2424 - they are of the form (set (pseudo-reg) src),
2425 - src is something we want to perform GCSE on,
2426 - none of the operands are subsequently modified in the block
2428 Assignment entries are placed in the hash table if
2429 - they are of the form (set (pseudo-reg) src),
2430 - src is something we want to perform const/copy propagation on,
2431 - none of the operands or target are subsequently modified in the block
2433 Currently src must be a pseudo-reg or a const_int.
2435 TABLE is the table computed. */
2438 compute_hash_table_work (struct hash_table *table)
2442 /* While we compute the hash table we also compute a bit array of which
2443 registers are set in which blocks.
2444 ??? This isn't needed during const/copy propagation, but it's cheap to compute. Later. */
2446 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2448 /* Re-cache any INSN_LIST nodes we have allocated. */
2449 clear_modify_mem_tables ();
2450 /* Some working arrays used to track first and last set in each block. */
2451 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2453 for (i = 0; i < max_gcse_regno; ++i)
2454 reg_avail_info[i].last_bb = NULL;
2456 FOR_EACH_BB (current_bb)
2460 int in_libcall_block;
2462 /* First pass over the instructions records information used to
2463 determine when registers and memory are first and last set.
2464 ??? hard-reg reg_set_in_block computation
2465 could be moved to compute_sets since they currently don't change. */
2467 for (insn = current_bb->head;
2468 insn && insn != NEXT_INSN (current_bb->end);
2469 insn = NEXT_INSN (insn))
2471 if (! INSN_P (insn))
2474 if (GET_CODE (insn) == CALL_INSN)
2476 bool clobbers_all = false;
2477 #ifdef NON_SAVING_SETJMP
2478 if (NON_SAVING_SETJMP
2479 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2480 clobbers_all = true;
2483 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2485 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2486 record_last_reg_set_info (insn, regno);
2491 note_stores (PATTERN (insn), record_last_set_info, insn);
2494 /* Insert implicit sets in the hash table. */
2496 && implicit_sets[current_bb->index] != NULL_RTX)
2497 hash_scan_set (implicit_sets[current_bb->index],
2498 current_bb->head, table);
2500 /* The next pass builds the hash table. */
2502 for (insn = current_bb->head, in_libcall_block = 0;
2503 insn && insn != NEXT_INSN (current_bb->end);
2504 insn = NEXT_INSN (insn))
2507 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2508 in_libcall_block = 1;
2509 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2510 in_libcall_block = 0;
2511 hash_scan_insn (insn, table, in_libcall_block);
2512 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2513 in_libcall_block = 0;
2517 free (reg_avail_info);
2518 reg_avail_info = NULL;
2521 /* Allocate space for the set/expr hash TABLE.
2522 N_INSNS is the number of instructions in the function.
2523 It is used to determine the number of buckets to use.
2524 SET_P determines whether a set or an expression table will be created. */
2528 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2532 table->size = n_insns / 4;
2533 if (table->size < 11)
2536 /* Attempt to maintain efficient use of hash table.
2537 Making it an odd number is simplest for now.
2538 ??? Later take some measurements. */
2540 n = table->size * sizeof (struct expr *);
2541 table->table = gmalloc (n);
2542 table->set_p = set_p;
2545 /* Free things allocated by alloc_hash_table. */
2548 free_hash_table (struct hash_table *table)
2550 free (table->table);
2553 /* Compute the contents of TABLE: either the set hash table, for
2554 copy/const propagation, or the expression hash table. */
2557 compute_hash_table (struct hash_table *table)
2559 /* Initialize count of number of entries in hash table. */
2561 memset (table->table, 0, table->size * sizeof (struct expr *));
2563 compute_hash_table_work (table);
2566 /* Expression tracking support. */
2568 /* Lookup pattern PAT in the expression TABLE.
2569 The result is a pointer to the table entry, or NULL if not found. */
2571 static struct expr *
2572 lookup_expr (rtx pat, struct hash_table *table)
2574 int do_not_record_p;
2575 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2579 if (do_not_record_p)
2582 expr = table->table[hash];
2584 while (expr && ! expr_equiv_p (expr->expr, pat))
2585 expr = expr->next_same_hash;
2590 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2591 table entry, or NULL if not found. */
2593 static struct expr *
2594 lookup_set (unsigned int regno, struct hash_table *table)
2596 unsigned int hash = hash_set (regno, table->size);
2599 expr = table->table[hash];
2601 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2602 expr = expr->next_same_hash;
2607 /* Return the next entry for REGNO in list EXPR. */
2609 static struct expr *
2610 next_set (unsigned int regno, struct expr *expr)
2613 expr = expr->next_same_hash;
2614 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2619 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2620 types may be mixed. */
2623 free_insn_expr_list_list (rtx *listp)
2627 for (list = *listp; list ; list = next)
2629 next = XEXP (list, 1);
2630 if (GET_CODE (list) == EXPR_LIST)
2631 free_EXPR_LIST_node (list);
2633 free_INSN_LIST_node (list);
2639 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2641 clear_modify_mem_tables (void)
2645 EXECUTE_IF_SET_IN_BITMAP
2646 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2647 bitmap_clear (modify_mem_list_set);
2649 EXECUTE_IF_SET_IN_BITMAP
2650 (canon_modify_mem_list_set, 0, i,
2651 free_insn_expr_list_list (canon_modify_mem_list + i));
2652 bitmap_clear (canon_modify_mem_list_set);
2655 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2658 free_modify_mem_tables (void)
2660 clear_modify_mem_tables ();
2661 free (modify_mem_list);
2662 free (canon_modify_mem_list);
2663 modify_mem_list = 0;
2664 canon_modify_mem_list = 0;
2667 /* Reset tables used to keep track of what's still available [since the
2668 start of the block]. */
2671 reset_opr_set_tables (void)
2673 /* Maintain a bitmap of which regs have been set since the beginning of the block. */
2675 CLEAR_REG_SET (reg_set_bitmap);
2677 /* Also keep a record of the last instruction to modify memory.
2678 For now this is very trivial, we only record whether any memory
2679 location has been modified. */
2680 clear_modify_mem_tables ();
2683 /* Return nonzero if the operands of X are not set before INSN in
2684 INSN's basic block. */
2687 oprs_not_set_p (rtx x, rtx insn)
2696 code = GET_CODE (x);
2712 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2713 INSN_CUID (insn), x, 0))
2716 return oprs_not_set_p (XEXP (x, 0), insn);
2719 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2725 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2729 /* If we are about to do the last recursive call
2730 needed at this level, change it into iteration.
2731 This function is called enough to be worth it. */
2733 return oprs_not_set_p (XEXP (x, i), insn);
2735 if (! oprs_not_set_p (XEXP (x, i), insn))
2738 else if (fmt[i] == 'E')
2739 for (j = 0; j < XVECLEN (x, i); j++)
2740 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2747 /* Mark things set by a CALL. */
2750 mark_call (rtx insn)
2752 if (! CONST_OR_PURE_CALL_P (insn))
2753 record_last_mem_set_info (insn);
2756 /* Mark things set by a SET. */
2759 mark_set (rtx pat, rtx insn)
2761 rtx dest = SET_DEST (pat);
2763 while (GET_CODE (dest) == SUBREG
2764 || GET_CODE (dest) == ZERO_EXTRACT
2765 || GET_CODE (dest) == SIGN_EXTRACT
2766 || GET_CODE (dest) == STRICT_LOW_PART)
2767 dest = XEXP (dest, 0);
2769 if (GET_CODE (dest) == REG)
2770 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2771 else if (GET_CODE (dest) == MEM)
2772 record_last_mem_set_info (insn);
2774 if (GET_CODE (SET_SRC (pat)) == CALL)
2778 /* Record things set by a CLOBBER. */
2781 mark_clobber (rtx pat, rtx insn)
2783 rtx clob = XEXP (pat, 0);
2785 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2786 clob = XEXP (clob, 0);
2788 if (GET_CODE (clob) == REG)
2789 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2791 record_last_mem_set_info (insn);
2794 /* Record things set by INSN.
2795 This data is used by oprs_not_set_p. */
2798 mark_oprs_set (rtx insn)
2800 rtx pat = PATTERN (insn);
2803 if (GET_CODE (pat) == SET)
2804 mark_set (pat, insn);
2805 else if (GET_CODE (pat) == PARALLEL)
2806 for (i = 0; i < XVECLEN (pat, 0); i++)
2808 rtx x = XVECEXP (pat, 0, i);
2810 if (GET_CODE (x) == SET)
2812 else if (GET_CODE (x) == CLOBBER)
2813 mark_clobber (x, insn);
2814 else if (GET_CODE (x) == CALL)
2818 else if (GET_CODE (pat) == CLOBBER)
2819 mark_clobber (pat, insn);
2820 else if (GET_CODE (pat) == CALL)
2825 /* Classic GCSE reaching definition support. */
2827 /* Allocate reaching def variables. */
2830 alloc_rd_mem (int n_blocks, int n_insns)
2832 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2833 sbitmap_vector_zero (rd_kill, n_blocks);
2835 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2836 sbitmap_vector_zero (rd_gen, n_blocks);
2838 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2839 sbitmap_vector_zero (reaching_defs, n_blocks);
2841 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2842 sbitmap_vector_zero (rd_out, n_blocks);
2845 /* Free reaching def variables. */
2850 sbitmap_vector_free (rd_kill);
2851 sbitmap_vector_free (rd_gen);
2852 sbitmap_vector_free (reaching_defs);
2853 sbitmap_vector_free (rd_out);
2856 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2859 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2861 struct reg_set *this_reg;
2863 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
2864 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2865 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2868 /* Compute the set of kills for reaching definitions. */
2871 compute_kill_rd (void)
2879 For each set bit in `gen' of the block (i.e. each insn which
2880 generates a definition in the block)
2881 Call the reg set by the insn corresponding to that bit regx
2882 Look at the linked list starting at reg_set_table[regx]
2883 For each setting of regx in the linked list, which is not in
2884 this block
2885 Set the bit in `kill' corresponding to that insn. */
2887 for (cuid = 0; cuid < max_cuid; cuid++)
2888 if (TEST_BIT (rd_gen[bb->index], cuid))
2890 rtx insn = CUID_INSN (cuid);
2891 rtx pat = PATTERN (insn);
2893 if (GET_CODE (insn) == CALL_INSN)
2895 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2896 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2897 handle_rd_kill_set (insn, regno, bb);
2900 if (GET_CODE (pat) == PARALLEL)
2902 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2904 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2906 if ((code == SET || code == CLOBBER)
2907 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2908 handle_rd_kill_set (insn,
2909 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2913 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2914 /* Each setting of this register outside of this block
2915 must be marked in the set of kills in this block. */
2916 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2920 /* Compute the reaching definitions as in
2921 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2922 Chapter 10. It is the same algorithm as used for computing available
2923 expressions but applied to the gens and kills of reaching definitions. */
2928 int changed, passes;
2932 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2941 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2942 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2943 reaching_defs[bb->index], rd_kill[bb->index]);
2949 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
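/* The iteration above solves, in the usual dataflow notation:

     reaching_defs (bb) = union over preds p of rd_out (p)
     rd_out (bb)        = rd_gen (bb) | (reaching_defs (bb) & ~rd_kill (bb))

   and stops once no rd_out changes; PASSES only feeds the dump.  */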
2952 /* Classic GCSE available expression support. */
2954 /* Allocate memory for available expression computation. */
2957 alloc_avail_expr_mem (int n_blocks, int n_exprs)
2959 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
2960 sbitmap_vector_zero (ae_kill, n_blocks);
2962 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
2963 sbitmap_vector_zero (ae_gen, n_blocks);
2965 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
2966 sbitmap_vector_zero (ae_in, n_blocks);
2968 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
2969 sbitmap_vector_zero (ae_out, n_blocks);
2973 free_avail_expr_mem (void)
2975 sbitmap_vector_free (ae_kill);
2976 sbitmap_vector_free (ae_gen);
2977 sbitmap_vector_free (ae_in);
2978 sbitmap_vector_free (ae_out);
2981 /* Compute the set of available expressions generated in each basic block. */
2984 compute_ae_gen (struct hash_table *expr_hash_table)
2990 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2991 This is all we have to do because an expression is not recorded if it
2992 is not available, and the only expressions we want to work with are the
2993 ones that are recorded. */
2994 for (i = 0; i < expr_hash_table->size; i++)
2995 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
2996 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
2997 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3000 /* Return nonzero if expression X is killed in BB. */
3003 expr_killed_p (rtx x, basic_block bb)
3012 code = GET_CODE (x);
3016 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3019 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3022 return expr_killed_p (XEXP (x, 0), bb);
3040 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3044 /* If we are about to do the last recursive call
3045 needed at this level, change it into iteration.
3046 This function is called enough to be worth it. */
3048 return expr_killed_p (XEXP (x, i), bb);
3049 else if (expr_killed_p (XEXP (x, i), bb))
3052 else if (fmt[i] == 'E')
3053 for (j = 0; j < XVECLEN (x, i); j++)
3054 if (expr_killed_p (XVECEXP (x, i, j), bb))
3061 /* Compute the set of available expressions killed in each basic block. */
3064 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3065 struct hash_table *expr_hash_table)
3072 for (i = 0; i < expr_hash_table->size; i++)
3073 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3075 /* Skip EXPR if generated in this block. */
3076 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3079 if (expr_killed_p (expr->expr, bb))
3080 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
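/* With ae_gen and ae_kill in hand, compute_available (called from
   one_classic_gcse_pass below) solves:

     ae_in (bb)  = intersection over preds p of ae_out (p)
     ae_out (bb) = ae_gen (bb) | (ae_in (bb) & ~ae_kill (bb))  */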
3084 /* Actually perform the Classic GCSE optimizations. */
3086 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3088 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3089 as a positive reach. We want to do this when there are two computations
3090 of the expression in the block.
3092 VISITED is a pointer to a working buffer for tracking which BB's have
3093 been visited. It is NULL for the top-level call.
3095 We treat reaching expressions that go through blocks containing the same
3096 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3097 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3098 2 as not reaching. The intent is to improve the probability of finding
3099 only one reaching expression and to reduce register lifetimes by picking
3100 the closest such expression. */
3103 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3104 basic_block bb, int check_self_loop, char *visited)
3108 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3110 basic_block pred_bb = pred->src;
3112 if (visited[pred_bb->index])
3113 /* This predecessor has already been visited. Nothing to do. */
3115 else if (pred_bb == bb)
3117 /* BB loops on itself. */
3119 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3120 && BLOCK_NUM (occr->insn) == pred_bb->index)
3123 visited[pred_bb->index] = 1;
3126 /* Ignore this predecessor if it kills the expression. */
3127 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3128 visited[pred_bb->index] = 1;
3130 /* Does this predecessor generate this expression? */
3131 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3133 /* Is this the occurrence we're looking for?
3134 Note that there's only one generating occurrence per block
3135 so we just need to check the block number. */
3136 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3139 visited[pred_bb->index] = 1;
3142 /* Neither gen nor kill. */
3145 visited[pred_bb->index] = 1;
3146 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3153 /* All paths have been checked. */
3157 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3158 memory allocated for that function is returned. */
3161 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3162 int check_self_loop)
3165 char *visited = xcalloc (last_basic_block, 1);
3167 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3173 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3174 If there is more than one such instruction, return NULL.
3176 Called only by handle_avail_expr. */
3179 computing_insn (struct expr *expr, rtx insn)
3181 basic_block bb = BLOCK_FOR_INSN (insn);
3183 if (expr->avail_occr->next == NULL)
3185 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3186 /* The available expression is actually itself
3187 (i.e. a loop in the flow graph) so do nothing. */
3190 /* (FIXME) Case that we found a pattern that was created by
3191 a substitution that took place. */
3192 return expr->avail_occr->insn;
3196 /* Pattern is computed more than once.
3197 Search backwards from this insn to see how many of these
3198 computations actually reach this insn. */
3200 rtx insn_computes_expr = NULL;
3203 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3205 if (BLOCK_FOR_INSN (occr->insn) == bb)
3207 /* The expression is generated in this block.
3208 The only time we care about this is when the expression
3209 is generated later in the block [and thus there's a loop].
3210 We let the normal cse pass handle the other cases. */
3211 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3212 && expr_reaches_here_p (occr, expr, bb, 1))
3218 insn_computes_expr = occr->insn;
3221 else if (expr_reaches_here_p (occr, expr, bb, 0))
3227 insn_computes_expr = occr->insn;
3231 if (insn_computes_expr == NULL)
3234 return insn_computes_expr;
3238 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3239 Only called by can_disregard_other_sets. */
3242 def_reaches_here_p (rtx insn, rtx def_insn)
3246 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3249 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3251 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3253 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3255 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3256 reg = XEXP (PATTERN (def_insn), 0);
3257 else if (GET_CODE (PATTERN (def_insn)) == SET)
3258 reg = SET_DEST (PATTERN (def_insn));
3262 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3271 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3272 value returned is the number of definitions that reach INSN. Returning a
3273 value of zero means that [maybe] more than one definition reaches INSN and
3274 the caller can't perform whatever optimization it is trying to do; i.e. it is
3275 always safe to return zero. */
3278 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3280 int number_of_reaching_defs = 0;
3281 struct reg_set *this_reg;
3283 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3284 if (def_reaches_here_p (insn, this_reg->insn))
3286 number_of_reaching_defs++;
3287 /* Ignore parallels for now. */
3288 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3292 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3293 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3294 SET_SRC (PATTERN (insn)))))
3295 /* A setting of the reg to a different value reaches INSN. */
3298 if (number_of_reaching_defs > 1)
3300 /* If in this setting the value the register is being set to is
3301 equal to the previous value the register was set to, and this
3302 setting reaches the insn we are trying to do the substitution
3303 on, then we are ok. */
3304 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3306 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3307 SET_SRC (PATTERN (insn))))
3311 *addr_this_reg = this_reg;
3314 return number_of_reaching_defs;
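/* Roughly: a reaching CLOBBER, or a reaching set whose source differs
   from INSN's own SET_SRC, makes the answer 0; if every reaching
   definition sets the reg to that same value, their count is returned
   and the caller may disregard the duplicates.  */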
3317 /* Expression computed by insn is available and the substitution is legal,
3318 so try to perform the substitution.
3320 The result is nonzero if any changes were made. */
3323 handle_avail_expr (rtx insn, struct expr *expr)
3325 rtx pat, insn_computes_expr, expr_set;
3327 struct reg_set *this_reg;
3328 int found_setting, use_src;
3331 /* We only handle the case where one computation of the expression
3332 reaches this instruction. */
3333 insn_computes_expr = computing_insn (expr, insn);
3334 if (insn_computes_expr == NULL)
3336 expr_set = single_set (insn_computes_expr);
3343 /* At this point we know only one computation of EXPR outside of this
3344 block reaches this insn. Now try to find a register that the
3345 expression is computed into. */
3346 if (GET_CODE (SET_SRC (expr_set)) == REG)
3348 /* This is the case when the available expression that reaches
3349 here has already been handled as an available expression. */
3350 unsigned int regnum_for_replacing
3351 = REGNO (SET_SRC (expr_set));
3353 /* If the register was created by GCSE we can't use `reg_set_table',
3354 however we know it's set only once. */
3355 if (regnum_for_replacing >= max_gcse_regno
3356 /* If the register the expression is computed into is set only once,
3357 or only one set reaches this insn, we can use it. */
3358 || (((this_reg = reg_set_table[regnum_for_replacing]),
3359 this_reg->next == NULL)
3360 || can_disregard_other_sets (&this_reg, insn, 0)))
3369 unsigned int regnum_for_replacing
3370 = REGNO (SET_DEST (expr_set));
3372 /* This shouldn't happen. */
3373 if (regnum_for_replacing >= max_gcse_regno)
3376 this_reg = reg_set_table[regnum_for_replacing];
3378 /* If the register the expression is computed into is set only once,
3379 or only one set reaches this insn, use it. */
3380 if (this_reg->next == NULL
3381 || can_disregard_other_sets (&this_reg, insn, 0))
3387 pat = PATTERN (insn);
3389 to = SET_SRC (expr_set);
3391 to = SET_DEST (expr_set);
3392 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3394 /* We should be able to ignore the return code from validate_change but
3395 to play it safe we check. */
3399 if (gcse_file != NULL)
3401 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3403 fprintf (gcse_file, " reg %d %s insn %d\n",
3404 REGNO (to), use_src ? "from" : "set in",
3405 INSN_UID (insn_computes_expr));
3410 /* The register that the expr is computed into is set more than once. */
3411 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3413 /* Insert an insn after INSN_COMPUTES_EXPR that copies the reg it
3414 sets into a new pseudo register; call this new register REGN.
3415 Then replace the source of INSN with REGN, so INSN no longer
3416 depends on which of the multiple sets reaches it. */
3419 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3421 /* Generate the new insn. */
3422 /* ??? If the change fails, we return 0, even though we created
3423 an insn. I think this is ok. */
3425 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3426 SET_DEST (expr_set)),
3427 insn_computes_expr);
3429 /* Keep register set table up to date. */
3430 record_one_set (REGNO (to), new_insn);
3432 gcse_create_count++;
3433 if (gcse_file != NULL)
3435 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3436 INSN_UID (NEXT_INSN (insn_computes_expr)),
3437 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3438 fprintf (gcse_file, ", computed in insn %d,\n",
3439 INSN_UID (insn_computes_expr));
3440 fprintf (gcse_file, " into newly allocated reg %d\n",
3444 pat = PATTERN (insn);
3446 /* Do register replacement for INSN. */
3447 changed = validate_change (insn, &SET_SRC (pat),
3449 (NEXT_INSN (insn_computes_expr))),
3452 /* We should be able to ignore the return code from validate_change but
3453 to play it safe we check. */
3457 if (gcse_file != NULL)
3460 "GCSE: Replacing the source in insn %d with reg %d ",
3462 REGNO (SET_DEST (PATTERN (NEXT_INSN
3463 (insn_computes_expr)))));
3464 fprintf (gcse_file, "set in insn %d\n",
3465 INSN_UID (insn_computes_expr));
3473 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3474 the dataflow analysis has been done.
3476 The result is nonzero if a change was made. */
3485 /* Note we start at block 1. */
3487 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3491 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3493 /* Reset tables used to keep track of what's still valid [since the
3494 start of the block]. */
3495 reset_opr_set_tables ();
3497 for (insn = bb->head;
3498 insn != NULL && insn != NEXT_INSN (bb->end);
3499 insn = NEXT_INSN (insn))
3501 /* Is insn of form (set (pseudo-reg) ...)? */
3502 if (GET_CODE (insn) == INSN
3503 && GET_CODE (PATTERN (insn)) == SET
3504 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3505 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3507 rtx pat = PATTERN (insn);
3508 rtx src = SET_SRC (pat);
3511 if (want_to_gcse_p (src)
3512 /* Is the expression recorded? */
3513 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3514 /* Is the expression available [at the start of the
3516 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3517 /* Are the operands unchanged since the start of the
3519 && oprs_not_set_p (src, insn))
3520 changed |= handle_avail_expr (insn, expr);
3523 /* Keep track of everything modified by this insn. */
3524 /* ??? Need to be careful w.r.t. mods done to INSN. */
3526 mark_oprs_set (insn);
3533 /* Top level routine to perform one classic GCSE pass.
3535 Return nonzero if a change was made. */
3538 one_classic_gcse_pass (int pass)
3542 gcse_subst_count = 0;
3543 gcse_create_count = 0;
3545 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3546 alloc_rd_mem (last_basic_block, max_cuid);
3547 compute_hash_table (&expr_hash_table);
3549 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3551 if (expr_hash_table.n_elems > 0)
3555 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3556 compute_ae_gen (&expr_hash_table);
3557 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3558 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3559 changed = classic_gcse ();
3560 free_avail_expr_mem ();
3564 free_hash_table (&expr_hash_table);
3568 fprintf (gcse_file, "\n");
3569 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3570 current_function_name, pass, bytes_used, gcse_subst_count);
3571 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3577 /* Compute copy/constant propagation working variables. */
3579 /* Local properties of assignments. */
3580 static sbitmap *cprop_pavloc;
3581 static sbitmap *cprop_absaltered;
3583 /* Global properties of assignments (computed from the local properties). */
3584 static sbitmap *cprop_avin;
3585 static sbitmap *cprop_avout;
3587 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3588 basic blocks. N_SETS is the number of sets. */
3591 alloc_cprop_mem (int n_blocks, int n_sets)
3593 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3594 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3596 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3597 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3600 /* Free vars used by copy/const propagation. */
3603 free_cprop_mem (void)
3605 sbitmap_vector_free (cprop_pavloc);
3606 sbitmap_vector_free (cprop_absaltered);
3607 sbitmap_vector_free (cprop_avin);
3608 sbitmap_vector_free (cprop_avout);
3611 /* For each block, compute whether X is transparent. X is either an
3612 expression or an assignment [though we don't care which, for this context
3613 an assignment is treated as an expression]. For each block where an
3614 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
3618 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3626 /* repeat is used to turn tail-recursion into iteration since GCC
3627 can't do it when there's no return value. */
3633 code = GET_CODE (x);
3639 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3642 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3643 SET_BIT (bmap[bb->index], indx);
3647 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3648 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3653 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3656 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3657 RESET_BIT (bmap[bb->index], indx);
3661 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3662 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3671 rtx list_entry = canon_modify_mem_list[bb->index];
3675 rtx dest, dest_addr;
3677 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3680 SET_BIT (bmap[bb->index], indx);
3682 RESET_BIT (bmap[bb->index], indx);
3685 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3686 Examine each hunk of memory that is modified. */
3688 dest = XEXP (list_entry, 0);
3689 list_entry = XEXP (list_entry, 1);
3690 dest_addr = XEXP (list_entry, 0);
3692 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3693 x, rtx_addr_varies_p))
3696 SET_BIT (bmap[bb->index], indx);
3698 RESET_BIT (bmap[bb->index], indx);
3701 list_entry = XEXP (list_entry, 1);
3724 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3728 /* If we are about to do the last recursive call
3729 needed at this level, change it into iteration.
3730 This function is called enough to be worth it. */
3737 compute_transp (XEXP (x, i), indx, bmap, set_p);
3739 else if (fmt[i] == 'E')
3740 for (j = 0; j < XVECLEN (x, i); j++)
3741 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
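/* E.g. with SET_P == 0 and X == (plus (reg 100) (mem (reg 101))), the
   INDX bit is reset in every block that sets reg 100 and in every block
   containing a store (or call) that may alias the MEM; assuming BMAP
   started out all ones, as the transparency computation arranges, the
   bit stays set exactly in the blocks where X is transparent.  */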
3745 /* Top level routine to do the dataflow analysis needed by copy/const propagation. */
3749 compute_cprop_data (void)
3751 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3752 compute_available (cprop_pavloc, cprop_absaltered,
3753 cprop_avout, cprop_avin);
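/* For reference, compute_available then solves over assignments:

     cprop_avin (bb)  = intersection over preds p of cprop_avout (p)
     cprop_avout (bb) = cprop_pavloc (bb)
			| (cprop_avin (bb) & ~cprop_absaltered (bb))  */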
3756 /* Copy/constant propagation. */
3758 /* Maximum number of register uses in an insn that we handle. */
3761 /* Table of uses found in an insn.
3762 Allocated statically to avoid alloc/free complexity and overhead. */
3763 static struct reg_use reg_use_table[MAX_USES];
3765 /* Index into `reg_use_table' while building it. */
3766 static int reg_use_count;
3768 /* Set up a list of register numbers used in INSN. The found uses are stored
3769 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3770 and contains the number of uses in the table upon exit.
3772 ??? If a register appears multiple times we will record it multiple times.
3773 This doesn't hurt anything but it will slow things down. */
3776 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3783 /* repeat is used to turn tail-recursion into iteration since GCC
3784 can't do it when there's no return value. */
3789 code = GET_CODE (x);
3792 if (reg_use_count == MAX_USES)
3795 reg_use_table[reg_use_count].reg_rtx = x;
3799 /* Recursively scan the operands of this expression. */
3801 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3805 /* If we are about to do the last recursive call
3806 needed at this level, change it into iteration.
3807 This function is called enough to be worth it. */
3814 find_used_regs (&XEXP (x, i), data);
3816 else if (fmt[i] == 'E')
3817 for (j = 0; j < XVECLEN (x, i); j++)
3818 find_used_regs (&XVECEXP (x, i, j), data);
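/* E.g. scanning (set (reg 100) (plus (reg 101) (reg 102))) through
   note_uses records reg 101 and reg 102 in reg_use_table (the plain
   register SET_DEST is not a use), leaving reg_use_count == 2.  */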
3822 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3823 Returns nonzero if successful. */
3826 try_replace_reg (rtx from, rtx to, rtx insn)
3828 rtx note = find_reg_equal_equiv_note (insn);
3831 rtx set = single_set (insn);
3833 validate_replace_src_group (from, to, insn);
3834 if (num_changes_pending () && apply_change_group ())
3837 /* Try to simplify SET_SRC if we have substituted a constant. */
3838 if (success && set && CONSTANT_P (to))
3840 src = simplify_rtx (SET_SRC (set));
3843 validate_change (insn, &SET_SRC (set), src, 0);
3846 /* If there is already a NOTE, update the expression in it with our replacement. */
3849 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3851 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3853 /* If above failed and this is a single set, try to simplify the source of
3854 the set given our substitution. We could perhaps try this for multiple
3855 SETs, but it probably won't buy us anything. */
3856 src = simplify_replace_rtx (SET_SRC (set), from, to);
3858 if (!rtx_equal_p (src, SET_SRC (set))
3859 && validate_change (insn, &SET_SRC (set), src, 0))
3862 /* If we've failed to do replacement, have a single SET, don't already
3863 have a note, and have no special SET, add a REG_EQUAL note to not
3864 lose information. */
3865 if (!success && note == 0 && set != 0
3866 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3867 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3868 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3871 /* A REG_EQUAL note may get simplified into a plain register; we
3872 don't allow that, so remove the note. This ought not to happen,
3873 because previous code ought to have synthesized a reg-reg move,
3874 but be on the safe side. */
3875 if (note && REG_P (XEXP (note, 0)))
3876 remove_note (insn, note);
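/* Roughly, for example: replacing reg 100 by (const_int 4) in
     (set (reg 101) (plus (reg 100) (const_int 1)))
   first tries (plus (const_int 4) (const_int 1)) directly; if the
   target rejects that insn, the source is folded to (const_int 5) and
   retried; and if even that fails, a (REG_EQUAL (const_int 5)) note is
   added so the information is not lost.  */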
3881 /* Find a set of register REGNO that is available on entry to INSN's block. Returns
3882 NULL if no such set is found. */
3884 static struct expr *
3885 find_avail_set (int regno, rtx insn)
3887 /* SET1 contains the last set found that can be returned to the caller for
3888 use in a substitution. */
3889 struct expr *set1 = 0;
3891 /* Loops are not possible here. To get a loop we would need two sets
3892 available at the start of the block containing INSN. ie we would
3893 need two sets like this available at the start of the block:
3895 (set (reg X) (reg Y))
3896 (set (reg Y) (reg X))
3898 This cannot happen since the set of (reg Y) would have killed the
3899 set of (reg X) making it unavailable at the start of this block. */
3903 struct expr *set = lookup_set (regno, &set_hash_table);
3905 /* Find a set that is available at the start of the block
3906 which contains INSN. */
3909 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3911 set = next_set (regno, set);
3914 /* If no available set was found we've reached the end of the
3915 (possibly empty) copy chain. */
3919 if (GET_CODE (set->expr) != SET)
3922 src = SET_SRC (set->expr);
3924 /* We know the set is available.
3925 Now check that SRC is ANTLOC (i.e. none of the source operands
3926 have changed since the start of the block).
3928 If the source operand changed, we may still use it for the next
3929 iteration of this loop, but we may not use it for substitutions. */
3931 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3934 /* If the source of the set is anything except a register, then
3935 we have reached the end of the copy chain. */
3936 if (GET_CODE (src) != REG)
3939 /* Follow the copy chain, ie start another iteration of the loop
3940 and see if we have an available copy into SRC. */
3941 regno = REGNO (src);
3944 /* SET1 holds the last set that was available and anticipatable at INSN. */
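/* E.g. if both (set (reg 101) (reg 100)) and (set (reg 102) (reg 101))
   are available at the start of INSN's block and INSN uses reg 102,
   the loop above first finds the copy from reg 101, then retries with
   reg 101 and finds the copy from reg 100; SET1 ends up being the last
   set in the chain whose source was still unchanged at INSN.  */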
3949 /* Subroutine of cprop_insn that tries to propagate constants into
3950 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3951 it is the instruction that immediately precedes JUMP, and must be a
3952 single SET of a register. FROM is what we will try to replace,
3953 SRC is the constant we will try to substitute for it. Returns nonzero
3954 if a change was made. */
3957 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
3959 rtx new, set_src, note_src;
3960 rtx set = pc_set (jump);
3961 rtx note = find_reg_equal_equiv_note (jump);
3965 note_src = XEXP (note, 0);
3966 if (GET_CODE (note_src) == EXPR_LIST)
3967 note_src = NULL_RTX;
3969 else note_src = NULL_RTX;
3971 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
3972 set_src = note_src ? note_src : SET_SRC (set);
3974 /* First substitute the SETCC condition into the JUMP instruction,
3975 then substitute the known values into this expanded JUMP. */
3976 if (setcc != NULL_RTX
3977 && !modified_between_p (from, setcc, jump)
3978 && !modified_between_p (src, setcc, jump))
3981 rtx setcc_set = single_set (setcc);
3982 rtx setcc_note = find_reg_equal_equiv_note (setcc);
3983 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
3984 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
3985 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
3991 new = simplify_replace_rtx (set_src, from, src);
3993 /* If no simplification can be made, then try the next register. */
3994 if (rtx_equal_p (new, SET_SRC (set)))
3997 /* If this is now a no-op, delete it; otherwise this must be a valid insn. */
4002 /* Ensure the value computed inside the jump insn is equivalent
4003 to the one computed by setcc. */
4004 if (setcc && modified_in_p (new, setcc))
4006 if (! validate_change (jump, &SET_SRC (set), new, 0))
4008 /* When (some) constants are not valid in a comparison, and there
4009 are two registers to be replaced by constants before the entire
4010 comparison can be folded into a constant, we need to keep
4011 intermediate information in REG_EQUAL notes. For targets with
4012 separate compare insns, such notes are added by try_replace_reg.
4013 When we have a combined compare-and-branch instruction, however,
4014 we need to attach a note to the branch itself to make this
4015 optimization work. */
4017 if (!rtx_equal_p (new, note_src))
4018 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4022 /* Remove REG_EQUAL note after simplification. */
4024 remove_note (jump, note);
4026 /* If this has turned into an unconditional jump,
4027 then put a barrier after it so that the unreachable
4028 code will be deleted. */
4029 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4030 emit_barrier_after (jump);
4034 /* Delete the cc0 setter. */
4035 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4036 delete_insn (setcc);
4039 run_jump_opt_after_gcse = 1;
4042 if (gcse_file != NULL)
4045 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4046 REGNO (from), INSN_UID (jump));
4047 print_rtl (gcse_file, src);
4048 fprintf (gcse_file, "\n");
4050 purge_dead_edges (bb);
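/* E.g. if reg 100 is known to be (const_int 0), a jump

     (set (pc) (if_then_else (eq (reg 100) (const_int 0))
			     (label_ref L) (pc)))

   simplifies to (set (pc) (label_ref L)), an unconditional jump; the
   barrier emitted above then lets the unreachable code be deleted.  */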
4056 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4060 /* Check for reg or cc0 setting instructions followed by
4061 conditional branch instructions first. */
4063 && (sset = single_set (insn)) != NULL
4065 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4067 rtx dest = SET_DEST (sset);
4068 if ((REG_P (dest) || CC0_P (dest))
4069 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4073 /* Handle normal insns next. */
4074 if (GET_CODE (insn) == INSN
4075 && try_replace_reg (from, to, insn))
4078 /* Try to propagate a CONST_INT into a conditional jump.
4079 We're pretty specific about what we will handle in this
4080 code; we can extend it as necessary over time.
4082 Right now the insn in question must look like
4083 (set (pc) (if_then_else ...)) */
4084 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4085 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4089 /* Perform constant and copy propagation on INSN.
4090 The result is nonzero if a change was made. */
4093 cprop_insn (rtx insn, int alter_jumps)
4095 struct reg_use *reg_used;
4103 note_uses (&PATTERN (insn), find_used_regs, NULL);
4105 note = find_reg_equal_equiv_note (insn);
4107 /* We may win even when propagating constants into notes. */
4109 find_used_regs (&XEXP (note, 0), NULL);
4111 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4112 reg_used++, reg_use_count--)
4114 unsigned int regno = REGNO (reg_used->reg_rtx);
4118 /* Ignore registers created by GCSE.
4119 We do this because ... */
4120 if (regno >= max_gcse_regno)
4123 /* If the register has already been set in this block, there's
4124 nothing we can do. */
4125 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4128 /* Find an assignment that sets reg_used and is available
4129 at the start of the block. */
4130 set = find_avail_set (regno, insn);
4135 /* ??? We might be able to handle PARALLELs. Later. */
4136 if (GET_CODE (pat) != SET)
4139 src = SET_SRC (pat);
4141 /* Constant propagation. */
4142 if (gcse_constant_p (src))
4144 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4148 if (gcse_file != NULL)
4150 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4151 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4152 print_rtl (gcse_file, src);
4153 fprintf (gcse_file, "\n");
4155 if (INSN_DELETED_P (insn))
4159 else if (GET_CODE (src) == REG
4160 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4161 && REGNO (src) != regno)
4163 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4167 if (gcse_file != NULL)
4169 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4170 regno, INSN_UID (insn));
4171 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4174 /* The original insn setting reg_used may or may not now be
4175 deletable. We leave the deletion to flow. */
4176 /* FIXME: If it turns out that the insn isn't deletable,
4177 then we may have unnecessarily extended register lifetimes
4178 and made things worse. */
4186 /* Like find_used_regs, but avoid recording uses that appear in
4187 input-output contexts such as zero_extract or pre_dec. This
4188 restricts the cases we consider to those for which local cprop
4189 can legitimately make replacements. */
4192 local_cprop_find_used_regs (rtx *xptr, void *data)
4199 switch (GET_CODE (x))
4203 case STRICT_LOW_PART:
4212 /* Can only legitimately appear this early in the context of
4213 stack pushes for function arguments, but handle all of the
4214 codes nonetheless. */
4218 /* Setting a subreg of a register larger than word_mode leaves
4219 the non-written words unchanged. */
4220 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4228 find_used_regs (xptr, data);
4231 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4232 their REG_EQUAL notes need updating. */
4235 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4237 rtx newreg = NULL, newcnst = NULL;
4239 /* Rule out USE instructions and ASM statements as we don't want to
4240 change the hard registers mentioned. */
4241 if (GET_CODE (x) == REG
4242 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4243 || (GET_CODE (PATTERN (insn)) != USE
4244 && asm_noperands (PATTERN (insn)) < 0)))
4246 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4247 struct elt_loc_list *l;
4251 for (l = val->locs; l; l = l->next)
4253 rtx this_rtx = l->loc;
4259 if (gcse_constant_p (this_rtx))
4261 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4262 /* Don't copy propagate if it has an attached REG_EQUIV note.
4263 At this point only function parameters should have
4264 REG_EQUIV notes, and if the argument slot is used somewhere
4265 explicitly, it means the address of the parameter has been taken,
4266 so we should not extend the lifetime of the pseudo. */
4267 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4268 || GET_CODE (XEXP (note, 0)) != MEM))
4271 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4273 /* If we find a case where we can't fix the retval REG_EQUAL notes
4274 to match the new register, we either have to abandon this replacement
4275 or fix delete_trivially_dead_insns to preserve the setting insn,
4276 or make it delete the REG_EQUAL note, and fix up all passes that
4277 require the REG_EQUAL note there. */
4278 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4280 if (gcse_file != NULL)
4282 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4284 fprintf (gcse_file, "insn %d with constant ",
4286 print_rtl (gcse_file, newcnst);
4287 fprintf (gcse_file, "\n");
4292 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4294 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4295 if (gcse_file != NULL)
4298 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4299 REGNO (x), INSN_UID (insn));
4300 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4309 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4310 their REG_EQUAL notes need updating to reflect that OLDREG has been
4311 replaced with NEWVAL in INSN. Return true if all substitutions could be made. */
4314 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4318 while ((end = *libcall_sp++))
4320 rtx note = find_reg_equal_equiv_note (end);
4327 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4331 note = find_reg_equal_equiv_note (end);
4334 if (reg_mentioned_p (newval, XEXP (note, 0)))
4337 while ((end = *libcall_sp++));
4341 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4347 #define MAX_NESTED_LIBCALLS 9
4350 local_cprop_pass (int alter_jumps)
4353 struct reg_use *reg_used;
4354 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4355 bool changed = false;
4358 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4360 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4364 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4368 if (libcall_sp == libcall_stack)
4370 *--libcall_sp = XEXP (note, 0);
4372 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4375 note = find_reg_equal_equiv_note (insn);
4379 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4381 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4383 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4384 reg_used++, reg_use_count--)
4385 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4391 if (INSN_DELETED_P (insn))
4394 while (reg_use_count);
4396 cselib_process_insn (insn);
4399 /* Global analysis may get into infinite loops for unreachable blocks. */
4400 if (changed && alter_jumps)
4402 delete_unreachable_blocks ();
4403 free_reg_set_mem ();
4404 alloc_reg_set_mem (max_reg_num ());
4405 compute_sets (get_insns ());
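/* An illustrative sketch (not compiler code) of what one local cprop
   step does within a single basic block, driven by cselib's value
   numbering; the pseudo register numbers are made up:

	r60 = 4;
	r61 = r60;	==>	r61 = 4;	constant propagated
	r62 = r61;	==>	r62 = 4;	via cselib's location list

   Each replacement is only kept when constprop_register or
   try_replace_reg can revalidate the resulting insn.  */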
4409 /* Forward propagate copies. This includes copies and constants. Return
4410 nonzero if a change was made. */
4413 cprop (int alter_jumps)
4419 /* Note we start at block 1. */
4420 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4422 if (gcse_file != NULL)
4423 fprintf (gcse_file, "\n");
4428 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4430 /* Reset tables used to keep track of what's still valid [since the
4431 start of the block]. */
4432 reset_opr_set_tables ();
4434 for (insn = bb->head;
4435 insn != NULL && insn != NEXT_INSN (bb->end);
4436 insn = NEXT_INSN (insn))
4439 changed |= cprop_insn (insn, alter_jumps);
4441 /* Keep track of everything modified by this insn. */
4442 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4443 call mark_oprs_set if we turned the insn into a NOTE. */
4444 if (GET_CODE (insn) != NOTE)
4445 mark_oprs_set (insn);
4449 if (gcse_file != NULL)
4450 fprintf (gcse_file, "\n");
4455 /* Similar to get_condition, only the resulting condition must be
4456 valid at JUMP, instead of at EARLIEST.
4458 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4459 settle for the condition variable in the jump instruction being integral.
4460 We prefer to be able to record the value of a user variable, rather than
4461 the value of a temporary used in a condition. This could be solved by
4462 recording the value of *every* register scanned by canonicalize_condition,
4463 but this would require some code reorganization. */
4466 fis_get_condition (rtx jump)
4468 rtx cond, set, tmp, insn, earliest;
4471 if (! any_condjump_p (jump))
4474 set = pc_set (jump);
4475 cond = XEXP (SET_SRC (set), 0);
4477 /* If this branches to JUMP_LABEL when the condition is false,
4478 reverse the condition. */
4479 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4480 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4482 /* Use canonicalize_condition to do the dirty work of manipulating
4483 MODE_CC values and COMPARE rtx codes. */
4484 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
4489 /* Verify that the given condition is valid at JUMP by virtue of not
4490 having been modified since EARLIEST. */
4491 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4492 if (INSN_P (insn) && modified_in_p (tmp, insn))
4497 /* The condition was modified. See if we can get a partial result
4498 that doesn't follow all the reversals. Perhaps combine can fold
4499 them together later. */
4500 tmp = XEXP (tmp, 0);
4501 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4503 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
4508 /* For sanity's sake, re-validate the new result. */
4509 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4510 if (INSN_P (insn) && modified_in_p (tmp, insn))
4516 /* Find the implicit sets of a function. An "implicit set" is a constraint
4517 on the value of a variable, implied by a conditional jump. For example,
4518 following "if (x == 2)", the then branch may be optimized as though the
4519 conditional performed an "explicit set", in this example, "x = 2". This
4520 function records the set patterns that are implicit at the start of each basic block. */
4524 find_implicit_sets (void)
4526 basic_block bb, dest;
4532 /* Check for more than one successor. */
4533 if (bb->succ && bb->succ->succ_next)
4535 cond = fis_get_condition (bb->end);
4538 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4539 && GET_CODE (XEXP (cond, 0)) == REG
4540 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4541 && gcse_constant_p (XEXP (cond, 1)))
4543 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4544 : FALLTHRU_EDGE (bb)->dest;
4546 if (dest && ! dest->pred->pred_next
4547 && dest != EXIT_BLOCK_PTR)
4549 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4550 XEXP (cond, 1));
4551 implicit_sets[dest->index] = new;
4554 fprintf (gcse_file, "Implicit set of reg %d in ",
4555 REGNO (XEXP (cond, 0)));
4556 fprintf (gcse_file, "basic block %d\n", dest->index);
4564 fprintf (gcse_file, "Found %d implicit sets\n", count);
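/* A hedged illustration of an implicit set.  For source such as

	if (x == 2)
	  ...then block...
	else
	  ...else block...

   the THEN block, provided the conditional is its only predecessor,
   behaves as though it began with the explicit set (set (reg x)
   (const_int 2)); that is the pattern recorded in IMPLICIT_SETS
   above.  */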
4567 /* Perform one copy/constant propagation pass.
4568 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4569 propagation into conditional jumps. If BYPASS_JUMPS is true,
4570 perform conditional jump bypassing optimizations. */
4573 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4577 const_prop_count = 0;
4578 copy_prop_count = 0;
4580 local_cprop_pass (cprop_jumps);
4582 /* Determine implicit sets. */
4583 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4584 find_implicit_sets ();
4586 alloc_hash_table (max_cuid, &set_hash_table, 1);
4587 compute_hash_table (&set_hash_table);
4589 /* Free implicit_sets before peak usage. */
4590 free (implicit_sets);
4591 implicit_sets = NULL;
4594 dump_hash_table (gcse_file, "SET", &set_hash_table);
4595 if (set_hash_table.n_elems > 0)
4597 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4598 compute_cprop_data ();
4599 changed = cprop (cprop_jumps);
4601 changed |= bypass_conditional_jumps ();
4605 free_hash_table (&set_hash_table);
4609 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4610 current_function_name, pass, bytes_used);
4611 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4612 const_prop_count, copy_prop_count);
4614 /* Global analysis may get into infinite loops for unreachable blocks. */
4615 if (changed && cprop_jumps)
4616 delete_unreachable_blocks ();
4621 /* Bypass conditional jumps. */
4623 /* The value of last_basic_block at the beginning of the jump_bypass
4624 pass. The use of redirect_edge_and_branch_force may introduce new
4625 basic blocks, but the data flow analysis is only valid for basic
4626 block indices less than bypass_last_basic_block. */
4628 static int bypass_last_basic_block;
4630 /* Find a set of REGNO to a constant that is available at the end of basic
4631 block BB. Returns NULL if no such set is found. Based heavily upon find_avail_set. */
4634 static struct expr *
4635 find_bypass_set (int regno, int bb)
4637 struct expr *result = 0;
4642 struct expr *set = lookup_set (regno, &set_hash_table);
4646 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4648 set = next_set (regno, set);
4654 if (GET_CODE (set->expr) != SET)
4657 src = SET_SRC (set->expr);
4658 if (gcse_constant_p (src))
4661 if (GET_CODE (src) != REG)
4664 regno = REGNO (src);
4670 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4671 any of the instructions inserted on an edge. Jump bypassing places
4672 condition code setters on CFG edges using insert_insn_on_edge. This
4673 function is required to check that our data flow analysis is still
4674 valid prior to commit_edge_insertions. */
4677 reg_killed_on_edge (rtx reg, edge e)
4681 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4682 if (INSN_P (insn) && reg_set_p (reg, insn))
4688 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4689 basic block BB which has more than one predecessor. If not NULL, SETCC
4690 is the first instruction of BB, which is immediately followed by JUMP_INSN
4691 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4692 Returns nonzero if a change was made.
4694 During the jump bypassing pass, we may place copies of SETCC instructions
4695 on CFG edges. The following routine must be careful to pay attention to
4696 these inserted insns when performing its transformations. */
4699 bypass_block (basic_block bb, rtx setcc, rtx jump)
4702 edge e, enext, edest;
4704 int may_be_loop_header;
4706 insn = (setcc != NULL) ? setcc : jump;
4708 /* Determine set of register uses in INSN. */
4710 note_uses (&PATTERN (insn), find_used_regs, NULL);
4711 note = find_reg_equal_equiv_note (insn);
4713 find_used_regs (&XEXP (note, 0), NULL);
4715 may_be_loop_header = false;
4716 for (e = bb->pred; e; e = e->pred_next)
4717 if (e->flags & EDGE_DFS_BACK)
4719 may_be_loop_header = true;
4724 for (e = bb->pred; e; e = enext)
4726 enext = e->pred_next;
4727 if (e->flags & EDGE_COMPLEX)
4730 /* We can't redirect edges from new basic blocks. */
4731 if (e->src->index >= bypass_last_basic_block)
4734 /* The irreducible loops created by redirecting edges entering the
4735 loop from outside would decrease the effectiveness of some of the
4736 following optimizations, so prevent this. */
4737 if (may_be_loop_header
4738 && !(e->flags & EDGE_DFS_BACK))
4741 for (i = 0; i < reg_use_count; i++)
4743 struct reg_use *reg_used = &reg_use_table[i];
4744 unsigned int regno = REGNO (reg_used->reg_rtx);
4745 basic_block dest, old_dest;
4749 if (regno >= max_gcse_regno)
4752 set = find_bypass_set (regno, e->src->index);
4757 /* Check the data flow is valid after edge insertions. */
4758 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4761 src = SET_SRC (pc_set (jump));
4764 src = simplify_replace_rtx (src,
4765 SET_DEST (PATTERN (setcc)),
4766 SET_SRC (PATTERN (setcc)));
4768 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4769 SET_SRC (set->expr));
4771 /* Jump bypassing may have already placed instructions on
4772 edges of the CFG. We can't bypass an outgoing edge that
4773 has instructions associated with it, as these insns won't
4774 get executed if the incoming edge is redirected. */
4778 edest = FALLTHRU_EDGE (bb);
4779 dest = edest->insns ? NULL : edest->dest;
4781 else if (GET_CODE (new) == LABEL_REF)
4783 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4784 /* Don't bypass edges containing instructions. */
4785 for (edest = bb->succ; edest; edest = edest->succ_next)
4786 if (edest->dest == dest && edest->insns)
4798 && dest != EXIT_BLOCK_PTR)
4800 redirect_edge_and_branch_force (e, dest);
4802 /* Copy the register setter to the redirected edge.
4803 Don't copy CC0 setters, as CC0 is dead after jump. */
4806 rtx pat = PATTERN (setcc);
4807 if (!CC0_P (SET_DEST (pat)))
4808 insert_insn_on_edge (copy_insn (pat), e);
4811 if (gcse_file != NULL)
4813 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4814 regno, INSN_UID (jump));
4815 print_rtl (gcse_file, SET_SRC (set->expr));
4816 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4817 e->src->index, old_dest->index, dest->index);
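/* A source-level sketch of the bypass transformation performed above
   (illustrative only):

	if (p)			On the edge leaving "x = 0",
	  x = 0;		find_bypass_set proves x == 0, so the
	else			test below is known to be true and the
	  x = 1;		edge is redirected straight to the
	if (x == 0)		call of f (), skipping the comparison.
	  f ();

   Any condition code setter that is needed is copied onto the
   redirected edge, as done above.  */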
4827 /* Find basic blocks with more than one predecessor that only contain a
4828 single conditional jump. If the result of the comparison is known at
4829 compile-time from any incoming edge, redirect that edge to the
4830 appropriate target. Returns nonzero if a change was made.
4832 This function is now mis-named, because we also handle indirect jumps. */
4835 bypass_conditional_jumps (void)
4843 /* Note we start at block 1. */
4844 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4847 bypass_last_basic_block = last_basic_block;
4848 mark_dfs_back_edges ();
4851 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4852 EXIT_BLOCK_PTR, next_bb)
4854 /* Check for more than one predecessor. */
4855 if (bb->pred && bb->pred->pred_next)
4858 for (insn = bb->head;
4859 insn != NULL && insn != NEXT_INSN (bb->end);
4860 insn = NEXT_INSN (insn))
4861 if (GET_CODE (insn) == INSN)
4865 if (GET_CODE (PATTERN (insn)) != SET)
4868 dest = SET_DEST (PATTERN (insn));
4869 if (REG_P (dest) || CC0_P (dest))
4874 else if (GET_CODE (insn) == JUMP_INSN)
4876 if ((any_condjump_p (insn) || computed_jump_p (insn))
4877 && onlyjump_p (insn))
4878 changed |= bypass_block (bb, setcc, insn);
4881 else if (INSN_P (insn))
4886 /* If we bypassed any register setting insns, we inserted a
4887 copy on the redirected edge. These need to be committed. */
4889 commit_edge_insertions ();
4894 /* Compute PRE+LCM working variables. */
4896 /* Local properties of expressions. */
4897 /* Nonzero for expressions that are transparent in the block. */
4898 static sbitmap *transp;
4900 /* Nonzero for expressions that are transparent at the end of the block.
4901 This is only zero for expressions killed by an abnormal critical edge
4902 created by a call. */
4903 static sbitmap *transpout;
4905 /* Nonzero for expressions that are computed (available) in the block. */
4906 static sbitmap *comp;
4908 /* Nonzero for expressions that are locally anticipatable in the block. */
4909 static sbitmap *antloc;
4911 /* Nonzero for expressions where this block is an optimal computation point. */
4913 static sbitmap *pre_optimal;
4915 /* Nonzero for expressions which are redundant in a particular block. */
4916 static sbitmap *pre_redundant;
4918 /* Nonzero for expressions which should be inserted on a specific edge. */
4919 static sbitmap *pre_insert_map;
4921 /* Nonzero for expressions which should be deleted in a specific block. */
4922 static sbitmap *pre_delete_map;
4924 /* Contains the edge_list returned by pre_edge_lcm. */
4925 static struct edge_list *edge_list;
4927 /* Redundant insns. */
4928 static sbitmap pre_redundant_insns;
4930 /* Allocate vars used for PRE analysis. */
4933 alloc_pre_mem (int n_blocks, int n_exprs)
4935 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4936 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4937 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4940 pre_redundant = NULL;
4941 pre_insert_map = NULL;
4942 pre_delete_map = NULL;
4945 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4947 /* pre_insert and pre_delete are allocated later. */
4950 /* Free vars used for PRE analysis. */
4955 sbitmap_vector_free (transp);
4956 sbitmap_vector_free (comp);
4958 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4961 sbitmap_vector_free (pre_optimal);
4963 sbitmap_vector_free (pre_redundant);
4965 sbitmap_vector_free (pre_insert_map);
4967 sbitmap_vector_free (pre_delete_map);
4969 sbitmap_vector_free (ae_in);
4971 sbitmap_vector_free (ae_out);
4973 transp = comp = NULL;
4974 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4975 ae_in = ae_out = NULL;
4978 /* Top level routine to do the dataflow analysis needed by PRE. */
4981 compute_pre_data (void)
4983 sbitmap trapping_expr;
4987 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4988 sbitmap_vector_zero (ae_kill, last_basic_block);
4990 /* Collect expressions which might trap. */
4991 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
4992 sbitmap_zero (trapping_expr);
4993 for (ui = 0; ui < expr_hash_table.size; ui++)
4996 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
4997 if (may_trap_p (e->expr))
4998 SET_BIT (trapping_expr, e->bitmap_index);
5001 /* Compute ae_kill for each basic block using ~(TRANSP | COMP).
5005 This is significantly faster than compute_ae_kill. */
5011 /* If the current block is the destination of an abnormal edge, we
5012 kill all trapping expressions because we won't be able to properly
5013 place the instruction on the edge. So make them neither
5014 anticipatable nor transparent. This is fairly conservative. */
5015 for (e = bb->pred; e ; e = e->pred_next)
5016 if (e->flags & EDGE_ABNORMAL)
5018 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5019 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5023 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5024 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5027 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5028 ae_kill, &pre_insert_map, &pre_delete_map);
5029 sbitmap_vector_free (antloc);
5031 sbitmap_vector_free (ae_kill);
5033 sbitmap_free (trapping_expr);
5038 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach block BB.
5041 VISITED is a pointer to a working buffer for tracking which BB's have
5042 been visited. It is NULL for the top-level call.
5044 We treat reaching expressions that go through blocks containing the same
5045 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5046 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5047 2 as not reaching. The intent is to improve the probability of finding
5048 only one reaching expression and to reduce register lifetimes by picking
5049 the closest such expression. */
5052 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5056 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5058 basic_block pred_bb = pred->src;
5060 if (pred->src == ENTRY_BLOCK_PTR
5061 /* Has this predecessor already been visited? */
5062 || visited[pred_bb->index])
5063 ;/* Nothing to do. */
5065 /* Does this predecessor generate this expression? */
5066 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5068 /* Is this the occurrence we're looking for?
5069 Note that there's only one generating occurrence per block
5070 so we just need to check the block number. */
5071 if (occr_bb == pred_bb)
5074 visited[pred_bb->index] = 1;
5076 /* Ignore this predecessor if it kills the expression. */
5077 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5078 visited[pred_bb->index] = 1;
5080 /* Neither gen nor kill. */
5083 visited[pred_bb->index] = 1;
5084 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5089 /* All paths have been checked. */
5093 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5094 memory allocated for that function is freed. */
5097 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5100 char *visited = xcalloc (last_basic_block, 1);
5102 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5109 /* Given an expr, generate RTL which we can insert at the end of a BB,
5110 or on an edge. Set the block number of any insns generated to
5114 process_insert_insn (struct expr *expr)
5116 rtx reg = expr->reaching_reg;
5117 rtx exp = copy_rtx (expr->expr);
5122 /* If the expression is something that's an operand, like a constant,
5123 just copy it to a register. */
5124 if (general_operand (exp, GET_MODE (reg)))
5125 emit_move_insn (reg, exp);
5127 /* Otherwise, make a new insn to compute this expression and make sure the
5128 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5129 expression to make sure we don't have any sharing issues. */
5130 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5139 /* Add EXPR to the end of basic block BB.
5141 This is used by both PRE and code hoisting.
5143 For PRE, we want to verify that the expr is either transparent
5144 or locally anticipatable in the target block. This check makes
5145 no sense for code hoisting. */
5148 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5152 rtx reg = expr->reaching_reg;
5153 int regno = REGNO (reg);
5156 pat = process_insert_insn (expr);
5157 if (pat == NULL_RTX || ! INSN_P (pat))
5161 while (NEXT_INSN (pat_end) != NULL_RTX)
5162 pat_end = NEXT_INSN (pat_end);
5164 /* If the last insn is a jump, insert EXPR in front [taking care to
5165 handle cc0, etc. properly]. Similarly we need to take care of
5166 trapping instructions in the presence of non-call exceptions. */
5168 if (GET_CODE (insn) == JUMP_INSN
5169 || (GET_CODE (insn) == INSN
5170 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5175 /* It should always be the case that we can put these instructions
5176 anywhere in the basic block when performing PRE optimizations. */
5178 if (GET_CODE (insn) == INSN && pre
5179 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5180 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5183 /* If this is a jump table, then we can't insert stuff here. Since
5184 we know the previous real insn must be the tablejump, we insert
5185 the new instruction just before the tablejump. */
5186 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5187 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5188 insn = prev_real_insn (insn);
5191 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5192 if cc0 isn't set. */
5193 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5195 insn = XEXP (note, 0);
5198 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5199 if (maybe_cc0_setter
5200 && INSN_P (maybe_cc0_setter)
5201 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5202 insn = maybe_cc0_setter;
5205 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5206 new_insn = emit_insn_before (pat, insn);
5209 /* Likewise if the last insn is a call, as will happen in the presence
5210 of exception handling. */
5211 else if (GET_CODE (insn) == CALL_INSN
5212 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5214 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5215 we search backward and place the instructions before the first
5216 parameter is loaded. Do this for everyone for consistency and a
5217 presumption that we'll get better code elsewhere as well.
5219 It should always be the case that we can put these instructions
5220 anywhere in the basic block when performing PRE optimizations. */
5224 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5225 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5228 /* Since different machines initialize their parameter registers
5229 in different orders, assume nothing. Collect the set of all
5230 parameter registers. */
5231 insn = find_first_parameter_load (insn, bb->head);
5233 /* If we found all the parameter loads, then we want to insert
5234 before the first parameter load.
5236 If we did not find all the parameter loads, then we might have
5237 stopped on the head of the block, which could be a CODE_LABEL.
5238 If we inserted before the CODE_LABEL, then we would be putting
5239 the insn in the wrong basic block. In that case, put the insn
5240 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5241 while (GET_CODE (insn) == CODE_LABEL
5242 || NOTE_INSN_BASIC_BLOCK_P (insn))
5243 insn = NEXT_INSN (insn);
5245 new_insn = emit_insn_before (pat, insn);
5248 new_insn = emit_insn_after (pat, insn);
5254 add_label_notes (PATTERN (pat), new_insn);
5255 note_stores (PATTERN (pat), record_set_info, pat);
5259 pat = NEXT_INSN (pat);
5262 gcse_create_count++;
5266 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5267 bb->index, INSN_UID (new_insn));
5268 fprintf (gcse_file, "copying expression %d to reg %d\n",
5269 expr->bitmap_index, regno);
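/* An illustrative sketch of how PRE insertion turns a partial redundancy
   into a full one (source level, not compiler code; T1 stands for the new
   pseudo):

	if (c)				if (c)
	  t = a + b;			  { t1 = a + b; t = t1; }
	u = a + b;		==>	else
					  t1 = a + b;	inserted on edge
					u = t1;		now just a copy

   The computation inserted on the ELSE edge makes the later evaluation
   of a + b fully redundant, so it can be replaced by a copy from the
   reaching register.  */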
5273 /* Insert partially redundant expressions on edges in the CFG to make
5274 the expressions fully redundant. */
5277 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5279 int e, i, j, num_edges, set_size, did_insert = 0;
5282 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5283 if it reaches any of the deleted expressions. */
5285 set_size = pre_insert_map[0]->size;
5286 num_edges = NUM_EDGES (edge_list);
5287 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5288 sbitmap_vector_zero (inserted, num_edges);
5290 for (e = 0; e < num_edges; e++)
5293 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5295 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5297 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5299 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5300 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5302 struct expr *expr = index_map[j];
5305 /* Now look at each deleted occurrence of this expression. */
5306 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5308 if (! occr->deleted_p)
5311 /* Insert this expression on this edge if it would
5312 reach the deleted occurrence in BB. */
5313 if (!TEST_BIT (inserted[e], j))
5316 edge eg = INDEX_EDGE (edge_list, e);
5318 /* We can't insert anything on an abnormal and
5319 critical edge, so we insert the insn at the end of
5320 the previous block. There are several alternatives
5321 detailed in Morgan's book P277 (sec 10.5) for
5322 handling this situation. This one is easiest for now. */
5325 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5326 insert_insn_end_bb (index_map[j], bb, 0);
5329 insn = process_insert_insn (index_map[j]);
5330 insert_insn_on_edge (insn, eg);
5335 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5337 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5338 fprintf (gcse_file, "copy expression %d\n",
5339 expr->bitmap_index);
5342 update_ld_motion_stores (expr);
5343 SET_BIT (inserted[e], j);
5345 gcse_create_count++;
5352 sbitmap_vector_free (inserted);
5356 /* Copy the result of INSN to REG. INDX is the expression number.
5357 Given "old_reg <- expr" (INSN), instead of adding after it
5358 reaching_reg <- old_reg
5359 it's better to do the following:
5360 reaching_reg <- expr
5361 old_reg <- reaching_reg
5362 because this way copy propagation can discover additional PRE opportunities. */
5366 pre_insert_copy_insn (struct expr *expr, rtx insn)
5368 rtx reg = expr->reaching_reg;
5369 int regno = REGNO (reg);
5370 int indx = expr->bitmap_index;
5371 rtx set = single_set (insn);
5379 old_reg = SET_DEST (set);
5380 new_insn = emit_insn_after (gen_move_insn (old_reg,
5383 new_set = single_set (new_insn);
5387 SET_DEST (set) = reg;
5389 /* Keep register set table up to date. */
5390 replace_one_set (REGNO (old_reg), insn, new_insn);
5391 record_one_set (regno, insn);
5393 gcse_create_count++;
5397 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5398 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5399 INSN_UID (insn), regno);
5400 update_ld_motion_stores (expr);
5403 /* Copy available expressions that reach the redundant expression
5404 to `reaching_reg'. */
5407 pre_insert_copies (void)
5414 /* For each available expression in the table, copy the result to
5415 `reaching_reg' if the expression reaches a deleted one.
5417 ??? The current algorithm is rather brute force.
5418 Need to do some profiling. */
5420 for (i = 0; i < expr_hash_table.size; i++)
5421 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5423 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5424 we don't want to insert a copy here because the expression may not
5425 really be redundant. So only insert an insn if the expression was
5426 deleted. This test also avoids further processing if the
5427 expression wasn't deleted anywhere. */
5428 if (expr->reaching_reg == NULL)
5431 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5433 if (! occr->deleted_p)
5436 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5438 rtx insn = avail->insn;
5440 /* No need to handle this one if handled already. */
5441 if (avail->copied_p)
5444 /* Don't handle this one if it's a redundant one. */
5445 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5448 /* Or if the expression doesn't reach the deleted one. */
5449 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5451 BLOCK_FOR_INSN (occr->insn)))
5454 /* Copy the result of avail to reaching_reg. */
5455 pre_insert_copy_insn (expr, insn);
5456 avail->copied_p = 1;
5462 /* Emit move from SRC to DEST noting the equivalence with expression computed in INSN. */
5465 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5468 rtx set = single_set (insn), set2;
5472 /* This should never fail since we're creating a reg->reg copy
5473 we've verified to be valid. */
5475 new = emit_insn_after (gen_move_insn (dest, src), insn);
5477 /* Note the equivalence for local CSE pass. */
5478 set2 = single_set (new);
5479 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5481 if ((note = find_reg_equal_equiv_note (insn)))
5482 eqv = XEXP (note, 0);
5484 eqv = SET_SRC (set);
5486 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5491 /* Delete redundant computations.
5492 Deletion is done by changing the insn to copy the `reaching_reg' of
5493 the expression into the result of the SET. It is left to later passes
5494 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5496 Returns nonzero if a change is made. */
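/* Sketch of the replacement performed below (illustrative):

	x = a + b;	==>	x = t;

   where T is EXPR->reaching_reg.  The arithmetic disappears from this
   block, and the remaining copy is propagated or eliminated by the
   later passes listed above.  */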
5507 for (i = 0; i < expr_hash_table.size; i++)
5508 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5510 int indx = expr->bitmap_index;
5512 /* We only need to search antic_occr since we require ANTLOC != 0. */
5515 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5517 rtx insn = occr->insn;
5519 basic_block bb = BLOCK_FOR_INSN (insn);
5521 if (TEST_BIT (pre_delete_map[bb->index], indx))
5523 set = single_set (insn);
5527 /* Create a pseudo-reg to store the result of reaching
5528 expressions into. Get the mode for the new pseudo from
5529 the mode of the original destination pseudo. */
5530 if (expr->reaching_reg == NULL)
5532 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5534 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5536 occr->deleted_p = 1;
5537 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5544 "PRE: redundant insn %d (expression %d) in ",
5545 INSN_UID (insn), indx);
5546 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5547 bb->index, REGNO (expr->reaching_reg));
5556 /* Perform GCSE optimizations using PRE.
5557 This is called by one_pre_gcse_pass after all the dataflow analysis has been done.
5560 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5561 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5562 Compiler Design and Implementation.
5564 ??? A new pseudo reg is created to hold the reaching expression. The nice
5565 thing about the classical approach is that it would try to use an existing
5566 reg. If the register can't be adequately optimized [i.e. we introduce
5567 reload problems], one could add a pass here to propagate the new register through the block.
5570 ??? We don't handle single sets in PARALLELs because we're [currently] not
5571 able to copy the rest of the parallel when we insert copies to create full
5572 redundancies from partial redundancies. However, there's no reason why we
5573 can't handle PARALLELs in the cases where there are no partial redundancies. */
5580 int did_insert, changed;
5581 struct expr **index_map;
5584 /* Compute a mapping from expression number (`bitmap_index') to
5585 hash table entry. */
5587 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5588 for (i = 0; i < expr_hash_table.size; i++)
5589 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5590 index_map[expr->bitmap_index] = expr;
5592 /* Reset bitmap used to track which insns are redundant. */
5593 pre_redundant_insns = sbitmap_alloc (max_cuid);
5594 sbitmap_zero (pre_redundant_insns);
5596 /* Delete the redundant insns first so that
5597 - we know what register to use for the new insns and for the other
5598 ones with reaching expressions
5599 - we know which insns are redundant when we go to create copies */
5601 changed = pre_delete ();
5603 did_insert = pre_edge_insert (edge_list, index_map);
5605 /* In other places with reaching expressions, copy the expression to the
5606 specially allocated pseudo-reg that reaches the redundant expr. */
5607 pre_insert_copies ();
5610 commit_edge_insertions ();
5615 sbitmap_free (pre_redundant_insns);
5619 /* Top level routine to perform one PRE GCSE pass.
5621 Return nonzero if a change was made. */
5624 one_pre_gcse_pass (int pass)
5628 gcse_subst_count = 0;
5629 gcse_create_count = 0;
5631 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5632 add_noreturn_fake_exit_edges ();
5634 compute_ld_motion_mems ();
5636 compute_hash_table (&expr_hash_table);
5637 trim_ld_motion_mems ();
5639 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5641 if (expr_hash_table.n_elems > 0)
5643 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5644 compute_pre_data ();
5645 changed |= pre_gcse ();
5646 free_edge_list (edge_list);
5651 remove_fake_edges ();
5652 free_hash_table (&expr_hash_table);
5656 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5657 current_function_name, pass, bytes_used);
5658 fprintf (gcse_file, "%d substs, %d insns created\n",
5659 gcse_subst_count, gcse_create_count);
5665 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5666 If notes are added to an insn which references a CODE_LABEL, the
5667 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5668 because the following loop optimization pass requires them. */
5670 /* ??? This is very similar to the loop.c add_label_notes function. We
5671 could probably share code here. */
5673 /* ??? If there was a jump optimization pass after gcse and before loop,
5674 then we would not need to do this here, because jump would add the
5675 necessary REG_LABEL notes. */
5678 add_label_notes (rtx x, rtx insn)
5680 enum rtx_code code = GET_CODE (x);
5684 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5686 /* This code used to ignore labels that referred to dispatch tables to
5687 avoid flow generating (slightly) worse code.
5689 We no longer ignore such label references (see LABEL_REF handling in
5690 mark_jump_label for additional information). */
5692 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5694 if (LABEL_P (XEXP (x, 0)))
5695 LABEL_NUSES (XEXP (x, 0))++;
5699 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5702 add_label_notes (XEXP (x, i), insn);
5703 else if (fmt[i] == 'E')
5704 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5705 add_label_notes (XVECEXP (x, i, j), insn);
5709 /* Compute transparent outgoing information for each block.
5711 An expression is transparent to an edge unless it is killed by
5712 the edge itself. This can only happen with abnormal control flow,
5713 when the edge is traversed through a call. This happens with
5714 non-local labels and exceptions.
5716 This would not be necessary if we split the edge. While this is
5717 normally impossible for abnormal critical edges, with some effort
5718 it should be possible with exception handling, since we still have
5719 control over which handler should be invoked. But due to increased
5720 EH table sizes, this may not be worthwhile. */
5723 compute_transpout (void)
5729 sbitmap_vector_ones (transpout, last_basic_block);
5733 /* Note that flow inserted a nop at the end of basic blocks that
5734 end in call instructions for reasons other than abnormal control flow. */
5736 if (GET_CODE (bb->end) != CALL_INSN)
5739 for (i = 0; i < expr_hash_table.size; i++)
5740 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5741 if (GET_CODE (expr->expr) == MEM)
5743 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5744 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5747 /* ??? Optimally, we would use interprocedural alias
5748 analysis to determine if this mem is actually killed by this call. */
5750 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5755 /* Removal of useless null pointer checks */
5757 /* Called via note_stores. X is set by SETTER. If X is a register we must
5758 invalidate nonnull_local and set nonnull_killed. DATA is really a
5759 `null_pointer_info *'.
5761 We ignore hard registers. */
5764 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5767 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5769 while (GET_CODE (x) == SUBREG)
5772 /* Ignore anything that is not a register or is a hard register. */
5773 if (GET_CODE (x) != REG
5774 || REGNO (x) < npi->min_reg
5775 || REGNO (x) >= npi->max_reg)
5778 regno = REGNO (x) - npi->min_reg;
5780 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5781 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5784 /* Do null-pointer check elimination for the registers indicated in
5785 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5786 they are not our responsibility to free. */
5789 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5790 sbitmap *nonnull_avout,
5791 struct null_pointer_info *npi)
5793 basic_block bb, current_block;
5794 sbitmap *nonnull_local = npi->nonnull_local;
5795 sbitmap *nonnull_killed = npi->nonnull_killed;
5796 int something_changed = 0;
5798 /* Compute local properties, nonnull and killed. A register will have
5799 the nonnull property if at the end of the current block its value is
5800 known to be nonnull. The killed property indicates that somewhere in
5801 the block any information we had about the register is killed.
5803 Note that a register can have both properties in a single block. That
5804 indicates that it's killed, then later in the block a new value is computed. */
5806 sbitmap_vector_zero (nonnull_local, last_basic_block);
5807 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5809 FOR_EACH_BB (current_block)
5811 rtx insn, stop_insn;
5813 /* Set the current block for invalidate_nonnull_info. */
5814 npi->current_block = current_block;
5816 /* Scan each insn in the basic block looking for memory references and register sets. */
5818 stop_insn = NEXT_INSN (current_block->end);
5819 for (insn = current_block->head;
5821 insn = NEXT_INSN (insn))
5826 /* Ignore anything that is not a normal insn. */
5827 if (! INSN_P (insn))
5830 /* Basically ignore anything that is not a simple SET. We do have
5831 to make sure to invalidate nonnull_local and set nonnull_killed
5832 for such insns though. */
5833 set = single_set (insn);
5836 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5840 /* See if we've got a usable memory load. We handle it first
5841 in case it uses its address register as a dest (which kills
5842 the nonnull property). */
5843 if (GET_CODE (SET_SRC (set)) == MEM
5844 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5845 && REGNO (reg) >= npi->min_reg
5846 && REGNO (reg) < npi->max_reg)
5847 SET_BIT (nonnull_local[current_block->index],
5848 REGNO (reg) - npi->min_reg);
5850 /* Now invalidate stuff clobbered by this insn. */
5851 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5853 /* And handle stores; we do these last since any sets in INSN
5854 cannot kill the nonnull property if it is derived from a MEM
5855 appearing in a SET_DEST. */
5856 if (GET_CODE (SET_DEST (set)) == MEM
5857 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5858 && REGNO (reg) >= npi->min_reg
5859 && REGNO (reg) < npi->max_reg)
5860 SET_BIT (nonnull_local[current_block->index],
5861 REGNO (reg) - npi->min_reg);
5865 /* Now compute global properties based on the local properties. This
5866 is a classic global availability algorithm. */
5867 compute_available (nonnull_local, nonnull_killed,
5868 nonnull_avout, nonnull_avin);
5870 /* Now look at each bb and see if it ends with a compare of a value against zero. */
5874 rtx last_insn = bb->end;
5875 rtx condition, earliest;
5876 int compare_and_branch;
5878 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5879 since BLOCK_REG[BB] is zero if this block did not end with a
5880 comparison against zero, this condition works. */
5881 if (block_reg[bb->index] < npi->min_reg
5882 || block_reg[bb->index] >= npi->max_reg)
5885 /* LAST_INSN is a conditional jump. Get its condition. */
5886 condition = get_condition (last_insn, &earliest, false);
5888 /* If we can't determine the condition then skip. */
5892 /* Is the register known to have a nonzero value? */
5893 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5896 /* Try to compute whether the compare/branch at the loop end is one or
5897 two instructions. */
5898 if (earliest == last_insn)
5899 compare_and_branch = 1;
5900 else if (earliest == prev_nonnote_insn (last_insn))
5901 compare_and_branch = 2;
5905 /* We know the register in this comparison is nonnull at exit from
5906 this block. We can optimize this comparison. */
5907 if (GET_CODE (condition) == NE)
5911 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
5913 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5914 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5915 emit_barrier_after (new_jump);
5918 something_changed = 1;
5919 delete_insn (last_insn);
5920 if (compare_and_branch == 2)
5921 delete_insn (earliest);
5922 purge_dead_edges (bb);
5924 /* Don't check this block again. (Note that BLOCK_END is
5925 invalid here; we deleted the last instruction in the block.) */
5927 block_reg[bb->index] = 0;
5930 return something_changed;
5933 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated at compile time.
5936 This is conceptually similar to global constant/copy propagation and
5937 classic global CSE (it even uses the same dataflow equations as cprop).
5939 If a register is used as a memory address with the form (mem (reg)), then we
5940 know that REG cannot be zero at that point in the program. Any instruction
5941 which sets REG "kills" this property.
5943 So, if every path leading to a conditional branch has an available memory
5944 reference of that form, then we know the register can not have the value
5945 zero at the conditional branch.
5947 So we merely need to compute the local properties and propagate that data
5948 around the cfg, then optimize where possible.
5950 We run this pass two times. Once before CSE, then again after CSE. This
5951 has proven to be the most profitable approach. It is rare for new
5952 optimization opportunities of this nature to appear after the first CSE pass.
5955 This could probably be integrated with global cprop with a little work. */
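/* A source-level illustration of the transformation (not compiler code;
   the function F is hypothetical):

	int f (int *p)
	{
	  int x = *p;		the (mem (reg)) use proves p != 0 here
	  if (p == 0)		so this test is known to be false
	    return -1;
	  return x;
	}

   Every path to the comparison contains the dereference of P, so the
   EQ/NE test against zero can be eliminated as described below.  */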
5958 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
5960 sbitmap *nonnull_avin, *nonnull_avout;
5961 unsigned int *block_reg;
5965 int max_reg = max_reg_num ();
5966 struct null_pointer_info npi;
5967 int something_changed = 0;
5969 /* If we have only a single block, or it is too expensive, give up. */
5970 if (n_basic_blocks <= 1
5971 || is_too_expensive (_ ("NULL pointer checks disabled")))
5974 /* We need four bitmaps, each with a bit for each register in each basic block.
5976 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
5978 /* Allocate bitmaps to hold local and global properties. */
5979 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5980 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5981 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5982 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
5984 /* Go through the basic blocks, seeing whether or not each block
5985 ends with a conditional branch whose condition is a comparison
5986 against zero. Record the register compared in BLOCK_REG. */
5987 block_reg = xcalloc (last_basic_block, sizeof (int));
5990 rtx last_insn = bb->end;
5991 rtx condition, earliest, reg;
5993 /* We only want conditional branches. */
5994 if (GET_CODE (last_insn) != JUMP_INSN
5995 || !any_condjump_p (last_insn)
5996 || !onlyjump_p (last_insn))
5999 /* LAST_INSN is a conditional jump. Get its condition. */
6000 condition = get_condition (last_insn, &earliest, false);
6002 /* If we were unable to get the condition, or it is not an equality
6003 comparison against zero then there's nothing we can do. */
6005 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6006 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6007 || (XEXP (condition, 1)
6008 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6011 /* We must be checking a register against zero. */
6012 reg = XEXP (condition, 0);
6013 if (GET_CODE (reg) != REG)
6016 block_reg[bb->index] = REGNO (reg);
6019 /* Go through the algorithm for each block of registers. */
6020 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6023 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6024 something_changed |= delete_null_pointer_checks_1 (block_reg,
6030 /* Free the table of registers compared at the end of every block. */
6034 sbitmap_vector_free (npi.nonnull_local);
6035 sbitmap_vector_free (npi.nonnull_killed);
6036 sbitmap_vector_free (nonnull_avin);
6037 sbitmap_vector_free (nonnull_avout);
6039 return something_changed;
6042 /* Code Hoisting variables and subroutines. */
6044 /* Very busy expressions. */
6045 static sbitmap *hoist_vbein;
6046 static sbitmap *hoist_vbeout;
6048 /* Hoistable expressions. */
6049 static sbitmap *hoist_exprs;
6051 /* Dominator bitmaps. */
6052 dominance_info dominators;
6054 /* ??? We could compute post dominators and run this algorithm in
6055 reverse to perform tail merging; doing so would probably be
6056 more effective than the tail merging code in jump.c.
6058 It's unclear if tail merging could be run in parallel with
6059 code hoisting. It would be nice. */
6061 /* Allocate vars used for code hoisting analysis. */
6064 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6066 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6067 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6068 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6070 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6071 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6072 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6073 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6076 /* Free vars used for code hoisting analysis. */
6079 free_code_hoist_mem (void)
6081 sbitmap_vector_free (antloc);
6082 sbitmap_vector_free (transp);
6083 sbitmap_vector_free (comp);
6085 sbitmap_vector_free (hoist_vbein);
6086 sbitmap_vector_free (hoist_vbeout);
6087 sbitmap_vector_free (hoist_exprs);
6088 sbitmap_vector_free (transpout);
6090 free_dominance_info (dominators);
6093 /* Compute the very busy expressions at entry/exit from each block.
6095 An expression is very busy if all paths from a given point
6096 compute the expression. */
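/* Stated as dataflow equations, the fixed point computed below is, in
   sbitmap terms (a restatement of the loop that follows):

	VBEIN(BB)  = ANTLOC(BB) | (VBEOUT(BB) & TRANSP(BB))
	VBEOUT(BB) = intersection of VBEIN(S) over all successors S of BB

   with VBEOUT left empty for the block adjacent to the exit.  */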
6099 compute_code_hoist_vbeinout (void)
6101 int changed, passes;
6104 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6105 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6114 /* We scan the blocks in the reverse order to speed up the convergence. */
6116 FOR_EACH_BB_REVERSE (bb)
6118 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6119 hoist_vbeout[bb->index], transp[bb->index]);
6120 if (bb->next_bb != EXIT_BLOCK_PTR)
6121 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6128 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6131 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6134 compute_code_hoist_data (void)
6136 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6137 compute_transpout ();
6138 compute_code_hoist_vbeinout ();
6139 dominators = calculate_dominance_info (CDI_DOMINATORS);
6141 fprintf (gcse_file, "\n");
6144 /* Determine if the expression identified by EXPR_INDEX would
6145 reach BB unimpaired if it was placed at the end of EXPR_BB.
6147 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6148 to me that the expression must either be computed or transparent in
6149 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6150 would allow the expression to be hoisted out of loops, even if
6151 the expression wasn't a loop invariant.
6153 Contrast this to reachability for PRE where an expression is
6154 considered reachable if *any* path reaches instead of *all*
6158 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6161 int visited_allocated_locally = 0;
6164 if (visited == NULL)
6166 visited_allocated_locally = 1;
6167 visited = xcalloc (last_basic_block, 1);
6170 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6172 basic_block pred_bb = pred->src;
6174 if (pred->src == ENTRY_BLOCK_PTR)
6176 else if (pred_bb == expr_bb)
6178 else if (visited[pred_bb->index])
6181 /* Does this predecessor generate this expression? */
6182 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6184 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6190 visited[pred_bb->index] = 1;
6191 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6196 if (visited_allocated_locally)
6199 return (pred == NULL);
6202 /* Actually perform code hoisting. */
6207 basic_block bb, dominated;
6209 unsigned int domby_len;
6211 struct expr **index_map;
6214 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6216 /* Compute a mapping from expression number (`bitmap_index') to
6217 hash table entry. */
6219 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6220 for (i = 0; i < expr_hash_table.size; i++)
6221 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6222 index_map[expr->bitmap_index] = expr;
6224 /* Walk over each basic block looking for potentially hoistable
6225 expressions; nothing gets hoisted from the entry block. */
6229 int insn_inserted_p;
6231 domby_len = get_dominated_by (dominators, bb, &domby);
6232 /* Examine each expression that is very busy at the exit of this
6233 block. These are the potentially hoistable expressions. */
6234 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6238 if (TEST_BIT (hoist_vbeout[bb->index], i)
6239 && TEST_BIT (transpout[bb->index], i))
6241 /* We've found a potentially hoistable expression, now
6242 we look at every block BB dominates to see if it
6243 computes the expression. */
6244 for (j = 0; j < domby_len; j++)
6246 dominated = domby[j];
6247 /* Ignore self dominance. */
6248 if (bb == dominated)
6250 /* We've found a dominated block, now see if it computes
6251 the busy expression and whether or not moving that
6252 expression to the "beginning" of that block is safe. */
6253 if (!TEST_BIT (antloc[dominated->index], i))
6256 /* Note if the expression would reach the dominated block
6257 unimpaired if it was placed at the end of BB.
6259 Keep track of how many times this expression is hoistable
6260 from a dominated block into BB. */
6261 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6265 /* If we found more than one hoistable occurrence of this
6266 expression, then note it in the bitmap of expressions to
6267 hoist. It makes no sense to hoist things which are computed
6268 in only one BB, and doing so tends to pessimize register
6269 allocation. One could increase this value to try harder
6270 to avoid any possible code expansion due to register
6271 allocation issues; however experiments have shown that
6272 the vast majority of hoistable expressions are only movable
6273 from two successors, so raising this threshold is likely
6274 to nullify any benefit we get from code hoisting. */
6277 SET_BIT (hoist_exprs[bb->index], i);
6282 /* If we found nothing to hoist, then quit now. */
6289 /* Loop over all the hoistable expressions. */
6290 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6292 /* We want to insert the expression into BB only once, so
6293 note when we've inserted it. */
6294 insn_inserted_p = 0;
6296 /* These tests should be the same as the tests above. */
6297 if (TEST_BIT (hoist_vbeout[bb->index], i))
6299 /* We've found a potentially hoistable expression, now
6300 we look at every block BB dominates to see if it
6301 computes the expression. */
6302 for (j = 0; j < domby_len; j++)
6304 dominated = domby[j];
6305 /* Ignore self dominance. */
6306 if (bb == dominated)
6309 /* We've found a dominated block, now see if it computes
6310 the busy expression and whether or not moving that
6311 expression to the "beginning" of that block is safe. */
6312 if (!TEST_BIT (antloc[dominated->index], i))
6315 /* The expression is computed in the dominated block and
6316 it would be safe to compute it at the start of the
6317 dominated block. Now we have to determine if the
6318 expression would reach the dominated block if it was
6319 placed at the end of BB. */
6320 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6322 struct expr *expr = index_map[i];
6323 struct occr *occr = expr->antic_occr;
6327 /* Find the right occurrence of this expression. */
6328 while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6331 /* Should never happen. */
6337 set = single_set (insn);
6341 /* Create a pseudo-reg to store the result of reaching
6342 expressions into. Get the mode for the new pseudo
6343 from the mode of the original destination pseudo. */
6344 if (expr->reaching_reg == NULL)
6346 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6348 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6350 occr->deleted_p = 1;
6351 if (!insn_inserted_p)
6353 insert_insn_end_bb (index_map[i], bb, 0);
6354 insn_inserted_p = 1;
6366 /* Top level routine to perform one code hoisting (aka unification) pass
6368 Return nonzero if a change was made. */
6371 one_code_hoisting_pass (void)
6375 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6376 compute_hash_table (&expr_hash_table);
6378 dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6380 if (expr_hash_table.n_elems > 0)
6382 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6383 compute_code_hoist_data ();
6385 free_code_hoist_mem ();
6388 free_hash_table (&expr_hash_table);
6393 /* Here we provide the things required to do store motion towards
6394 the exit. In order for this to be effective, gcse also needed to
6395 be taught how to move a load when it is killed only by a store to itself.
6397 int i;
6398 float a[10];
6400 void foo(float scale)
6401 {
6402 for (i=0; i<10; i++)
6403 a[i] *= scale;
6404 }
6406 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
6407 the load out since it's live around the loop, and stored at the bottom
6408 of the loop.
6410 The 'Load Motion' referred to and implemented in this file is
6411 an enhancement to gcse which, when using edge-based LCM, recognizes
6412 this situation and allows gcse to move the load out of the loop.
6414 Once gcse has hoisted the load, store motion can then push this
6415 load towards the exit, and we end up with no loads or stores of 'i'
6416 in the loop. */
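/* For the example above, the end result after load and store motion
   might look like this (an illustrative sketch; the temporary T is
   hypothetical):

	void foo(float scale)
	{
	  int t;
	  for (t = 0; t < 10; t++)	'i' lives in a register
	    a[t] *= scale;
	  i = t;			single store pushed to the exit
	}
*/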
6418 /* This will search the ldst list for a matching expression. If it
6419 doesn't find one, we create one and initialize it. */
6421 static struct ls_expr *
6424 struct ls_expr * ptr;
6426 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6427 if (expr_equiv_p (ptr->pattern, x))
6432 ptr = xmalloc (sizeof (struct ls_expr));
6434 ptr->next = pre_ldst_mems;
6437 ptr->pattern_regs = NULL_RTX;
6438 ptr->loads = NULL_RTX;
6439 ptr->stores = NULL_RTX;
6440 ptr->reaching_reg = NULL_RTX;
6443 ptr->hash_index = 0;
6444 pre_ldst_mems = ptr;
6450 /* Free up an individual ldst entry. */
6453 free_ldst_entry (struct ls_expr * ptr)
6455 free_INSN_LIST_list (& ptr->loads);
6456 free_INSN_LIST_list (& ptr->stores);
6461 /* Free up all memory associated with the ldst list. */
6464 free_ldst_mems (void)
6466 while (pre_ldst_mems)
6468 struct ls_expr * tmp = pre_ldst_mems;
6470 pre_ldst_mems = pre_ldst_mems->next;
6472 free_ldst_entry (tmp);
6475 pre_ldst_mems = NULL;
/* Dump debugging info about the ldst list.  */

static void
print_ldst_list (FILE * file)
{
  struct ls_expr * ptr;

  fprintf (file, "LDST list: \n");

  for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);
      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 Loads : ");
      if (ptr->loads)
	print_rtl (file, ptr->loads);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	Stores : ");
      if (ptr->stores)
	print_rtl (file, ptr->stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
/* Return the ldst list entry matching X, or NULL if X is not in the
   list of ldst only expressions.  */

static struct ls_expr *
find_rtx_in_ldst (rtx x)
{
  struct ls_expr * ptr;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
      return ptr;

  return NULL;
}

/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_ldsts (void)
{
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr (void)
{
  return pre_ldst_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (struct ls_expr * ptr)
{
  return ptr->next;
}
/* Load Motion for loads which only kill themselves.  */

/* Return true if x is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */

static int
simple_mem (rtx x)
{
  if (GET_CODE (x) != MEM)
    return 0;
  if (MEM_VOLATILE_P (x))
    return 0;
  if (GET_MODE (x) == BLKmode)
    return 0;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

  if (side_effects_p (x))
    return 0;

  /* Do not consider function arguments passed on stack.  */
  if (reg_mentioned_p (stack_pointer_rtx, x))
    return 0;

  if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
    return 0;

  return 1;
}
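/* Two hypothetical examples (the RTXs and register numbers here are
   invented, for illustration only): a plain symbol reference such as

	(mem:SI (symbol_ref:SI ("x")))

   passes every check above and is considered for the ld_motion list,
   while an incoming argument slot such as

	(mem:SI (plus:SI (reg:SI 7 sp) (const_int 4)))

   mentions the stack pointer and is rejected, as are volatile and
   BLKmode references.  */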
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not alias each other.  If we miss a load, we will
   get the wrong value since gcse might common it and we won't know to
   fix it up.  */

static void
invalidate_any_buried_refs (rtx x)
{
  const char * fmt;
  int i, j;
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (GET_CODE (x) == MEM && simple_mem (x))
    {
      ptr = ldst_entry (x);
      ptr->invalid = 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  invalidate_any_buried_refs (XVECEXP (x, i, j));
    }
}
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  If there are any uses/defs
   which don't match these criteria, they are invalidated and trimmed out
   later.  */

static void
compute_ld_motion_mems (void)
{
  struct ls_expr * ptr;
  basic_block bb;
  rtx insn;

  pre_ldst_mems = NULL;

  FOR_EACH_BB (bb)
    {
      for (insn = bb->head;
	   insn && insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (PATTERN (insn)) == SET)
	    {
	      rtx src = SET_SRC (PATTERN (insn));
	      rtx dest = SET_DEST (PATTERN (insn));

	      /* Check for a simple LOAD...  */
	      if (GET_CODE (src) == MEM && simple_mem (src))
		{
		  ptr = ldst_entry (src);
		  if (GET_CODE (dest) == REG)
		    ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
		  else
		    ptr->invalid = 1;
		}
	      else
		{
		  /* Make sure there isn't a buried load somewhere.  */
		  invalidate_any_buried_refs (src);
		}

	      /* Check for stores.  Don't worry about aliased ones, they
		 will block any movement we might do later.  We only care
		 about this exact pattern since those are the only
		 circumstances in which we will ignore the aliasing info.  */
	      if (GET_CODE (dest) == MEM && simple_mem (dest))
		{
		  ptr = ldst_entry (dest);

		  if (GET_CODE (src) != MEM
		      && GET_CODE (src) != ASM_OPERANDS
		      /* Check for REG manually since want_to_gcse_p
			 returns 0 for all REGs.  */
		      && (REG_P (src) || want_to_gcse_p (src)))
		    ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
		  else
		    ptr->invalid = 1;
		}
	    }
	  else
	    invalidate_any_buried_refs (PATTERN (insn));
	}
    }
}
/* Remove any references that have been either invalidated or are not in the
   expression list for pre gcse.  */

static void
trim_ld_motion_mems (void)
{
  struct ls_expr * last = NULL;
  struct ls_expr * ptr = first_ls_expr ();

  while (ptr != NULL)
    {
      int del = ptr->invalid;
      struct expr * expr = NULL;

      /* Delete if entry has been made invalid.  */
      if (!del)
	{
	  unsigned int i;

	  del = 1;
	  /* Delete if we cannot find this mem in the expression list.  */
	  for (i = 0; i < expr_hash_table.size && del; i++)
	    for (expr = expr_hash_table.table[i];
		 expr != NULL && del;
		 expr = expr->next_same_hash)
	      if (expr_equiv_p (expr->expr, ptr->pattern))
		del = 0;
	}

      if (del)
	{
	  if (last != NULL)
	    {
	      last->next = ptr->next;
	      free_ldst_entry (ptr);
	      ptr = last->next;
	    }
	  else
	    {
	      pre_ldst_mems = pre_ldst_mems->next;
	      free_ldst_entry (ptr);
	      ptr = pre_ldst_mems;
	    }
	}
      else
	{
	  /* Set the expression field if we are keeping it.  */
	  ptr->expr = expr;
	  last = ptr;
	  ptr = ptr->next;
	}
    }

  /* Show the world what we've found.  */
  if (gcse_file && pre_ldst_mems != NULL)
    print_ldst_list (gcse_file);
}
/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */
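/* A hypothetical example (the register numbers are invented): a store

	(set (mem:SI (symbol_ref:SI ("i"))) (reg:SI 100))

   whose mem is on the ld_motion list with reaching register (reg:SI 200)
   would be rewritten as the pair

	(set (reg:SI 200) (reg:SI 100))
	(set (mem:SI (symbol_ref:SI ("i"))) (reg:SI 200))

   so that (reg:SI 200) always holds the value last stored to the mem.  */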
static void
update_ld_motion_stores (struct expr * expr)
{
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
    {
      /* We can try to find just the REACHED stores, but it shouldn't
	 matter to set the reaching reg everywhere...  some might be
	 dead and should be eliminated later.  */

      /* We replace (set mem expr) with (set reg expr) (set mem reg)
	 where reg is the reaching reg used in the load.  We checked in
	 compute_ld_motion_mems that we can replace (set mem expr) with
	 (set reg expr) in that insn.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
	{
	  rtx insn = XEXP (list, 0);
	  rtx pat = PATTERN (insn);
	  rtx src = SET_SRC (pat);
	  rtx reg = expr->reaching_reg;
	  rtx copy, new;

	  /* If we've already copied it, continue.  */
	  if (expr->reaching_reg == src)
	    continue;

	  if (gcse_file)
	    {
	      fprintf (gcse_file, "PRE:  store updated with reaching reg ");
	      print_rtl (gcse_file, expr->reaching_reg);
	      fprintf (gcse_file, ":\n	");
	      print_inline_rtx (gcse_file, insn, 8);
	      fprintf (gcse_file, "\n");
	    }

	  copy = gen_move_insn (reg, copy_rtx (SET_SRC (pat)));
	  new = emit_insn_before (copy, insn);
	  record_one_set (REGNO (reg), new);
	  SET_SRC (pat) = reg;

	  /* Un-recognize this pattern since it's probably different now.  */
	  INSN_CODE (insn) = -1;
	  gcse_create_count++;
	}
    }
}
/* Store motion code.  */

#define ANTIC_STORE_LIST(x)		((x)->loads)
#define AVAIL_STORE_LIST(x)		((x)->stores)
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)

/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static int * regvec;

/* And current insn, for the same routine.  */
static rtx compute_store_table_current_insn;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;
/* Record, in the array pointed to by REGVEC, the UID of the current insn
   for each register it sets.  Called from note_stores.  */

static void
reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
	      void *data ATTRIBUTE_UNUSED)
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
}

/* Return zero if some of the registers in list X are killed, i.e. have
   a nonzero entry in the REGS_SET array (indexed by register number).  */

static bool
store_ops_ok (rtx x, int *regs_set)
{
  rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO(reg)])
	return false;
    }

  return true;
}

/* Returns a list of registers mentioned in X.  */

static rtx
extract_mentioned_regs (rtx x)
{
  return extract_mentioned_regs_helper (x, NULL_RTX);
}
/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */

static rtx
extract_mentioned_regs_helper (rtx x, rtx accum)
{
  int i;
  enum rtx_code code;
  const char * fmt;

  /* Repeat is used to turn tail-recursion into iteration.  */
 repeat:
  if (x == 0)
    return accum;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return alloc_EXPR_LIST (0, x, accum);

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      /* We do not run this function with arguments having side effects.  */
      abort ();

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);

  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }

	  accum = extract_mentioned_regs_helper (tem, accum);
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
	}
    }

  return accum;
}
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is an array marking the registers set before (and
   including) the current insn; REGS_SET_AFTER marks the registers set after
   (and including) the insn in this basic block.  We must be passing through
   BB from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn that causes it to fail to
      be available is stored in reaching_reg (LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there already may be stores
   to the same MEM from other blocks; also the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
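/* An illustration (insn numbers and registers invented): if a block
   contains two stores to the same location with nothing in between
   that kills them,

	insn 10:  (set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 100))
	insn 20:  (set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 101))

   then insn 10 becomes the head of ANTIC_STORE_LIST for this mem (it is
   the first store the block is guaranteed to execute), while insn 20,
   being the last one before the block end, ends up as the head of
   AVAIL_STORE_LIST; the entry for insn 10 there is dead and may be
   deleted.  */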
static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct ls_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  ptr = ldst_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ANTIC_STORE_LIST (ptr))
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL_RTX;
      else
	tmp = insn;
      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
						ANTIC_STORE_LIST (ptr));
    }

  /* It is not necessary to check whether store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!AVAIL_STORE_LIST (ptr))
    check_available = 1;
  else
    {
      tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at which the check
	 failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  for (tmp = bb->end;
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
}
/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat, tmp;
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
					   max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = NULL;
  last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
  already_set = xmalloc (sizeof (int) * max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
      regvec = last_set_in;

      for (insn = bb->head;
	   insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  last_set_in[regno] = INSN_UID (insn);
	    }

	  pat = PATTERN (insn);
	  compute_store_table_current_insn = insn;
	  note_stores (pat, reg_set_info, NULL);
	}

      /* Record the set registers.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
	if (last_set_in[regno])
	  SET_BIT (reg_set_in_block[bb->index], regno);
      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      for (insn = bb->head;
	   insn != NEXT_INSN (bb->end);
	   insn = NEXT_INSN (insn))
	{
	  if (! INSN_P (insn))
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
	      if (NON_SAVING_SETJMP
		  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
		clobbers_all = true;
#endif

	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (clobbers_all
		    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  already_set[regno] = 1;
	    }

	  pat = PATTERN (insn);
	  note_stores (pat, reg_set_info, NULL);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  for (regno = 0; regno < max_gcse_regno; regno++)
	    if (last_set_in[regno] == INSN_UID (insn))
	      last_set_in[regno] = 0;
	}

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
	  if (ANTIC_STORE_LIST (ptr)
	      && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
	    ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
	}
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (!AVAIL_STORE_LIST (ptr))
	{
	  *prev_next_ptr_ptr = ptr->next;
	  free_ldst_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
    }

  free (last_set_in);
  free (already_set);
  return ret;
}
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the load X.  */

static bool
load_kills_store (rtx x, rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
			    rtx_addr_varies_p);
}

/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (rtx x, rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  bool ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}

/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */
static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
{
  rtx reg, base, note;

  if (!INSN_P (insn))
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of some of the registers used in the mem is the stack
	 pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	{
	  base = find_base_term (XEXP (reg, 0));
	  if (!base
	      || (GET_CODE (base) == ADDRESS
		  && GET_MODE (base) == Pmode
		  && XEXP (base, 0) == stack_pointer_rtx))
	    return true;
	}

      return false;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == SIGN_EXTRACT
	  || GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (dest) == MEM
	  && !expr_equiv_p (dest, x))
	{
	  if (after)
	    {
	      if (output_dependence (dest, x))
		return true;
	    }
	  else
	    {
	      if (output_dependence (x, dest))
		return true;
	    }
	}
      if (find_loads (SET_SRC (pat), x, after))
	return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (expr_equiv_p (note, x))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is an array marking the registers
   set in or after the insn.  X_REGS is a list of registers mentioned in X.
   If the store is killed, return in FAIL_INSN the last insn in which it is
   killed.  */

static bool
store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx last = bb->end, act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}

/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is a list of registers mentioned in X.
   REGS_SET_BEFORE is an array marking the registers set before or in this
   insn.  */

static bool
store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
		     int *regs_set_before)
{
  rtx first = bb->head;

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */

static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct ls_expr * ptr;
  unsigned regno;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (TEST_BIT (ae_gen[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
	      if (gcse_file)
		fprintf (gcse_file, "Removing redundant store:\n");
	      replace_store_insn (r, XEXP (st, 0), bb, ptr);
	      continue;
	    }
	  SET_BIT (ae_gen[bb->index], ptr->index);
	}

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
	{
	  insn = XEXP (st, 0);
	  bb = BLOCK_FOR_INSN (insn);
	  SET_BIT (st_antloc[bb->index], ptr->index);
	}
    }

  ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      for (regno = 0; regno < max_gcse_regno; regno++)
	regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!TEST_BIT (st_antloc[bb->index], ptr->index)
		  || !TEST_BIT (ae_gen[bb->index], ptr->index))
		SET_BIT (ae_kill[bb->index], ptr->index);
	    }
	  else
	    SET_BIT (transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (gcse_file)
    {
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}
/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
	  && (GET_CODE (before) != NOTE
	      || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
	break;
      prev = before;
      if (prev == bb->end)
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct ls_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	if (index == EDGE_INDEX_NO_EDGE)
	  abort ();
	if (! TEST_BIT (pre_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  RESET_BIT (pre_insert_map[index], expr->index);
	}
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
{
  edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  int stack_top = 0;
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  sbitmap_zero (visited);
  act = bb->succ;

  while (1)
    {
      if (!act)
	{
	  if (!stack_top)
	    {
	      free (stack);
	      sbitmap_free (visited);
	      return;
	    }
	  act = stack[--stack_top];
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
	  || TEST_BIT (visited, bb->index)
	  || TEST_BIT (ae_kill[bb->index], smexpr->index))
	{
	  act = act->succ_next;
	  continue;
	}
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
	{
	  for (last = ANTIC_STORE_LIST (smexpr);
	       BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
	       last = XEXP (last, 1))
	    continue;
	  last = XEXP (last, 0);
	}
      else
	last = NEXT_INSN (bb->end);

      for (insn = bb->head; insn != last; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !expr_equiv_p (XEXP (note, 0), mem))
	      continue;

	    if (gcse_file)
	      fprintf (gcse_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }
      act = act->succ_next;
      if (bb->succ)
	{
	  if (act)
	    stack[stack_top++] = act;
	  act = bb->succ;
	}
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
	       "STORE_MOTION  delete insn in BB %d:\n        ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION  replaced with insn:\n      ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
	XEXP (ptr, 0) = insn;
	break;
      }
  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  for (; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	if (expr_equiv_p (SET_DEST (set), mem))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !expr_equiv_p (XEXP (note, 0), mem))
	  continue;

	if (gcse_file)
	  fprintf (gcse_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */
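/* For illustration (the register numbers are invented): deleting the store

	(set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 100))

   leaves behind

	(set (reg:SI 200) (reg:SI 100))

   where (reg:SI 200) is the expression's reaching register; insert_store
   later emits (set (mem:SI (symbol_ref:SI ("x"))) (reg:SI 200)) on the
   paths that still need the store.  */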
static void
delete_store (struct ls_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb, expr);
	  break;
	}
    }
}

/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */
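/* In outline (a summary of the code below, not a separate algorithm):
   compute_store_table collects the anticipatable and available stores,
   build_store_vectors turns them into the antloc/avloc/kill/transparent
   bit vectors, pre_edge_rev_lcm runs lazy code motion over the reverse
   flowgraph, and delete_store/insert_store apply the resulting delete
   and insert maps.  */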
static void
store_motion (void)
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
				st_antloc, ae_kill, &pre_insert_map,
				&pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
	if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
	  delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (TEST_BIT (pre_insert_map[x], ptr->index))
	  update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (FILE *file)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple switch statements.  Rather than simply
     thresholding the number of blocks, we use something with a more
     graceful degradation.  */
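  /* For example: with the limit below, a function with 1000 basic blocks
     is only rejected once it has more than 20000 + 1000 * 4 = 24000
     edges, i.e. an average of 24 edges per block, an order of magnitude
     above the typical two.  */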
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      if (warn_disabled_optimization)
	warning ("%s: %d basic blocks and %d edges/basic block",
		 pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("%s: %d basic blocks and %d registers",
		 pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}

#include "gt-gcse.h"