/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   F. Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).
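
   As a hypothetical sketch (not taken from any testcase): the first cprop
   pass may turn "r2 <- r1 + r3" into "r2 <- 5 + r3" once r1 is known to
   hold 5; GCSE can then unify that with a later "r4 <- 5 + r3" by reusing
   r2's value, and the second cprop pass propagates away the register copy
   that GCSE introduced.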

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
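
   For instance, (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
   is the kind of candidate meant here; sets of hard registers are not
   candidates, since GCSE is done on sets of pseudo-regs only (see the
   `reg_set_table' comments further down).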

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
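
   E.g. (a sketch), a computation of "a + b" in a loop body whose operands
   are unchanged in the loop is partially redundant along the back edge;
   inserting the computation on the loop entry edge makes the in-loop
   computation fully redundant, which effectively hoists it out of the loop.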

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
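
   As a sketch (the block numbers are hypothetical), suppose B3 has
   predecessors B1 and B2, only B1 computes "r1 + r2", and B3 recomputes it:

	B1: r3 <- r1 + r2	       B1: r3 <- r1 + r2
				           r5 <- r3		[step 5 copy]
	B2: (no computation)   ==>     B2: r5 <- r1 + r2	[step 4 insert]
	B3: r4 <- r1 + r2	       B3: r4 <- r5		[step 3 delete]

   Here r5 is the newly created pseudo; because the deletion in B3 is
   decided first, we know to use r5 when rewriting B3.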

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use { rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  unsigned int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is for expressions or for copy propagation.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
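
/* A sketch of a typical walk of one register's chain (compute_transp does
   the real work; `bb_sets' is a hypothetical per-block sbitmap used only
   for illustration):

	struct reg_set *r;
	for (r = reg_set_table[regno]; r != NULL; r = r->next)
	  SET_BIT (bb_sets[BLOCK_NUM (r->insn)], regno);

   The loop visits one node per set of the pseudo-reg, rather than one
   node per basic block.  */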

/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by anything except itself
   (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  unsigned int hash_index;	/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};

static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static int get_bitmap_width (int, int, int);
static void record_one_set (int, rtx);
static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
				  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_expr_1 (rtx, enum machine_mode, int *);
static unsigned int hash_string_1 (const char *);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_expr (rtx, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
				      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
				    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static void alloc_rd_mem (int, int);
static void free_rd_mem (void);
static void handle_rd_kill_set (rtx, int, basic_block);
static void compute_kill_rd (void);
static void compute_rd (void);
static void alloc_avail_expr_mem (int, int);
static void free_avail_expr_mem (void);
static void compute_ae_gen (struct hash_table *);
static int expr_killed_p (rtx, basic_block);
static void compute_ae_kill (sbitmap *, sbitmap *, struct hash_table *);
static int expr_reaches_here_p (struct occr *, struct expr *, basic_block,
				int);
static rtx computing_insn (struct expr *, rtx);
static int def_reaches_here_p (rtx, rtx);
static int can_disregard_other_sets (struct reg_set **, rtx, int);
static int handle_avail_expr (rtx, struct expr *);
static int classic_gcse (void);
static int one_classic_gcse_pass (int);
static void invalidate_nonnull_info (rtx, rtx, void *);
static int delete_null_pointer_checks_1 (unsigned int *, sbitmap *, sbitmap *,
					 struct null_pointer_info *);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int expr_reaches_here_p_work (struct occr *, struct expr *,
				     basic_block, int, char *);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
					 basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  if (changed)
	    {
	      free_modify_mem_tables ();
	      modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	      canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
	    }
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	fprintf (file, "\n");

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif

  memset (can_copy, 0, NUM_MACHINE_MODES);

  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}

/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (int n, int x, int y)
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.
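
   For instance, given a block

	r5 <- r1 + r2
	r1 <- ...

   the expression (plus r1 r2) is locally anticipatable (it is computed
   first and its operands are unmodified before it) but neither transparent
   nor locally available, because r1 is clobbered before the end of the
   block.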

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
			  struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
   Update the corresponding `reg_set_table' entry accordingly.
   We assume that NEW_INSN is not already recorded in reg_set_table[regno].  */

static void
replace_one_set (int regno, rtx old_insn, rtx new_insn)
{
  struct reg_set *reg_info;
  if (regno >= reg_set_table_size)
    return;
  for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
    if (reg_info->insn == old_insn)
      {
	reg_info->insn = new_insn;
	break;
      }
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
				new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static GTY(()) rtx test_insn;

static int
want_to_gcse_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
    case CONSTANT_P_RTX:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

	if (info->last_bb != current_bb)
	  return 1;
	if (avail_p)
	  return info->last_set < INSN_CUID (insn);
	else
	  return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
				  x, avail_p))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
			  void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
	gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
		       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
	   && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
	  || (! avail_p
	      && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
	{
	  list_entry = XEXP (list_entry, 1);
	  continue;
	}

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (GET_CODE (setter) == CALL_INSN)
	return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.

	 The note_stores interface is pretty limited, so we have to
	 communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
	return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
	   int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (const char *ps)
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (rtx x, enum machine_mode mode, int *do_not_record_p)
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  hash += (unsigned int) XWINT (x, i);
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_expr_1 (elt, GET_MODE (elt), do_not_record_p);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

      hash += (unsigned int) MEM;
      /* We used alias set for hashing, but this is not good, since the alias
	 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
	 causing the profiles to fail to match.  */
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
	    + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p)
			   + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
					    (x, i)));
		}

	      hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }
      else if (fmt[i] == 's')
	hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
	hash += (unsigned int) XINT (x, i);
      else
	abort ();
    }

  return hash;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return 0;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case CONST_INT:
      return x == y;

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;

      /* A volatile mem should not be considered equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
				ASM_OPERANDS_INPUT (y, i))
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
1891 /* Insert expression X in INSN in the hash TABLE.
1892 If it is already present, record it as the last occurrence in INSN's
1895 MODE is the mode of the value X is being stored into.
1896 It is only used if X is a CONST_INT.
1898 ANTIC_P is nonzero if X is an anticipatable expression.
1899 AVAIL_P is nonzero if X is an available expression. */
1902 insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
1903 int avail_p, struct hash_table *table)
1905 int found, do_not_record_p;
1907 struct expr *cur_expr, *last_expr = NULL;
1908 struct occr *antic_occr, *avail_occr;
1909 struct occr *last_occr = NULL;
1911 hash = hash_expr (x, mode, &do_not_record_p, table->size);
1913 /* Do not insert expression in table if it contains volatile operands,
1914 or if hash_expr determines the expression is something we don't want
1915 to or can't handle. */
1916 if (do_not_record_p)
1919 cur_expr = table->table[hash];
1922 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1924 /* If the expression isn't found, save a pointer to the end of
1926 last_expr = cur_expr;
1927 cur_expr = cur_expr->next_same_hash;
1932 cur_expr = gcse_alloc (sizeof (struct expr));
1933 bytes_used += sizeof (struct expr);
1934 if (table->table[hash] == NULL)
1935 /* This is the first pattern that hashed to this index. */
1936 table->table[hash] = cur_expr;
1938 /* Add EXPR to end of this hash chain. */
1939 last_expr->next_same_hash = cur_expr;
1941 /* Set the fields of the expr element. */
1943 cur_expr->bitmap_index = table->n_elems++;
1944 cur_expr->next_same_hash = NULL;
1945 cur_expr->antic_occr = NULL;
1946 cur_expr->avail_occr = NULL;
1949 /* Now record the occurrence(s). */
1952 antic_occr = cur_expr->antic_occr;
1954 /* Search for another occurrence in the same basic block. */
1955 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1957 /* If an occurrence isn't found, save a pointer to the end of
1959 last_occr = antic_occr;
1960 antic_occr = antic_occr->next;
1964 /* Found another instance of the expression in the same basic block.
1965 Prefer the currently recorded one. We want the first one in the
1966 block and the block is scanned from start to end. */
1967 ; /* nothing to do */
1970 /* First occurrence of this expression in this basic block. */
1971 antic_occr = gcse_alloc (sizeof (struct occr));
1972 bytes_used += sizeof (struct occr);
1973 /* First occurrence of this expression in any block? */
1974 if (cur_expr->antic_occr == NULL)
1975 cur_expr->antic_occr = antic_occr;
1977 last_occr->next = antic_occr;
1979 antic_occr->insn = insn;
1980 antic_occr->next = NULL;
1986 avail_occr = cur_expr->avail_occr;
1988 /* Search for another occurrence in the same basic block. */
1989 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1991 /* If an occurrence isn't found, save a pointer to the end of
1993 last_occr = avail_occr;
1994 avail_occr = avail_occr->next;
1998 /* Found another instance of the expression in the same basic block.
1999 Prefer this occurrence to the currently recorded one. We want
2000 the last one in the block and the block is scanned from start
2002 avail_occr->insn = insn;
2005 /* First occurrence of this expression in this basic block. */
2006 avail_occr = gcse_alloc (sizeof (struct occr));
2007 bytes_used += sizeof (struct occr);
2009 /* First occurrence of this expression in any block? */
2010 if (cur_expr->avail_occr == NULL)
2011 cur_expr->avail_occr = avail_occr;
2012 else
2013 last_occr->next = avail_occr;
2015 avail_occr->insn = insn;
2016 avail_occr->next = NULL;
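/* A minimal sketch (not part of the pass) of the occurrence discipline
   above, using hypothetical simplified types: blocks are scanned from
   start to end, so for anticipatable occurrences we keep the first insn
   seen in each block, and for available occurrences the last.  */
#if 0
struct toy_occr { struct toy_occr *next; int block; int insn_uid; };

static void
record_toy_occr (struct toy_occr **list, int block, int insn_uid,
		 int prefer_last)
{
  struct toy_occr *o = *list, *last = NULL;

  /* Look for an existing occurrence in the same block.  */
  while (o && o->block != block)
    last = o, o = o->next;

  if (o)
    {
      /* avail: prefer the last insn in the block; antic: keep the first.  */
      if (prefer_last)
	o->insn_uid = insn_uid;
    }
  else
    {
      /* First occurrence in this block; append to the list.  */
      struct toy_occr *n = xmalloc (sizeof *n);
      n->block = block, n->insn_uid = insn_uid, n->next = NULL;
      if (last)
	last->next = n;
      else
	*list = n;
    }
}
#endif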
2021 /* Insert pattern X in INSN in the hash table.
2022 X is a SET of a reg to either another reg or a constant.
2023 If it is already present, record it as the last occurrence in INSN's basic block.
2027 insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
2029 int found;
2030 unsigned int hash;
2031 struct expr *cur_expr, *last_expr = NULL;
2032 struct occr *cur_occr, *last_occr = NULL;
2034 if (GET_CODE (x) != SET
2035 || GET_CODE (SET_DEST (x)) != REG)
2038 hash = hash_set (REGNO (SET_DEST (x)), table->size);
2040 cur_expr = table->table[hash];
2043 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
2045 /* If the expression isn't found, save a pointer to the end of the list. */
2047 last_expr = cur_expr;
2048 cur_expr = cur_expr->next_same_hash;
2053 cur_expr = gcse_alloc (sizeof (struct expr));
2054 bytes_used += sizeof (struct expr);
2055 if (table->table[hash] == NULL)
2056 /* This is the first pattern that hashed to this index. */
2057 table->table[hash] = cur_expr;
2058 else
2059 /* Add EXPR to end of this hash chain. */
2060 last_expr->next_same_hash = cur_expr;
2062 /* Set the fields of the expr element.
2063 We must copy X because it can be modified when copy propagation is
2064 performed on its operands. */
2065 cur_expr->expr = copy_rtx (x);
2066 cur_expr->bitmap_index = table->n_elems++;
2067 cur_expr->next_same_hash = NULL;
2068 cur_expr->antic_occr = NULL;
2069 cur_expr->avail_occr = NULL;
2072 /* Now record the occurrence. */
2073 cur_occr = cur_expr->avail_occr;
2075 /* Search for another occurrence in the same basic block. */
2076 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
2078 /* If an occurrence isn't found, save a pointer to the end of the list. */
2080 last_occr = cur_occr;
2081 cur_occr = cur_occr->next;
2085 /* Found another instance of the expression in the same basic block.
2086 Prefer this occurrence to the currently recorded one. We want the
2087 last one in the block and the block is scanned from start to end. */
2088 cur_occr->insn = insn;
2091 /* First occurrence of this expression in this basic block. */
2092 cur_occr = gcse_alloc (sizeof (struct occr));
2093 bytes_used += sizeof (struct occr);
2095 /* First occurrence of this expression in any block? */
2096 if (cur_expr->avail_occr == NULL)
2097 cur_expr->avail_occr = cur_occr;
2098 else
2099 last_occr->next = cur_occr;
2101 cur_occr->insn = insn;
2102 cur_occr->next = NULL;
2106 /* Determine whether the rtx X should be treated as a constant for
2107 the purposes of GCSE's constant propagation. */
2110 gcse_constant_p (rtx x)
2112 /* Consider a COMPARE of two integers constant. */
2113 if (GET_CODE (x) == COMPARE
2114 && GET_CODE (XEXP (x, 0)) == CONST_INT
2115 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2119 /* Consider a COMPARE of the same registers a constant
2120 if they are not floating point registers. */
2121 if (GET_CODE (x) == COMPARE
2122 && GET_CODE (XEXP (x, 0)) == REG
2123 && GET_CODE (XEXP (x, 1)) == REG
2124 && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
2125 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
2126 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
2129 if (GET_CODE (x) == CONSTANT_P_RTX)
2132 return CONSTANT_P (x);
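/* For illustration (not taken from a real dump), all of these are
   treated as constants under the rules above:

     (const_int 42)
     (symbol_ref "x")
     (compare (const_int 1) (const_int 2))
     (compare (reg:SI 100) (reg:SI 100))

   whereas (compare (reg:DF 102) (reg:DF 102)) is not, since a floating
   point register compared with itself need not compare equal (NaNs).  */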
2135 /* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or expression entry). */
2139 hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
2141 rtx src = SET_SRC (pat);
2142 rtx dest = SET_DEST (pat);
2145 if (GET_CODE (src) == CALL)
2146 hash_scan_call (src, insn, table);
2148 else if (GET_CODE (dest) == REG)
2150 unsigned int regno = REGNO (dest);
2153 /* If this is a single set and we are doing constant propagation,
2154 see if a REG_NOTE shows it to be equivalent to a constant. */
2155 if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
2156 && gcse_constant_p (XEXP (note, 0)))
2157 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
2159 /* Only record sets of pseudo-regs in the hash table. */
2161 && regno >= FIRST_PSEUDO_REGISTER
2162 /* Don't GCSE something if we can't do a reg/reg copy. */
2163 && can_copy_p (GET_MODE (dest))
2164 /* GCSE commonly inserts instructions after the insn. We can't
2165 do that easily for EH_REGION notes, so disable GCSE on these
2166 for now. */
2167 && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2168 /* Is SET_SRC something we want to gcse? */
2169 && want_to_gcse_p (src)
2170 /* Don't CSE a nop. */
2171 && ! set_noop_p (pat)
2172 /* Don't GCSE if it has attached REG_EQUIV note.
2173 At this point only function parameters should have
2174 REG_EQUIV notes, and if the argument slot is used somewhere
2175 explicitly, it means the address of the parameter has been taken,
2176 so we should not extend the lifetime of the pseudo. */
2177 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2178 || GET_CODE (XEXP (note, 0)) != MEM))
2180 /* An expression is not anticipatable if its operands are
2181 modified before this insn or if this is not the only SET in this insn. */
2183 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
2184 /* An expression is not available if its operands are
2185 subsequently modified, including this insn. It's also not
2186 available if this is a branch, because we can't insert
2187 a set after the branch. */
2188 int avail_p = (oprs_available_p (src, insn)
2189 && ! JUMP_P (insn));
2191 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
2194 /* Record sets for constant/copy propagation. */
2195 else if (table->set_p
2196 && regno >= FIRST_PSEUDO_REGISTER
2197 && ((GET_CODE (src) == REG
2198 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2199 && can_copy_p (GET_MODE (dest))
2200 && REGNO (src) != regno)
2201 || gcse_constant_p (src))
2202 /* A copy is not available if its src or dest is subsequently
2203 modified. Here we want to search from INSN+1 on, but
2204 oprs_available_p searches from INSN on. */
2205 && (insn == BLOCK_END (BLOCK_NUM (insn))
2206 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
2207 && oprs_available_p (pat, tmp))))
2208 insert_set_in_table (pat, insn, table);
2210 /* In case of store we want to consider the memory value as available in
2211 the REG stored in that memory. This makes it possible to remove
2212 redundant loads due to stores to the same location. */
2213 else if (flag_gcse_las && GET_CODE (src) == REG && GET_CODE (dest) == MEM)
2215 unsigned int regno = REGNO (src);
2217 /* Do not do this for constant/copy propagation. */
2219 /* Only record sets of pseudo-regs in the hash table. */
2220 && regno >= FIRST_PSEUDO_REGISTER
2221 /* Don't GCSE something if we can't do a reg/reg copy. */
2222 && can_copy_p (GET_MODE (src))
2223 /* GCSE commonly inserts instructions after the insn. We can't
2224 do that easily for EH_REGION notes, so disable GCSE on these
2225 for now. */
2226 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2227 /* Is SET_DEST something we want to gcse? */
2228 && want_to_gcse_p (dest)
2229 /* Don't CSE a nop. */
2230 && ! set_noop_p (pat)
2231 /* Don't GCSE if it has attached REG_EQUIV note.
2232 At this point only function parameters should have
2233 REG_EQUIV notes, and if the argument slot is used somewhere
2234 explicitly, it means the address of the parameter has been taken,
2235 so we should not extend the lifetime of the pseudo. */
2236 && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
2237 || GET_CODE (XEXP (note, 0)) != MEM))
2239 /* Stores are never anticipatable. */
2240 int antic_p = 0;
2241 /* An expression is not available if its operands are
2242 subsequently modified, including this insn. It's also not
2243 available if this is a branch, because we can't insert
2244 a set after the branch. */
2245 int avail_p = (oprs_available_p (dest, insn)
2246 && ! JUMP_P (insn));
2248 /* Record the memory expression (DEST) in the hash table. */
2249 insert_expr_in_table (dest, GET_MODE (dest), insn,
2250 antic_p, avail_p, table);
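/* Illustrative examples of the cases above (regs 100 and 101 are
   assumed to be pseudos; not taken from a real dump):

     (set (reg 100) (plus (reg 101) (const_int 4)))
	-> expression table: the PLUS is a GCSE candidate.
     (set (reg 100) (const_int 7))
	-> set table: candidate for constant propagation.
     (set (reg 100) (reg 101))
	-> set table: candidate for copy propagation.
     (set (reg 100) (reg 100))
	-> rejected by set_noop_p.
     (set (mem:SI (reg 100)) (reg 101))
	-> with -fgcse-las, the MEM is recorded as an available
	   expression so later loads from it can be removed.  */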
2256 hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2257 struct hash_table *table ATTRIBUTE_UNUSED)
2259 /* Currently nothing to do. */
2263 hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
2264 struct hash_table *table ATTRIBUTE_UNUSED)
2266 /* Currently nothing to do. */
2269 /* Process INSN and add hash table entries as appropriate.
2271 Only available expressions that set a single pseudo-reg are recorded.
2273 Single sets in a PARALLEL could be handled, but it's an extra complication
2274 that isn't dealt with right now. The trick is handling the CLOBBERs that
2275 are also in the PARALLEL. Later.
2277 If SET_P is nonzero, this is for the assignment hash table,
2278 otherwise it is for the expression hash table.
2279 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
2280 not record any expressions. */
2283 hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
2285 rtx pat = PATTERN (insn);
2288 if (in_libcall_block)
2291 /* Pick out the sets of INSN and for other forms of instructions record
2292 what's been modified. */
2294 if (GET_CODE (pat) == SET)
2295 hash_scan_set (pat, insn, table);
2296 else if (GET_CODE (pat) == PARALLEL)
2297 for (i = 0; i < XVECLEN (pat, 0); i++)
2299 rtx x = XVECEXP (pat, 0, i);
2301 if (GET_CODE (x) == SET)
2302 hash_scan_set (x, insn, table);
2303 else if (GET_CODE (x) == CLOBBER)
2304 hash_scan_clobber (x, insn, table);
2305 else if (GET_CODE (x) == CALL)
2306 hash_scan_call (x, insn, table);
2309 else if (GET_CODE (pat) == CLOBBER)
2310 hash_scan_clobber (pat, insn, table);
2311 else if (GET_CODE (pat) == CALL)
2312 hash_scan_call (pat, insn, table);
2316 dump_hash_table (FILE *file, const char *name, struct hash_table *table)
2319 /* Flattened out table, so it's printed in proper order. */
2320 struct expr **flat_table;
2321 unsigned int *hash_val;
2324 flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
2325 hash_val = xmalloc (table->n_elems * sizeof (unsigned int));
2327 for (i = 0; i < (int) table->size; i++)
2328 for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
2330 flat_table[expr->bitmap_index] = expr;
2331 hash_val[expr->bitmap_index] = i;
2334 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2335 name, table->size, table->n_elems);
2337 for (i = 0; i < (int) table->n_elems; i++)
2338 if (flat_table[i] != 0)
2340 expr = flat_table[i];
2341 fprintf (file, "Index %d (hash value %d)\n ",
2342 expr->bitmap_index, hash_val[i]);
2343 print_rtl (file, expr->expr);
2344 fprintf (file, "\n");
2347 fprintf (file, "\n");
2353 /* Record register first/last/block set information for REGNO in INSN.
2355 first_set records the first place in the block where the register
2356 is set and is used to compute "anticipatability".
2358 last_set records the last place in the block where the register
2359 is set and is used to compute "availability".
2361 last_bb records the block for which first_set and last_set are
2362 valid, as a quick test to invalidate them.
2364 reg_set_in_block records whether the register is set in the block
2365 and is used to compute "transparency". */
2368 record_last_reg_set_info (rtx insn, int regno)
2370 struct reg_avail_info *info = &reg_avail_info[regno];
2371 int cuid = INSN_CUID (insn);
2373 info->last_set = cuid;
2374 if (info->last_bb != current_bb)
2376 info->last_bb = current_bb;
2377 info->first_set = cuid;
2378 SET_BIT (reg_set_in_block[current_bb->index], regno);
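/* A worked example of the bookkeeping above: if block 3 contains

     insn 10: (set (reg 100) ...)
     insn 12: (set (reg 100) ...)

   then after scanning, first_set is insn 10's CUID, last_set is insn
   12's CUID, last_bb is block 3, and bit 100 of reg_set_in_block[3] is
   set, so block 3 is not transparent for expressions using reg 100.  */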
2383 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
2384 Note we store a pair of elements in the list, so they have to be
2385 taken off pairwise. */
2388 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
2391 rtx dest_addr, insn;
2394 while (GET_CODE (dest) == SUBREG
2395 || GET_CODE (dest) == ZERO_EXTRACT
2396 || GET_CODE (dest) == SIGN_EXTRACT
2397 || GET_CODE (dest) == STRICT_LOW_PART)
2398 dest = XEXP (dest, 0);
2400 /* If DEST is not a MEM, then it will not conflict with a load. Note
2401 that function calls are assumed to clobber memory, but are handled elsewhere. */
2404 if (GET_CODE (dest) != MEM)
2407 dest_addr = get_addr (XEXP (dest, 0));
2408 dest_addr = canon_rtx (dest_addr);
2409 insn = (rtx) v_insn;
2410 bb = BLOCK_NUM (insn);
2412 canon_modify_mem_list[bb] =
2413 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
2414 canon_modify_mem_list[bb] =
2415 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
2416 bitmap_set_bit (canon_modify_mem_list_set, bb);
2419 /* Record memory modification information for INSN. We do not actually care
2420 about the memory location(s) that are set, or even how they are set (consider
2421 a CALL_INSN). We merely need to record which insns modify memory. */
2424 record_last_mem_set_info (rtx insn)
2426 int bb = BLOCK_NUM (insn);
2428 /* load_killed_in_block_p will handle the case of calls clobbering everything. */
2430 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
2431 bitmap_set_bit (modify_mem_list_set, bb);
2433 if (GET_CODE (insn) == CALL_INSN)
2435 /* Note that traversals of this loop (other than for freeing)
2436 will break after encountering a CALL_INSN. So, there's no
2437 need to insert a pair of items, as canon_list_insert does. */
2438 canon_modify_mem_list[bb] =
2439 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
2440 bitmap_set_bit (canon_modify_mem_list_set, bb);
2443 note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
2446 /* Called from compute_hash_table via note_stores to handle one
2447 SET or CLOBBER in an insn. DATA is really the instruction in which
2448 the SET is taking place. */
2451 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
2453 rtx last_set_insn = (rtx) data;
2455 if (GET_CODE (dest) == SUBREG)
2456 dest = SUBREG_REG (dest);
2458 if (GET_CODE (dest) == REG)
2459 record_last_reg_set_info (last_set_insn, REGNO (dest));
2460 else if (GET_CODE (dest) == MEM
2461 /* Ignore pushes, they clobber nothing. */
2462 && ! push_operand (dest, GET_MODE (dest)))
2463 record_last_mem_set_info (last_set_insn);
2466 /* Top level function to create an expression or assignment hash table.
2468 Expression entries are placed in the hash table if
2469 - they are of the form (set (pseudo-reg) src),
2470 - src is something we want to perform GCSE on,
2471 - none of the operands are subsequently modified in the block
2473 Assignment entries are placed in the hash table if
2474 - they are of the form (set (pseudo-reg) src),
2475 - src is something we want to perform const/copy propagation on,
2476 - none of the operands or target are subsequently modified in the block
2478 Currently src must be a pseudo-reg or a const_int.
2480 TABLE is the table computed. */
2483 compute_hash_table_work (struct hash_table *table)
2487 /* While we compute the hash table we also compute a bit array of which
2488 registers are set in which blocks.
2489 ??? This isn't needed during const/copy propagation, but it's cheap to compute. Later. */
2491 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
2493 /* Re-cache any INSN_LIST nodes we have allocated. */
2494 clear_modify_mem_tables ();
2495 /* Some working arrays used to track first and last set in each block. */
2496 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2498 for (i = 0; i < max_gcse_regno; ++i)
2499 reg_avail_info[i].last_bb = NULL;
2501 FOR_EACH_BB (current_bb)
2505 int in_libcall_block;
2507 /* First pass over the instructions records information used to
2508 determine when registers and memory are first and last set.
2509 ??? hard-reg reg_set_in_block computation
2510 could be moved to compute_sets since they currently don't change. */
2512 for (insn = current_bb->head;
2513 insn && insn != NEXT_INSN (current_bb->end);
2514 insn = NEXT_INSN (insn))
2516 if (! INSN_P (insn))
2519 if (GET_CODE (insn) == CALL_INSN)
2521 bool clobbers_all = false;
2522 #ifdef NON_SAVING_SETJMP
2523 if (NON_SAVING_SETJMP
2524 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
2525 clobbers_all = true;
2528 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2529 if (clobbers_all
2530 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2531 record_last_reg_set_info (insn, regno);
2536 note_stores (PATTERN (insn), record_last_set_info, insn);
2539 /* Insert implicit sets in the hash table. */
2540 if (table->set_p
2541 && implicit_sets[current_bb->index] != NULL_RTX)
2542 hash_scan_set (implicit_sets[current_bb->index],
2543 current_bb->head, table);
2545 /* The next pass builds the hash table. */
2547 for (insn = current_bb->head, in_libcall_block = 0;
2548 insn && insn != NEXT_INSN (current_bb->end);
2549 insn = NEXT_INSN (insn))
2552 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2553 in_libcall_block = 1;
2554 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2555 in_libcall_block = 0;
2556 hash_scan_insn (insn, table, in_libcall_block);
2557 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2558 in_libcall_block = 0;
2562 free (reg_avail_info);
2563 reg_avail_info = NULL;
2566 /* Allocate space for the set/expr hash TABLE.
2567 N_INSNS is the number of instructions in the function.
2568 It is used to determine the number of buckets to use.
2569 SET_P determines whether set or expression table will be created. */
2573 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2577 table->size = n_insns / 4;
2578 if (table->size < 11)
2579 table->size = 11;
2581 /* Attempt to maintain efficient use of hash table.
2582 Making it an odd number is simplest for now.
2583 ??? Later take some measurements. */
2584 table->size |= 1;
2585 n = table->size * sizeof (struct expr *);
2586 table->table = gmalloc (n);
2587 table->set_p = set_p;
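/* Worked example of the sizing above: a function with 1000 insns gets
   1000 / 4 = 250 buckets, made odd as 251; one with 20 insns gets the
   minimum of 11.  */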
2590 /* Free things allocated by alloc_hash_table. */
2593 free_hash_table (struct hash_table *table)
2595 free (table->table);
2598 /* Compute the hash TABLE for doing copy/const propagation or
2599 building the expression hash table. */
2602 compute_hash_table (struct hash_table *table)
2604 /* Initialize count of number of entries in hash table. */
2605 table->n_elems = 0;
2606 memset (table->table, 0, table->size * sizeof (struct expr *));
2608 compute_hash_table_work (table);
2611 /* Expression tracking support. */
2613 /* Lookup pattern PAT in the expression TABLE.
2614 The result is a pointer to the table entry, or NULL if not found. */
2616 static struct expr *
2617 lookup_expr (rtx pat, struct hash_table *table)
2619 int do_not_record_p;
2620 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2621 table->size);
2622 struct expr *expr;
2624 if (do_not_record_p)
2627 expr = table->table[hash];
2629 while (expr && ! expr_equiv_p (expr->expr, pat))
2630 expr = expr->next_same_hash;
2635 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2636 table entry, or NULL if not found. */
2638 static struct expr *
2639 lookup_set (unsigned int regno, struct hash_table *table)
2641 unsigned int hash = hash_set (regno, table->size);
2644 expr = table->table[hash];
2646 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2647 expr = expr->next_same_hash;
2652 /* Return the next entry for REGNO in list EXPR. */
2654 static struct expr *
2655 next_set (unsigned int regno, struct expr *expr)
2657 do
2658 expr = expr->next_same_hash;
2659 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2664 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2665 types may be mixed. */
2668 free_insn_expr_list_list (rtx *listp)
2672 for (list = *listp; list ; list = next)
2674 next = XEXP (list, 1);
2675 if (GET_CODE (list) == EXPR_LIST)
2676 free_EXPR_LIST_node (list);
2677 else
2678 free_INSN_LIST_node (list);
2684 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2686 clear_modify_mem_tables (void)
2690 EXECUTE_IF_SET_IN_BITMAP
2691 (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
2692 bitmap_clear (modify_mem_list_set);
2694 EXECUTE_IF_SET_IN_BITMAP
2695 (canon_modify_mem_list_set, 0, i,
2696 free_insn_expr_list_list (canon_modify_mem_list + i));
2697 bitmap_clear (canon_modify_mem_list_set);
2700 /* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
2703 free_modify_mem_tables (void)
2705 clear_modify_mem_tables ();
2706 free (modify_mem_list);
2707 free (canon_modify_mem_list);
2708 modify_mem_list = 0;
2709 canon_modify_mem_list = 0;
2712 /* Reset tables used to keep track of what's still available [since the
2713 start of the block]. */
2716 reset_opr_set_tables (void)
2718 /* Maintain a bitmap of which regs have been set since the beginning of the block. */
2720 CLEAR_REG_SET (reg_set_bitmap);
2722 /* Also keep a record of the last instruction to modify memory.
2723 For now this is very trivial; we only record whether any memory
2724 location has been modified. */
2725 clear_modify_mem_tables ();
2728 /* Return nonzero if the operands of X are not set before INSN in
2729 INSN's basic block. */
2732 oprs_not_set_p (rtx x, rtx insn)
2741 code = GET_CODE (x);
2757 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2758 INSN_CUID (insn), x, 0))
2761 return oprs_not_set_p (XEXP (x, 0), insn);
2764 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2770 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2774 /* If we are about to do the last recursive call
2775 needed at this level, change it into iteration.
2776 This function is called enough to be worth it. */
2777 if (i == 0)
2778 return oprs_not_set_p (XEXP (x, i), insn);
2780 if (! oprs_not_set_p (XEXP (x, i), insn))
2783 else if (fmt[i] == 'E')
2784 for (j = 0; j < XVECLEN (x, i); j++)
2785 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2792 /* Mark things set by a CALL. */
2795 mark_call (rtx insn)
2797 if (! CONST_OR_PURE_CALL_P (insn))
2798 record_last_mem_set_info (insn);
2801 /* Mark things set by a SET. */
2804 mark_set (rtx pat, rtx insn)
2806 rtx dest = SET_DEST (pat);
2808 while (GET_CODE (dest) == SUBREG
2809 || GET_CODE (dest) == ZERO_EXTRACT
2810 || GET_CODE (dest) == SIGN_EXTRACT
2811 || GET_CODE (dest) == STRICT_LOW_PART)
2812 dest = XEXP (dest, 0);
2814 if (GET_CODE (dest) == REG)
2815 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2816 else if (GET_CODE (dest) == MEM)
2817 record_last_mem_set_info (insn);
2819 if (GET_CODE (SET_SRC (pat)) == CALL)
2823 /* Record things set by a CLOBBER. */
2826 mark_clobber (rtx pat, rtx insn)
2828 rtx clob = XEXP (pat, 0);
2830 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2831 clob = XEXP (clob, 0);
2833 if (GET_CODE (clob) == REG)
2834 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2836 record_last_mem_set_info (insn);
2839 /* Record things set by INSN.
2840 This data is used by oprs_not_set_p. */
2843 mark_oprs_set (rtx insn)
2845 rtx pat = PATTERN (insn);
2848 if (GET_CODE (pat) == SET)
2849 mark_set (pat, insn);
2850 else if (GET_CODE (pat) == PARALLEL)
2851 for (i = 0; i < XVECLEN (pat, 0); i++)
2853 rtx x = XVECEXP (pat, 0, i);
2855 if (GET_CODE (x) == SET)
2857 else if (GET_CODE (x) == CLOBBER)
2858 mark_clobber (x, insn);
2859 else if (GET_CODE (x) == CALL)
2863 else if (GET_CODE (pat) == CLOBBER)
2864 mark_clobber (pat, insn);
2865 else if (GET_CODE (pat) == CALL)
2870 /* Classic GCSE reaching definition support. */
2872 /* Allocate reaching def variables. */
2875 alloc_rd_mem (int n_blocks, int n_insns)
2877 rd_kill = sbitmap_vector_alloc (n_blocks, n_insns);
2878 sbitmap_vector_zero (rd_kill, n_blocks);
2880 rd_gen = sbitmap_vector_alloc (n_blocks, n_insns);
2881 sbitmap_vector_zero (rd_gen, n_blocks);
2883 reaching_defs = sbitmap_vector_alloc (n_blocks, n_insns);
2884 sbitmap_vector_zero (reaching_defs, n_blocks);
2886 rd_out = sbitmap_vector_alloc (n_blocks, n_insns);
2887 sbitmap_vector_zero (rd_out, n_blocks);
2890 /* Free reaching def variables. */
2895 sbitmap_vector_free (rd_kill);
2896 sbitmap_vector_free (rd_gen);
2897 sbitmap_vector_free (reaching_defs);
2898 sbitmap_vector_free (rd_out);
2901 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2904 handle_rd_kill_set (rtx insn, int regno, basic_block bb)
2906 struct reg_set *this_reg;
2908 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2909 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2910 SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2913 /* Compute the set of kills for reaching definitions. */
2916 compute_kill_rd (void)
2924 For each set bit in `gen' of the block (i.e. each insn which
2925 generates a definition in the block)
2926 Call the reg set by the insn corresponding to that bit regx
2927 Look at the linked list starting at reg_set_table[regx]
2928 For each setting of regx in the linked list, which is not in
2929 this block
2930 Set the bit in `kill' corresponding to that insn. */
2932 for (cuid = 0; cuid < max_cuid; cuid++)
2933 if (TEST_BIT (rd_gen[bb->index], cuid))
2935 rtx insn = CUID_INSN (cuid);
2936 rtx pat = PATTERN (insn);
2938 if (GET_CODE (insn) == CALL_INSN)
2940 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2941 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2942 handle_rd_kill_set (insn, regno, bb);
2945 if (GET_CODE (pat) == PARALLEL)
2947 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2949 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2951 if ((code == SET || code == CLOBBER)
2952 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2953 handle_rd_kill_set (insn,
2954 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2958 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2959 /* Each setting of this register outside of this block
2960 must be marked in the set of kills in this block. */
2961 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2965 /* Compute the reaching definitions as in
2966 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2967 Chapter 10. It is the same algorithm as used for computing available
2968 expressions but applied to the gens and kills of reaching definitions. */
2973 int changed, passes;
2977 sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);
2986 sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
2987 changed |= sbitmap_union_of_diff_cg (rd_out[bb->index], rd_gen[bb->index],
2988 reaching_defs[bb->index], rd_kill[bb->index]);
2994 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
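/* A sketch of the equations solved by the iteration above, in the
   notation of the dataflow references cited at the top of the file:

     reaching_defs[b] = UNION over preds p of b: rd_out[p]
     rd_out[b]        = rd_gen[b] UNION (reaching_defs[b] - rd_kill[b])

   iterated until no rd_out bit changes.  */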
2997 /* Classic GCSE available expression support. */
2999 /* Allocate memory for available expression computation. */
3002 alloc_avail_expr_mem (int n_blocks, int n_exprs)
3004 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3005 sbitmap_vector_zero (ae_kill, n_blocks);
3007 ae_gen = sbitmap_vector_alloc (n_blocks, n_exprs);
3008 sbitmap_vector_zero (ae_gen, n_blocks);
3010 ae_in = sbitmap_vector_alloc (n_blocks, n_exprs);
3011 sbitmap_vector_zero (ae_in, n_blocks);
3013 ae_out = sbitmap_vector_alloc (n_blocks, n_exprs);
3014 sbitmap_vector_zero (ae_out, n_blocks);
3018 free_avail_expr_mem (void)
3020 sbitmap_vector_free (ae_kill);
3021 sbitmap_vector_free (ae_gen);
3022 sbitmap_vector_free (ae_in);
3023 sbitmap_vector_free (ae_out);
3026 /* Compute the set of available expressions generated in each basic block. */
3029 compute_ae_gen (struct hash_table *expr_hash_table)
3035 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
3036 This is all we have to do because an expression is not recorded if it
3037 is not available, and the only expressions we want to work with are the
3038 ones that are recorded. */
3039 for (i = 0; i < expr_hash_table->size; i++)
3040 for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
3041 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
3042 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3045 /* Return nonzero if expression X is killed in BB. */
3048 expr_killed_p (rtx x, basic_block bb)
3057 code = GET_CODE (x);
3061 return TEST_BIT (reg_set_in_block[bb->index], REGNO (x));
3064 if (load_killed_in_block_p (bb, get_max_uid () + 1, x, 0))
3067 return expr_killed_p (XEXP (x, 0), bb);
3085 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3089 /* If we are about to do the last recursive call
3090 needed at this level, change it into iteration.
3091 This function is called enough to be worth it. */
3092 if (i == 0)
3093 return expr_killed_p (XEXP (x, i), bb);
3094 else if (expr_killed_p (XEXP (x, i), bb))
3097 else if (fmt[i] == 'E')
3098 for (j = 0; j < XVECLEN (x, i); j++)
3099 if (expr_killed_p (XVECEXP (x, i, j), bb))
3106 /* Compute the set of available expressions killed in each basic block. */
3109 compute_ae_kill (sbitmap *ae_gen, sbitmap *ae_kill,
3110 struct hash_table *expr_hash_table)
3117 for (i = 0; i < expr_hash_table->size; i++)
3118 for (expr = expr_hash_table->table[i]; expr; expr = expr->next_same_hash)
3120 /* Skip EXPR if generated in this block. */
3121 if (TEST_BIT (ae_gen[bb->index], expr->bitmap_index))
3124 if (expr_killed_p (expr->expr, bb))
3125 SET_BIT (ae_kill[bb->index], expr->bitmap_index);
3129 /* Actually perform the Classic GCSE optimizations. */
3131 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3133 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3134 as a positive reach. We want to do this when there are two computations
3135 of the expression in the block.
3137 VISITED is a pointer to a working buffer for tracking which BB's have
3138 been visited. It is NULL for the top-level call.
3140 We treat reaching expressions that go through blocks containing the same
3141 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3142 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3143 2 as not reaching. The intent is to improve the probability of finding
3144 only one reaching expression and to reduce register lifetimes by picking
3145 the closest such expression. */
3148 expr_reaches_here_p_work (struct occr *occr, struct expr *expr,
3149 basic_block bb, int check_self_loop, char *visited)
3153 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
3155 basic_block pred_bb = pred->src;
3157 if (visited[pred_bb->index])
3158 /* This predecessor has already been visited. Nothing to do. */
3160 else if (pred_bb == bb)
3162 /* BB loops on itself. */
3163 if (check_self_loop
3164 && TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index)
3165 && BLOCK_NUM (occr->insn) == pred_bb->index)
3166 return 1;
3168 visited[pred_bb->index] = 1;
3171 /* Ignore this predecessor if it kills the expression. */
3172 else if (TEST_BIT (ae_kill[pred_bb->index], expr->bitmap_index))
3173 visited[pred_bb->index] = 1;
3175 /* Does this predecessor generate this expression? */
3176 else if (TEST_BIT (ae_gen[pred_bb->index], expr->bitmap_index))
3178 /* Is this the occurrence we're looking for?
3179 Note that there's only one generating occurrence per block
3180 so we just need to check the block number. */
3181 if (BLOCK_NUM (occr->insn) == pred_bb->index)
3184 visited[pred_bb->index] = 1;
3187 /* Neither gen nor kill. */
3190 visited[pred_bb->index] = 1;
3191 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
3198 /* All paths have been checked. */
3202 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3203 memory allocated for that function is freed. */
3206 expr_reaches_here_p (struct occr *occr, struct expr *expr, basic_block bb,
3207 int check_self_loop)
3210 char *visited = xcalloc (last_basic_block, 1);
3212 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
3218 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3219 If there is more than one such instruction, return NULL.
3221 Called only by handle_avail_expr. */
3224 computing_insn (struct expr *expr, rtx insn)
3226 basic_block bb = BLOCK_FOR_INSN (insn);
3228 if (expr->avail_occr->next == NULL)
3230 if (BLOCK_FOR_INSN (expr->avail_occr->insn) == bb)
3231 /* The available expression is actually itself
3232 (i.e. a loop in the flow graph) so do nothing. */
3235 /* (FIXME) Case that we found a pattern that was created by
3236 a substitution that took place. */
3237 return expr->avail_occr->insn;
3241 /* Pattern is computed more than once.
3242 Search backwards from this insn to see how many of these
3243 computations actually reach this insn. */
3245 rtx insn_computes_expr = NULL;
3248 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3250 if (BLOCK_FOR_INSN (occr->insn) == bb)
3252 /* The expression is generated in this block.
3253 The only time we care about this is when the expression
3254 is generated later in the block [and thus there's a loop].
3255 We let the normal cse pass handle the other cases. */
3256 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3257 && expr_reaches_here_p (occr, expr, bb, 1))
3263 insn_computes_expr = occr->insn;
3266 else if (expr_reaches_here_p (occr, expr, bb, 0))
3272 insn_computes_expr = occr->insn;
3276 if (insn_computes_expr == NULL)
3279 return insn_computes_expr;
3283 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3284 Only called by can_disregard_other_sets. */
3287 def_reaches_here_p (rtx insn, rtx def_insn)
3291 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3294 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3296 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3298 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3299 return 0;
3300 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3301 reg = XEXP (PATTERN (def_insn), 0);
3302 else if (GET_CODE (PATTERN (def_insn)) == SET)
3303 reg = SET_DEST (PATTERN (def_insn));
3307 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3316 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3317 value returned is the number of definitions that reach INSN. Returning a
3318 value of zero means that [maybe] more than one definition reaches INSN and
3319 the caller can't perform whatever optimization it is trying; i.e. it is
3320 always safe to return zero. */
3323 can_disregard_other_sets (struct reg_set **addr_this_reg, rtx insn, int for_combine)
3325 int number_of_reaching_defs = 0;
3326 struct reg_set *this_reg;
3328 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3329 if (def_reaches_here_p (insn, this_reg->insn))
3331 number_of_reaching_defs++;
3332 /* Ignore parallels for now. */
3333 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3334 break;
3336 if (!for_combine
3337 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3338 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3339 SET_SRC (PATTERN (insn)))))
3340 /* A setting of the reg to a different value reaches INSN. */
3343 if (number_of_reaching_defs > 1)
3345 /* If in this setting the value the register is being set to is
3346 equal to the previous value the register was set to, and this
3347 setting reaches the insn we are trying to do the substitution
3348 on, then we are OK. */
3349 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3351 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3352 SET_SRC (PATTERN (insn))))
3356 *addr_this_reg = this_reg;
3359 return number_of_reaching_defs;
3362 /* Expression computed by insn is available and the substitution is legal,
3363 so try to perform the substitution.
3365 The result is nonzero if any changes were made. */
3368 handle_avail_expr (rtx insn, struct expr *expr)
3370 rtx pat, insn_computes_expr, expr_set;
3372 struct reg_set *this_reg;
3373 int found_setting, use_src;
3376 /* We only handle the case where one computation of the expression
3377 reaches this instruction. */
3378 insn_computes_expr = computing_insn (expr, insn);
3379 if (insn_computes_expr == NULL)
3381 expr_set = single_set (insn_computes_expr);
3388 /* At this point we know only one computation of EXPR outside of this
3389 block reaches this insn. Now try to find a register that the
3390 expression is computed into. */
3391 if (GET_CODE (SET_SRC (expr_set)) == REG)
3393 /* This is the case when the available expression that reaches
3394 here has already been handled as an available expression. */
3395 unsigned int regnum_for_replacing
3396 = REGNO (SET_SRC (expr_set));
3398 /* If the register was created by GCSE, we can't use `reg_set_table';
3399 however, we know it's set only once. */
3400 if (regnum_for_replacing >= max_gcse_regno
3401 /* If the register the expression is computed into is set only once,
3402 or only one set reaches this insn, we can use it. */
3403 || (((this_reg = reg_set_table[regnum_for_replacing]),
3404 this_reg->next == NULL)
3405 || can_disregard_other_sets (&this_reg, insn, 0)))
3414 unsigned int regnum_for_replacing
3415 = REGNO (SET_DEST (expr_set));
3417 /* This shouldn't happen. */
3418 if (regnum_for_replacing >= max_gcse_regno)
3421 this_reg = reg_set_table[regnum_for_replacing];
3423 /* If the register the expression is computed into is set only once,
3424 or only one set reaches this insn, use it. */
3425 if (this_reg->next == NULL
3426 || can_disregard_other_sets (&this_reg, insn, 0))
3432 pat = PATTERN (insn);
3433 if (use_src)
3434 to = SET_SRC (expr_set);
3435 else
3436 to = SET_DEST (expr_set);
3437 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3439 /* We should be able to ignore the return code from validate_change but
3440 to play it safe we check. */
3444 if (gcse_file != NULL)
3446 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3448 fprintf (gcse_file, " reg %d %s insn %d\n",
3449 REGNO (to), use_src ? "from" : "set in",
3450 INSN_UID (insn_computes_expr));
3455 /* The register that the expr is computed into is set more than once. */
3456 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3458 /* Insert an insn after INSNX that copies the reg set in INSNX
3459 into a new pseudo register; call this new register REGN.
3460 From INSNB until the end of the basic block, or until REGB is
3461 set, replace all uses of REGB with REGN. */
3464 to = gen_reg_rtx (GET_MODE (SET_DEST (expr_set)));
3466 /* Generate the new insn. */
3467 /* ??? If the change fails, we return 0, even though we created
3468 an insn. I think this is ok. */
3470 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3471 SET_DEST (expr_set)),
3472 insn_computes_expr);
3474 /* Keep register set table up to date. */
3475 record_one_set (REGNO (to), new_insn);
3477 gcse_create_count++;
3478 if (gcse_file != NULL)
3480 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3481 INSN_UID (NEXT_INSN (insn_computes_expr)),
3482 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3483 fprintf (gcse_file, ", computed in insn %d,\n",
3484 INSN_UID (insn_computes_expr));
3485 fprintf (gcse_file, " into newly allocated reg %d\n",
3489 pat = PATTERN (insn);
3491 /* Do register replacement for INSN. */
3492 changed = validate_change (insn, &SET_SRC (pat),
3494 (NEXT_INSN (insn_computes_expr))),
3497 /* We should be able to ignore the return code from validate_change but
3498 to play it safe we check. */
3502 if (gcse_file != NULL)
3505 "GCSE: Replacing the source in insn %d with reg %d ",
3507 REGNO (SET_DEST (PATTERN (NEXT_INSN
3508 (insn_computes_expr)))));
3509 fprintf (gcse_file, "set in insn %d\n",
3510 INSN_UID (insn_computes_expr));
3518 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3519 the dataflow analysis has been done.
3521 The result is nonzero if a change was made. */
3530 /* Note we start at block 1. */
3532 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3536 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3538 /* Reset tables used to keep track of what's still valid [since the
3539 start of the block]. */
3540 reset_opr_set_tables ();
3542 for (insn = bb->head;
3543 insn != NULL && insn != NEXT_INSN (bb->end);
3544 insn = NEXT_INSN (insn))
3546 /* Is insn of form (set (pseudo-reg) ...)? */
3547 if (GET_CODE (insn) == INSN
3548 && GET_CODE (PATTERN (insn)) == SET
3549 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3550 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3552 rtx pat = PATTERN (insn);
3553 rtx src = SET_SRC (pat);
3556 if (want_to_gcse_p (src)
3557 /* Is the expression recorded? */
3558 && ((expr = lookup_expr (src, &expr_hash_table)) != NULL)
3559 /* Is the expression available [at the start of the block]? */
3561 && TEST_BIT (ae_in[bb->index], expr->bitmap_index)
3562 /* Are the operands unchanged since the start of the block? */
3564 && oprs_not_set_p (src, insn))
3565 changed |= handle_avail_expr (insn, expr);
3568 /* Keep track of everything modified by this insn. */
3569 /* ??? Need to be careful w.r.t. mods done to INSN. */
3571 mark_oprs_set (insn);
3578 /* Top level routine to perform one classic GCSE pass.
3580 Return nonzero if a change was made. */
3583 one_classic_gcse_pass (int pass)
3587 gcse_subst_count = 0;
3588 gcse_create_count = 0;
3590 alloc_hash_table (max_cuid, &expr_hash_table, 0);
3591 alloc_rd_mem (last_basic_block, max_cuid);
3592 compute_hash_table (&expr_hash_table);
3594 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
3596 if (expr_hash_table.n_elems > 0)
3600 alloc_avail_expr_mem (last_basic_block, expr_hash_table.n_elems);
3601 compute_ae_gen (&expr_hash_table);
3602 compute_ae_kill (ae_gen, ae_kill, &expr_hash_table);
3603 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3604 changed = classic_gcse ();
3605 free_avail_expr_mem ();
3609 free_hash_table (&expr_hash_table);
3613 fprintf (gcse_file, "\n");
3614 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3615 current_function_name, pass, bytes_used, gcse_subst_count);
3616 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3622 /* Compute copy/constant propagation working variables. */
3624 /* Local properties of assignments. */
3625 static sbitmap *cprop_pavloc;
3626 static sbitmap *cprop_absaltered;
3628 /* Global properties of assignments (computed from the local properties). */
3629 static sbitmap *cprop_avin;
3630 static sbitmap *cprop_avout;
3632 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3633 basic blocks. N_SETS is the number of sets. */
3636 alloc_cprop_mem (int n_blocks, int n_sets)
3638 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3639 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3641 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3642 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3645 /* Free vars used by copy/const propagation. */
3648 free_cprop_mem (void)
3650 sbitmap_vector_free (cprop_pavloc);
3651 sbitmap_vector_free (cprop_absaltered);
3652 sbitmap_vector_free (cprop_avin);
3653 sbitmap_vector_free (cprop_avout);
3656 /* For each block, compute whether X is transparent. X is either an
3657 expression or an assignment [though we don't care which, for this context
3658 an assignment is treated as an expression]. For each block where an
3659 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX bit in BMAP. */
3663 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
3671 /* repeat is used to turn tail-recursion into iteration since GCC
3672 can't do it when there's no return value. */
3678 code = GET_CODE (x);
3684 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3687 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3688 SET_BIT (bmap[bb->index], indx);
3692 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3693 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3698 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3701 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
3702 RESET_BIT (bmap[bb->index], indx);
3706 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3707 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3716 rtx list_entry = canon_modify_mem_list[bb->index];
3720 rtx dest, dest_addr;
3722 if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
3724 if (set_p)
3725 SET_BIT (bmap[bb->index], indx);
3726 else
3727 RESET_BIT (bmap[bb->index], indx);
3730 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3731 Examine each hunk of memory that is modified. */
3733 dest = XEXP (list_entry, 0);
3734 list_entry = XEXP (list_entry, 1);
3735 dest_addr = XEXP (list_entry, 0);
3737 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
3738 x, rtx_addr_varies_p))
3740 if (set_p)
3741 SET_BIT (bmap[bb->index], indx);
3742 else
3743 RESET_BIT (bmap[bb->index], indx);
3746 list_entry = XEXP (list_entry, 1);
3769 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3773 /* If we are about to do the last recursive call
3774 needed at this level, change it into iteration.
3775 This function is called enough to be worth it. */
3782 compute_transp (XEXP (x, i), indx, bmap, set_p);
3784 else if (fmt[i] == 'E')
3785 for (j = 0; j < XVECLEN (x, i); j++)
3786 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
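/* A sketch of how the two SET_P modes above are used (the bitmap names
   here are illustrative): starting from an all-ones TRANSP,

     compute_transp (expr, i, transp, 0);

   clears bit I in every block where some operand of EXPR is modified,
   leaving it set exactly where EXPR is transparent; starting from an
   all-zeros ABSALTERED,

     compute_transp (expr, i, absaltered, 1);

   sets bit I in precisely those altered blocks, as done by
   compute_local_properties for copy/const propagation below.  */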
3790 /* Top level routine to do the dataflow analysis needed by copy/const
3794 compute_cprop_data (void)
3796 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
3797 compute_available (cprop_pavloc, cprop_absaltered,
3798 cprop_avout, cprop_avin);
3801 /* Copy/constant propagation. */
3803 /* Maximum number of register uses in an insn that we handle. */
3804 #define MAX_USES 8
3806 /* Table of uses found in an insn.
3807 Allocated statically to avoid alloc/free complexity and overhead. */
3808 static struct reg_use reg_use_table[MAX_USES];
3810 /* Index into `reg_use_table' while building it. */
3811 static int reg_use_count;
3813 /* Set up a list of register numbers used in INSN. The found uses are stored
3814 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3815 and contains the number of uses in the table upon exit.
3817 ??? If a register appears multiple times we will record it multiple times.
3818 This doesn't hurt anything but it will slow things down. */
3821 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
3828 /* repeat is used to turn tail-recursion into iteration since GCC
3829 can't do it when there's no return value. */
3834 code = GET_CODE (x);
3837 if (reg_use_count == MAX_USES)
3840 reg_use_table[reg_use_count].reg_rtx = x;
3844 /* Recursively scan the operands of this expression. */
3846 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3850 /* If we are about to do the last recursive call
3851 needed at this level, change it into iteration.
3852 This function is called enough to be worth it. */
3859 find_used_regs (&XEXP (x, i), data);
3861 else if (fmt[i] == 'E')
3862 for (j = 0; j < XVECLEN (x, i); j++)
3863 find_used_regs (&XVECEXP (x, i, j), data);
3867 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3868 Returns nonzero if successful. */
3871 try_replace_reg (rtx from, rtx to, rtx insn)
3873 rtx note = find_reg_equal_equiv_note (insn);
3874 rtx src = 0;
3875 int success = 0;
3876 rtx set = single_set (insn);
3878 validate_replace_src_group (from, to, insn);
3879 if (num_changes_pending () && apply_change_group ())
3880 success = 1;
3882 /* Try to simplify SET_SRC if we have substituted a constant. */
3883 if (success && set && CONSTANT_P (to))
3885 src = simplify_rtx (SET_SRC (set));
3887 if (src)
3888 validate_change (insn, &SET_SRC (set), src, 0);
3891 /* If there is already a NOTE, update the expression in it with our replacement. */
3894 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3896 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
3898 /* If above failed and this is a single set, try to simplify the source of
3899 the set given our substitution. We could perhaps try this for multiple
3900 SETs, but it probably won't buy us anything. */
3901 src = simplify_replace_rtx (SET_SRC (set), from, to);
3903 if (!rtx_equal_p (src, SET_SRC (set))
3904 && validate_change (insn, &SET_SRC (set), src, 0))
3907 /* If we've failed to do replacement, have a single SET, don't already
3908 have a note, and have no special SET, add a REG_EQUAL note to not
3909 lose information. */
3910 if (!success && note == 0 && set != 0
3911 && GET_CODE (XEXP (set, 0)) != ZERO_EXTRACT
3912 && GET_CODE (XEXP (set, 0)) != SIGN_EXTRACT)
3913 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
3916 /* The REG_EQUAL note may get simplified into a register.
3917 We don't allow that; remove the note. This case ought not
3918 to happen, because the previous code ought to have synthesized
3919 a reg-reg move, but be on the safe side. */
3920 if (note && REG_P (XEXP (note, 0)))
3921 remove_note (insn, note);
3926 /* Find a set of REGNOs that are available on entry to INSN's block.
3927 Returns NULL if no such set is found. */
3929 static struct expr *
3930 find_avail_set (int regno, rtx insn)
3932 /* SET1 contains the last set found that can be returned to the caller for
3933 use in a substitution. */
3934 struct expr *set1 = 0;
3936 /* Loops are not possible here. To get a loop we would need two sets
3937 available at the start of the block containing INSN. i.e. we would
3938 need two sets like this available at the start of the block:
3940 (set (reg X) (reg Y))
3941 (set (reg Y) (reg X))
3943 This cannot happen since the set of (reg Y) would have killed the
3944 set of (reg X) making it unavailable at the start of this block. */
3948 struct expr *set = lookup_set (regno, &set_hash_table);
3950 /* Find a set that is available at the start of the block
3951 which contains INSN. */
3954 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3956 set = next_set (regno, set);
3959 /* If no available set was found we've reached the end of the
3960 (possibly empty) copy chain. */
3964 if (GET_CODE (set->expr) != SET)
3967 src = SET_SRC (set->expr);
3969 /* We know the set is available.
3970 Now check that SRC is ANTLOC (i.e. none of the source operands
3971 have changed since the start of the block).
3973 If the source operand changed, we may still use it for the next
3974 iteration of this loop, but we may not use it for substitutions. */
3976 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
3979 /* If the source of the set is anything except a register, then
3980 we have reached the end of the copy chain. */
3981 if (GET_CODE (src) != REG)
3984 /* Follow the copy chain, i.e. start another iteration of the loop
3985 and see if we have an available copy into SRC. */
3986 regno = REGNO (src);
3989 /* SET1 holds the last set that was available and anticipatable at INSN. */
3994 /* Subroutine of cprop_insn that tries to propagate constants into
3995 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
3996 it is the instruction that immediately precedes JUMP, and must be a
3997 single SET of a register. FROM is what we will try to replace,
3998 SRC is the constant we will try to substitute for it. Returns nonzero
3999 if a change was made. */
4002 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
4004 rtx new, set_src, note_src;
4005 rtx set = pc_set (jump);
4006 rtx note = find_reg_equal_equiv_note (jump);
4010 note_src = XEXP (note, 0);
4011 if (GET_CODE (note_src) == EXPR_LIST)
4012 note_src = NULL_RTX;
4014 else note_src = NULL_RTX;
4016 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4017 set_src = note_src ? note_src : SET_SRC (set);
4019 /* First substitute the SETCC condition into the JUMP instruction,
4020 then substitute the given values into this expanded JUMP. */
4021 if (setcc != NULL_RTX
4022 && !modified_between_p (from, setcc, jump)
4023 && !modified_between_p (src, setcc, jump))
4026 rtx setcc_set = single_set (setcc);
4027 rtx setcc_note = find_reg_equal_equiv_note (setcc);
4028 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
4029 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
4030 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
4036 new = simplify_replace_rtx (set_src, from, src);
4038 /* If no simplification can be made, then try the next register. */
4039 if (rtx_equal_p (new, SET_SRC (set)))
4040 return 0;
4042 /* If this is now a no-op, delete it; otherwise this must be a valid insn. */
4047 /* Ensure the value computed inside the jump insn is equivalent
4048 to the one computed by setcc. */
4049 if (setcc && modified_in_p (new, setcc))
4050 return 0;
4051 if (! validate_change (jump, &SET_SRC (set), new, 0))
4053 /* When (some) constants are not valid in a comparison, and there
4054 are two registers to be replaced by constants before the entire
4055 comparison can be folded into a constant, we need to keep
4056 intermediate information in REG_EQUAL notes. For targets with
4057 separate compare insns, such notes are added by try_replace_reg.
4058 When we have a combined compare-and-branch instruction, however,
4059 we need to attach a note to the branch itself to make this
4060 optimization work. */
4062 if (!rtx_equal_p (new, note_src))
4063 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
4067 /* Remove REG_EQUAL note after simplification. */
4069 remove_note (jump, note);
4071 /* If this has turned into an unconditional jump,
4072 then put a barrier after it so that the unreachable
4073 code will be deleted. */
4074 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
4075 emit_barrier_after (jump);
4079 /* Delete the cc0 setter. */
4080 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
4081 delete_insn (setcc);
4084 run_jump_opt_after_gcse = 1;
4087 if (gcse_file != NULL)
4090 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4091 REGNO (from), INSN_UID (jump));
4092 print_rtl (gcse_file, src);
4093 fprintf (gcse_file, "\n");
4095 purge_dead_edges (bb);
4101 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
4105 /* Check for reg or cc0 setting instructions followed by
4106 conditional branch instructions first. */
4107 if (alter_jumps
4108 && (sset = single_set (insn)) != NULL
4109 && NEXT_INSN (insn)
4110 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
4112 rtx dest = SET_DEST (sset);
4113 if ((REG_P (dest) || CC0_P (dest))
4114 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
4118 /* Handle normal insns next. */
4119 if (GET_CODE (insn) == INSN
4120 && try_replace_reg (from, to, insn))
4123 /* Try to propagate a CONST_INT into a conditional jump.
4124 We're pretty specific about what we will handle in this
4125 code; we can extend this as necessary over time.
4127 Right now the insn in question must look like
4128 (set (pc) (if_then_else ...)) */
4129 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
4130 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4134 /* Perform constant and copy propagation on INSN.
4135 The result is nonzero if a change was made. */
4138 cprop_insn (rtx insn, int alter_jumps)
4140 struct reg_use *reg_used;
4147 reg_use_count = 0;
4148 note_uses (&PATTERN (insn), find_used_regs, NULL);
4150 note = find_reg_equal_equiv_note (insn);
4152 /* We may win even when propagating constants into notes. */
4154 find_used_regs (&XEXP (note, 0), NULL);
4156 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4157 reg_used++, reg_use_count--)
4159 unsigned int regno = REGNO (reg_used->reg_rtx);
4163 /* Ignore registers created by GCSE.
4164 We do this because ... */
4165 if (regno >= max_gcse_regno)
4168 /* If the register has already been set in this block, there's
4169 nothing we can do. */
4170 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
4173 /* Find an assignment that sets reg_used and is available
4174 at the start of the block. */
4175 set = find_avail_set (regno, insn);
4180 /* ??? We might be able to handle PARALLELs. Later. */
4181 if (GET_CODE (pat) != SET)
4184 src = SET_SRC (pat);
4186 /* Constant propagation. */
4187 if (gcse_constant_p (src))
4189 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
4193 if (gcse_file != NULL)
4195 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
4196 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
4197 print_rtl (gcse_file, src);
4198 fprintf (gcse_file, "\n");
4200 if (INSN_DELETED_P (insn))
4204 else if (GET_CODE (src) == REG
4205 && REGNO (src) >= FIRST_PSEUDO_REGISTER
4206 && REGNO (src) != regno)
4208 if (try_replace_reg (reg_used->reg_rtx, src, insn))
4212 if (gcse_file != NULL)
4214 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4215 regno, INSN_UID (insn));
4216 fprintf (gcse_file, " with reg %d\n", REGNO (src));
4219 /* The original insn setting reg_used may or may not now be
4220 deletable. We leave the deletion to flow. */
4221 /* FIXME: If it turns out that the insn isn't deletable,
4222 then we may have unnecessarily extended register lifetimes
4223 and made things worse. */
4231 /* Like find_used_regs, but avoid recording uses that appear in
4232 input-output contexts such as zero_extract or pre_dec. This
4233 restricts the cases we consider to those for which local cprop
4234 can legitimately make replacements. */
4237 local_cprop_find_used_regs (rtx *xptr, void *data)
4244 switch (GET_CODE (x))
4248 case STRICT_LOW_PART:
4257 /* Can only legitimately appear this early in the context of
4258 stack pushes for function arguments, but handle all of the
4259 codes nonetheless. */
4263 /* Setting a subreg of a register larger than word_mode leaves
4264 the non-written words unchanged. */
4265 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
4273 find_used_regs (xptr, data);
4276 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4277 their REG_EQUAL notes need updating. */
4280 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
4282 rtx newreg = NULL, newcnst = NULL;
4284 /* Rule out USE instructions and ASM statements as we don't want to
4285 change the hard registers mentioned. */
4286 if (GET_CODE (x) == REG
4287 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
4288 || (GET_CODE (PATTERN (insn)) != USE
4289 && asm_noperands (PATTERN (insn)) < 0)))
4291 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
4292 struct elt_loc_list *l;
4296 for (l = val->locs; l; l = l->next)
4298 rtx this_rtx = l->loc;
4304 if (gcse_constant_p (this_rtx))
4306 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
4307	      /* Don't copy propagate if it has an attached REG_EQUIV note.
4308		 At this point only function parameters should have
4309		 REG_EQUIV notes and if the argument slot is used somewhere
4310		 explicitly, it means the address of the parameter has been taken,
4311		 so we should not extend the lifetime of the pseudo.  */
4312 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
4313 || GET_CODE (XEXP (note, 0)) != MEM))
4316 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
4318	  /* If we find a case where we can't make the retval REG_EQUAL notes
4319	     match the new register, we either have to abandon this replacement
4320	     or fix delete_trivially_dead_insns to preserve the setting insn,
4321	     or make it delete the REG_EQUAL note, and fix up all passes that
4322	     require the REG_EQUAL note there.  */
4323 if (!adjust_libcall_notes (x, newcnst, insn, libcall_sp))
4325 if (gcse_file != NULL)
4327 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
4329 fprintf (gcse_file, "insn %d with constant ",
4331 print_rtl (gcse_file, newcnst);
4332 fprintf (gcse_file, "\n");
4337 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
4339 adjust_libcall_notes (x, newreg, insn, libcall_sp);
4340 if (gcse_file != NULL)
4343 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4344 REGNO (x), INSN_UID (insn));
4345 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
4354 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4355 their REG_EQUAL notes need updating to reflect that OLDREG has been
4356   replaced with NEWVAL in INSN.  Return true if all substitutions could be performed.  */
4359 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
4363 while ((end = *libcall_sp++))
4365 rtx note = find_reg_equal_equiv_note (end);
4372 if (reg_set_between_p (newval, PREV_INSN (insn), end))
4376 note = find_reg_equal_equiv_note (end);
4379 if (reg_mentioned_p (newval, XEXP (note, 0)))
4382 while ((end = *libcall_sp++));
4386 XEXP (note, 0) = replace_rtx (XEXP (note, 0), oldreg, newval);
4392 #define MAX_NESTED_LIBCALLS 9
4395 local_cprop_pass (int alter_jumps)
4398 struct reg_use *reg_used;
4399 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
4400 bool changed = false;
4403 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
4405 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4409 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
4413 if (libcall_sp == libcall_stack)
4415 *--libcall_sp = XEXP (note, 0);
4417 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4420 note = find_reg_equal_equiv_note (insn);
4424 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
4426 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
4428	  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4429 reg_used++, reg_use_count--)
4430 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
4436 if (INSN_DELETED_P (insn))
4439 while (reg_use_count);
4441 cselib_process_insn (insn);
4444 /* Global analysis may get into infinite loops for unreachable blocks. */
4445 if (changed && alter_jumps)
4447 delete_unreachable_blocks ();
4448 free_reg_set_mem ();
4449 alloc_reg_set_mem (max_reg_num ());
4450 compute_sets (get_insns ());
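/* For illustration (a sketch with invented pseudo numbers), local
   cprop works directly on the insn stream using cselib value numbers,
   with no global availability analysis:

	r60 = 5
	r61 = r60 + r62		; becomes r61 = 5 + r62

   cselib only records values that are still valid at the current insn,
   which is why the loop above can substitute without the dataflow
   bitmaps used by the global pass.  */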
4454 /* Forward propagate copies. This includes copies and constants. Return
4455 nonzero if a change was made. */
4458 cprop (int alter_jumps)
4464 /* Note we start at block 1. */
4465 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4467 if (gcse_file != NULL)
4468 fprintf (gcse_file, "\n");
4473 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
4475 /* Reset tables used to keep track of what's still valid [since the
4476 start of the block]. */
4477 reset_opr_set_tables ();
4479 for (insn = bb->head;
4480 insn != NULL && insn != NEXT_INSN (bb->end);
4481 insn = NEXT_INSN (insn))
4484 changed |= cprop_insn (insn, alter_jumps);
4486 /* Keep track of everything modified by this insn. */
4487 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4488 call mark_oprs_set if we turned the insn into a NOTE. */
4489 if (GET_CODE (insn) != NOTE)
4490 mark_oprs_set (insn);
4494 if (gcse_file != NULL)
4495 fprintf (gcse_file, "\n");
4500 /* Similar to get_condition, only the resulting condition must be
4501 valid at JUMP, instead of at EARLIEST.
4503 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4504 settle for the condition variable in the jump instruction being integral.
4505 We prefer to be able to record the value of a user variable, rather than
4506 the value of a temporary used in a condition. This could be solved by
4507   recording the value of *every* register scanned by canonicalize_condition,
4508 but this would require some code reorganization. */
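/* A sketch of the two attempts made below.  The first
   canonicalize_condition call follows the setter chain as far back as
   it can, ideally yielding a condition on a user variable, e.g.
   "(eq (reg x) (const_int 2))".  If that condition's register is
   modified between EARLIEST and JUMP, the fallback retries with
   WANT_REG set to the condition's first operand, asking for a partial
   result that does not follow all the substitutions and is therefore
   valid closer to the jump.  */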
4511 fis_get_condition (rtx jump)
4513 rtx cond, set, tmp, insn, earliest;
4516 if (! any_condjump_p (jump))
4519 set = pc_set (jump);
4520 cond = XEXP (SET_SRC (set), 0);
4522 /* If this branches to JUMP_LABEL when the condition is false,
4523 reverse the condition. */
4524 reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4525 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
4527 /* Use canonicalize_condition to do the dirty work of manipulating
4528 MODE_CC values and COMPARE rtx codes. */
4529 tmp = canonicalize_condition (jump, cond, reverse, &earliest, NULL_RTX,
4534 /* Verify that the given condition is valid at JUMP by virtue of not
4535 having been modified since EARLIEST. */
4536 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4537 if (INSN_P (insn) && modified_in_p (tmp, insn))
4542 /* The condition was modified. See if we can get a partial result
4543 that doesn't follow all the reversals. Perhaps combine can fold
4544 them together later. */
4545 tmp = XEXP (tmp, 0);
4546 if (!REG_P (tmp) || GET_MODE_CLASS (GET_MODE (tmp)) != MODE_INT)
4548 tmp = canonicalize_condition (jump, cond, reverse, &earliest, tmp,
4553 /* For sanity's sake, re-validate the new result. */
4554 for (insn = earliest; insn != jump; insn = NEXT_INSN (insn))
4555 if (INSN_P (insn) && modified_in_p (tmp, insn))
4561 /* Find the implicit sets of a function. An "implicit set" is a constraint
4562 on the value of a variable, implied by a conditional jump. For example,
4563 following "if (x == 2)", the then branch may be optimized as though the
4564 conditional performed an "explicit set", in this example, "x = 2". This
4565   function records the set patterns that are implicit at the start of each basic block.  */
4569 find_implicit_sets (void)
4571 basic_block bb, dest;
4577 /* Check for more than one successor. */
4578 if (bb->succ && bb->succ->succ_next)
4580 cond = fis_get_condition (bb->end);
4583 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
4584 && GET_CODE (XEXP (cond, 0)) == REG
4585 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
4586 && gcse_constant_p (XEXP (cond, 1)))
4588 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
4589 : FALLTHRU_EDGE (bb)->dest;
4591 if (dest && ! dest->pred->pred_next
4592 && dest != EXIT_BLOCK_PTR)
4594 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
4596 implicit_sets[dest->index] = new;
4599 fprintf(gcse_file, "Implicit set of reg %d in ",
4600 REGNO (XEXP (cond, 0)));
4601 fprintf(gcse_file, "basic block %d\n", dest->index);
4609 fprintf (gcse_file, "Found %d implicit sets\n", count);
4612 /* Perform one copy/constant propagation pass.
4613 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4614 propagation into conditional jumps. If BYPASS_JUMPS is true,
4615 perform conditional jump bypassing optimizations. */
4618 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
4622 const_prop_count = 0;
4623 copy_prop_count = 0;
4625 local_cprop_pass (cprop_jumps);
4627 /* Determine implicit sets. */
4628 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
4629 find_implicit_sets ();
4631 alloc_hash_table (max_cuid, &set_hash_table, 1);
4632 compute_hash_table (&set_hash_table);
4634 /* Free implicit_sets before peak usage. */
4635 free (implicit_sets);
4636 implicit_sets = NULL;
4639 dump_hash_table (gcse_file, "SET", &set_hash_table);
4640 if (set_hash_table.n_elems > 0)
4642 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
4643 compute_cprop_data ();
4644 changed = cprop (cprop_jumps);
4646 changed |= bypass_conditional_jumps ();
4650 free_hash_table (&set_hash_table);
4654 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4655 current_function_name, pass, bytes_used);
4656 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4657 const_prop_count, copy_prop_count);
4659 /* Global analysis may get into infinite loops for unreachable blocks. */
4660 if (changed && cprop_jumps)
4661 delete_unreachable_blocks ();
4666 /* Bypass conditional jumps. */
4668 /* The value of last_basic_block at the beginning of the jump_bypass
4669 pass. The use of redirect_edge_and_branch_force may introduce new
4670 basic blocks, but the data flow analysis is only valid for basic
4671 block indices less than bypass_last_basic_block. */
4673 static int bypass_last_basic_block;
4675 /* Find a set of REGNO to a constant that is available at the end of basic
4676 block BB. Returns NULL if no such set is found. Based heavily upon
4679 static struct expr *
4680 find_bypass_set (int regno, int bb)
4682 struct expr *result = 0;
4687 struct expr *set = lookup_set (regno, &set_hash_table);
4691 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
4693 set = next_set (regno, set);
4699 if (GET_CODE (set->expr) != SET)
4702 src = SET_SRC (set->expr);
4703 if (gcse_constant_p (src))
4706 if (GET_CODE (src) != REG)
4709 regno = REGNO (src);
4715 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4716 any of the instructions inserted on an edge. Jump bypassing places
4717 condition code setters on CFG edges using insert_insn_on_edge. This
4718 function is required to check that our data flow analysis is still
4719 valid prior to commit_edge_insertions. */
4722 reg_killed_on_edge (rtx reg, edge e)
4726 for (insn = e->insns; insn; insn = NEXT_INSN (insn))
4727 if (INSN_P (insn) && reg_set_p (reg, insn))
4733 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4734 basic block BB which has more than one predecessor. If not NULL, SETCC
4735 is the first instruction of BB, which is immediately followed by JUMP_INSN
4736 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4737 Returns nonzero if a change was made.
4739 During the jump bypassing pass, we may place copies of SETCC instructions
4740 on CFG edges. The following routine must be careful to pay attention to
4741 these inserted insns when performing its transformations. */
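/* A worked sketch of the bypass (block and pseudo numbers invented).
   Suppose BB contains only

	(set (reg r80) (eq (reg r70) (const_int 0)))	; SETCC
	(jump_insn ... (ne (reg r80) (const_int 0)) ...) ; JUMP

   and one incoming edge comes from a block at whose end "r70 = 1" is
   available.  Substituting shows the branch is not taken on that edge,
   so the edge is redirected straight to the fallthru target, and a
   copy of SETCC is placed on the redirected edge (unless it sets cc0,
   which is dead after the jump).  */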
4744 bypass_block (basic_block bb, rtx setcc, rtx jump)
4747 edge e, enext, edest;
4749 int may_be_loop_header;
4751 insn = (setcc != NULL) ? setcc : jump;
4753 /* Determine set of register uses in INSN. */
4755 note_uses (&PATTERN (insn), find_used_regs, NULL);
4756 note = find_reg_equal_equiv_note (insn);
4758 find_used_regs (&XEXP (note, 0), NULL);
4760 may_be_loop_header = false;
4761 for (e = bb->pred; e; e = e->pred_next)
4762 if (e->flags & EDGE_DFS_BACK)
4764 may_be_loop_header = true;
4769 for (e = bb->pred; e; e = enext)
4771 enext = e->pred_next;
4772 if (e->flags & EDGE_COMPLEX)
4775 /* We can't redirect edges from new basic blocks. */
4776 if (e->src->index >= bypass_last_basic_block)
4779      /* Redirecting edges that enter the loop from outside would create
4780	 irreducible loops, which would decrease the effectiveness of some of
4781	 the following optimizations, so prevent this.  */
4782 if (may_be_loop_header
4783 && !(e->flags & EDGE_DFS_BACK))
4786 for (i = 0; i < reg_use_count; i++)
4788	  struct reg_use *reg_used = &reg_use_table[i];
4789 unsigned int regno = REGNO (reg_used->reg_rtx);
4790 basic_block dest, old_dest;
4794 if (regno >= max_gcse_regno)
4797 set = find_bypass_set (regno, e->src->index);
4802 /* Check the data flow is valid after edge insertions. */
4803 if (e->insns && reg_killed_on_edge (reg_used->reg_rtx, e))
4806 src = SET_SRC (pc_set (jump));
4809 src = simplify_replace_rtx (src,
4810 SET_DEST (PATTERN (setcc)),
4811 SET_SRC (PATTERN (setcc)));
4813 new = simplify_replace_rtx (src, reg_used->reg_rtx,
4814 SET_SRC (set->expr));
4816 /* Jump bypassing may have already placed instructions on
4817 edges of the CFG. We can't bypass an outgoing edge that
4818 has instructions associated with it, as these insns won't
4819 get executed if the incoming edge is redirected. */
4823 edest = FALLTHRU_EDGE (bb);
4824 dest = edest->insns ? NULL : edest->dest;
4826 else if (GET_CODE (new) == LABEL_REF)
4828 dest = BLOCK_FOR_INSN (XEXP (new, 0));
4829 /* Don't bypass edges containing instructions. */
4830 for (edest = bb->succ; edest; edest = edest->succ_next)
4831 if (edest->dest == dest && edest->insns)
4843 && dest != EXIT_BLOCK_PTR)
4845 redirect_edge_and_branch_force (e, dest);
4847 /* Copy the register setter to the redirected edge.
4848 Don't copy CC0 setters, as CC0 is dead after jump. */
4851 rtx pat = PATTERN (setcc);
4852 if (!CC0_P (SET_DEST (pat)))
4853 insert_insn_on_edge (copy_insn (pat), e);
4856 if (gcse_file != NULL)
4858 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4859 regno, INSN_UID (jump));
4860 print_rtl (gcse_file, SET_SRC (set->expr));
4861 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
4862 e->src->index, old_dest->index, dest->index);
4872 /* Find basic blocks with more than one predecessor that only contain a
4873 single conditional jump. If the result of the comparison is known at
4874 compile-time from any incoming edge, redirect that edge to the
4875 appropriate target. Returns nonzero if a change was made.
4877 This function is now mis-named, because we also handle indirect jumps. */
4880 bypass_conditional_jumps (void)
4888 /* Note we start at block 1. */
4889 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
4892 bypass_last_basic_block = last_basic_block;
4893 mark_dfs_back_edges ();
4896 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
4897 EXIT_BLOCK_PTR, next_bb)
4899 /* Check for more than one predecessor. */
4900 if (bb->pred && bb->pred->pred_next)
4903 for (insn = bb->head;
4904 insn != NULL && insn != NEXT_INSN (bb->end);
4905 insn = NEXT_INSN (insn))
4906 if (GET_CODE (insn) == INSN)
4910 if (GET_CODE (PATTERN (insn)) != SET)
4913 dest = SET_DEST (PATTERN (insn));
4914 if (REG_P (dest) || CC0_P (dest))
4919 else if (GET_CODE (insn) == JUMP_INSN)
4921 if ((any_condjump_p (insn) || computed_jump_p (insn))
4922 && onlyjump_p (insn))
4923 changed |= bypass_block (bb, setcc, insn);
4926 else if (INSN_P (insn))
4931 /* If we bypassed any register setting insns, we inserted a
4932 copy on the redirected edge. These need to be committed. */
4934 commit_edge_insertions();
4939 /* Compute PRE+LCM working variables. */
4941 /* Local properties of expressions. */
4942 /* Nonzero for expressions that are transparent in the block. */
4943 static sbitmap *transp;
4945 /* Nonzero for expressions that are transparent at the end of the block.
4946    This is only zero for expressions killed by an abnormal critical edge
4947    created by a call.  */
4948 static sbitmap *transpout;
4950 /* Nonzero for expressions that are computed (available) in the block. */
4951 static sbitmap *comp;
4953 /* Nonzero for expressions that are locally anticipatable in the block. */
4954 static sbitmap *antloc;
4956 /* Nonzero for expressions where this block is an optimal computation point.  */
4958 static sbitmap *pre_optimal;
4960 /* Nonzero for expressions which are redundant in a particular block. */
4961 static sbitmap *pre_redundant;
4963 /* Nonzero for expressions which should be inserted on a specific edge. */
4964 static sbitmap *pre_insert_map;
4966 /* Nonzero for expressions which should be deleted in a specific block. */
4967 static sbitmap *pre_delete_map;
4969 /* Contains the edge_list returned by pre_edge_lcm. */
4970 static struct edge_list *edge_list;
4972 /* Redundant insns. */
4973 static sbitmap pre_redundant_insns;
4975 /* Allocate vars used for PRE analysis. */
4978 alloc_pre_mem (int n_blocks, int n_exprs)
4980 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4981 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4982 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4985 pre_redundant = NULL;
4986 pre_insert_map = NULL;
4987 pre_delete_map = NULL;
4990 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4992 /* pre_insert and pre_delete are allocated later. */
4995 /* Free vars used for PRE analysis. */
5000 sbitmap_vector_free (transp);
5001 sbitmap_vector_free (comp);
5003 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5006 sbitmap_vector_free (pre_optimal);
5008 sbitmap_vector_free (pre_redundant);
5010 sbitmap_vector_free (pre_insert_map);
5012 sbitmap_vector_free (pre_delete_map);
5014 sbitmap_vector_free (ae_in);
5016 sbitmap_vector_free (ae_out);
5018 transp = comp = NULL;
5019 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
5020 ae_in = ae_out = NULL;
5023 /* Top level routine to do the dataflow analysis needed by PRE. */
5026 compute_pre_data (void)
5028 sbitmap trapping_expr;
5032 compute_local_properties (transp, comp, antloc, &expr_hash_table);
5033 sbitmap_vector_zero (ae_kill, last_basic_block);
5035 /* Collect expressions which might trap. */
5036 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
5037 sbitmap_zero (trapping_expr);
5038 for (ui = 0; ui < expr_hash_table.size; ui++)
5041 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
5042 if (may_trap_p (e->expr))
5043 SET_BIT (trapping_expr, e->bitmap_index);
5046   /* Compute ae_kill for each basic block using:

	  ae_kill[bb] = ~(transp[bb] | comp[bb])

5050      This is significantly faster than compute_ae_kill.  */
5056 /* If the current block is the destination of an abnormal edge, we
5057 kill all trapping expressions because we won't be able to properly
5058 place the instruction on the edge. So make them neither
5059 anticipatable nor transparent. This is fairly conservative. */
5060 for (e = bb->pred; e ; e = e->pred_next)
5061 if (e->flags & EDGE_ABNORMAL)
5063 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
5064 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
5068 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
5069 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
5072 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
5073 ae_kill, &pre_insert_map, &pre_delete_map);
5074 sbitmap_vector_free (antloc);
5076 sbitmap_vector_free (ae_kill);
5078 sbitmap_free (trapping_expr);
5083 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5086 VISITED is a pointer to a working buffer for tracking which BB's have
5087 been visited. It is NULL for the top-level call.
5089 We treat reaching expressions that go through blocks containing the same
5090 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5091 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5092 2 as not reaching. The intent is to improve the probability of finding
5093 only one reaching expression and to reduce register lifetimes by picking
5094 the closest such expression. */
5097 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
5101 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
5103 basic_block pred_bb = pred->src;
5105 if (pred->src == ENTRY_BLOCK_PTR
5106	  /* Has this predecessor already been visited?  */
5107 || visited[pred_bb->index])
5108 ;/* Nothing to do. */
5110 /* Does this predecessor generate this expression? */
5111 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
5113 /* Is this the occurrence we're looking for?
5114 Note that there's only one generating occurrence per block
5115 so we just need to check the block number. */
5116 if (occr_bb == pred_bb)
5119 visited[pred_bb->index] = 1;
5121 /* Ignore this predecessor if it kills the expression. */
5122 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
5123 visited[pred_bb->index] = 1;
5125 /* Neither gen nor kill. */
5128 visited[pred_bb->index] = 1;
5129 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
5134 /* All paths have been checked. */
5138 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
5139 memory allocated for that function is returned. */
5142 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
5145 char *visited = xcalloc (last_basic_block, 1);
5147 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
5154 /* Given an expr, generate RTL which we can insert at the end of a BB,
5155    or on an edge.  Set the block number of any insns generated to the value of BB.  */
5159 process_insert_insn (struct expr *expr)
5161 rtx reg = expr->reaching_reg;
5162 rtx exp = copy_rtx (expr->expr);
5167 /* If the expression is something that's an operand, like a constant,
5168 just copy it to a register. */
5169 if (general_operand (exp, GET_MODE (reg)))
5170 emit_move_insn (reg, exp);
5172 /* Otherwise, make a new insn to compute this expression and make sure the
5173 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5174 expression to make sure we don't have any sharing issues. */
5175 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
5184 /* Add EXPR to the end of basic block BB.
5186    This is used by both PRE and code hoisting.
5188 For PRE, we want to verify that the expr is either transparent
5189 or locally anticipatable in the target block. This check makes
5190 no sense for code hoisting. */
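/* For instance (a sketch), when BB ends in a cc0 compare-and-branch

	(insn ... (set (cc0) (compare (reg r60) (reg r61))))
	(jump_insn ...)

   the new computation "reaching_reg <- expr" must be emitted before
   the cc0 setter as well, since nothing may come between a cc0 set and
   its use.  The cases below for calls and tablejumps follow the same
   idea: back up to a point where the insertion is safe.  */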
5193 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
5197 rtx reg = expr->reaching_reg;
5198 int regno = REGNO (reg);
5201 pat = process_insert_insn (expr);
5202 if (pat == NULL_RTX || ! INSN_P (pat))
5206 while (NEXT_INSN (pat_end) != NULL_RTX)
5207 pat_end = NEXT_INSN (pat_end);
5209 /* If the last insn is a jump, insert EXPR in front [taking care to
5210      handle cc0, etc. properly].  Similarly, we need to take care of
5211      trapping instructions in the presence of non-call exceptions.  */
5213 if (GET_CODE (insn) == JUMP_INSN
5214 || (GET_CODE (insn) == INSN
5215 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
5220 /* It should always be the case that we can put these instructions
5221	 anywhere in the basic block when performing PRE optimizations.
5223 if (GET_CODE (insn) == INSN && pre
5224 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5225 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5228 /* If this is a jump table, then we can't insert stuff here. Since
5229 we know the previous real insn must be the tablejump, we insert
5230 the new instruction just before the tablejump. */
5231 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
5232 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
5233 insn = prev_real_insn (insn);
5236 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5237 if cc0 isn't set. */
5238 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
5240 insn = XEXP (note, 0);
5243 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
5244 if (maybe_cc0_setter
5245 && INSN_P (maybe_cc0_setter)
5246 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
5247 insn = maybe_cc0_setter;
5250 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5251 new_insn = emit_insn_before (pat, insn);
5254 /* Likewise if the last insn is a call, as will happen in the presence
5255 of exception handling. */
5256 else if (GET_CODE (insn) == CALL_INSN
5257 && (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL)))
5259 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5260 we search backward and place the instructions before the first
5261 parameter is loaded. Do this for everyone for consistency and a
5262 presumption that we'll get better code elsewhere as well.
5264 It should always be the case that we can put these instructions
5265	 anywhere in the basic block when performing PRE optimizations.
5269 && !TEST_BIT (antloc[bb->index], expr->bitmap_index)
5270 && !TEST_BIT (transp[bb->index], expr->bitmap_index))
5273 /* Since different machines initialize their parameter registers
5274 in different orders, assume nothing. Collect the set of all
5275 parameter registers. */
5276 insn = find_first_parameter_load (insn, bb->head);
5278 /* If we found all the parameter loads, then we want to insert
5279 before the first parameter load.
5281 If we did not find all the parameter loads, then we might have
5282 stopped on the head of the block, which could be a CODE_LABEL.
5283 If we inserted before the CODE_LABEL, then we would be putting
5284 the insn in the wrong basic block. In that case, put the insn
5285 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5286 while (GET_CODE (insn) == CODE_LABEL
5287 || NOTE_INSN_BASIC_BLOCK_P (insn))
5288 insn = NEXT_INSN (insn);
5290 new_insn = emit_insn_before (pat, insn);
5293 new_insn = emit_insn_after (pat, insn);
5299 add_label_notes (PATTERN (pat), new_insn);
5300 note_stores (PATTERN (pat), record_set_info, pat);
5304 pat = NEXT_INSN (pat);
5307 gcse_create_count++;
5311 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
5312 bb->index, INSN_UID (new_insn));
5313 fprintf (gcse_file, "copying expression %d to reg %d\n",
5314 expr->bitmap_index, regno);
5318 /* Insert partially redundant expressions on edges in the CFG to make
5319 the expressions fully redundant. */
5322 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
5324 int e, i, j, num_edges, set_size, did_insert = 0;
5327 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5328 if it reaches any of the deleted expressions. */
5330 set_size = pre_insert_map[0]->size;
5331 num_edges = NUM_EDGES (edge_list);
5332 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
5333 sbitmap_vector_zero (inserted, num_edges);
5335 for (e = 0; e < num_edges; e++)
5338 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
5340 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
5342 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
5344 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
5345 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
5347 struct expr *expr = index_map[j];
5350 /* Now look at each deleted occurrence of this expression. */
5351 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5353 if (! occr->deleted_p)
5356		/* Insert this expression on this edge if it would
5357 reach the deleted occurrence in BB. */
5358 if (!TEST_BIT (inserted[e], j))
5361 edge eg = INDEX_EDGE (edge_list, e);
5363 /* We can't insert anything on an abnormal and
5364 critical edge, so we insert the insn at the end of
5365 the previous block. There are several alternatives
5366		     detailed in Morgan's book, p. 277 (sec. 10.5), for
5367		     handling this situation.  This one is easiest for us to implement.  */
5370 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
5371 insert_insn_end_bb (index_map[j], bb, 0);
5374 insn = process_insert_insn (index_map[j]);
5375 insert_insn_on_edge (insn, eg);
5380 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
5382 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
5383 fprintf (gcse_file, "copy expression %d\n",
5384 expr->bitmap_index);
5387 update_ld_motion_stores (expr);
5388 SET_BIT (inserted[e], j);
5390 gcse_create_count++;
5397 sbitmap_vector_free (inserted);
5401 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
5402 Given "old_reg <- expr" (INSN), instead of adding after it
5403 reaching_reg <- old_reg
5404 it's better to do the following:
5405 reaching_reg <- expr
5406 old_reg <- reaching_reg
5407 because this way copy propagation can discover additional PRE
5408 opportunities. But if this fails, we try the old way.
5409 When "expr" is a store, i.e.
5410 given "MEM <- old_reg", instead of adding after it
5411 reaching_reg <- old_reg
5412 it's better to add it before as follows:
5413 reaching_reg <- old_reg
5414 MEM <- reaching_reg. */
5417 pre_insert_copy_insn (struct expr *expr, rtx insn)
5419 rtx reg = expr->reaching_reg;
5420 int regno = REGNO (reg);
5421 int indx = expr->bitmap_index;
5422 rtx pat = PATTERN (insn);
5427 /* This block matches the logic in hash_scan_insn. */
5428 if (GET_CODE (pat) == SET)
5430 else if (GET_CODE (pat) == PARALLEL)
5432 /* Search through the parallel looking for the set whose
5433 source was the expression that we're interested in. */
5435 for (i = 0; i < XVECLEN (pat, 0); i++)
5437 rtx x = XVECEXP (pat, 0, i);
5438 if (GET_CODE (x) == SET
5439 && expr_equiv_p (SET_SRC (x), expr->expr))
5449 if (GET_CODE (SET_DEST (set)) == REG)
5451 old_reg = SET_DEST (set);
5452 /* Check if we can modify the set destination in the original insn. */
5453 if (validate_change (insn, &SET_DEST (set), reg, 0))
5455 new_insn = gen_move_insn (old_reg, reg);
5456 new_insn = emit_insn_after (new_insn, insn);
5458 /* Keep register set table up to date. */
5459 replace_one_set (REGNO (old_reg), insn, new_insn);
5460 record_one_set (regno, insn);
5464 new_insn = gen_move_insn (reg, old_reg);
5465 new_insn = emit_insn_after (new_insn, insn);
5467 /* Keep register set table up to date. */
5468 record_one_set (regno, new_insn);
5471 else /* This is possible only in case of a store to memory. */
5473 old_reg = SET_SRC (set);
5474 new_insn = gen_move_insn (reg, old_reg);
5476 /* Check if we can modify the set source in the original insn. */
5477 if (validate_change (insn, &SET_SRC (set), reg, 0))
5478 new_insn = emit_insn_before (new_insn, insn);
5480 new_insn = emit_insn_after (new_insn, insn);
5482 /* Keep register set table up to date. */
5483 record_one_set (regno, new_insn);
5486 gcse_create_count++;
5490 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5491 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
5492 INSN_UID (insn), regno);
5495 /* Copy available expressions that reach the redundant expression
5496 to `reaching_reg'. */
5499 pre_insert_copies (void)
5501 unsigned int i, added_copy;
5506 /* For each available expression in the table, copy the result to
5507 `reaching_reg' if the expression reaches a deleted one.
5509 ??? The current algorithm is rather brute force.
5510 Need to do some profiling. */
5512 for (i = 0; i < expr_hash_table.size; i++)
5513 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5515 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5516 we don't want to insert a copy here because the expression may not
5517 really be redundant. So only insert an insn if the expression was
5518 deleted. This test also avoids further processing if the
5519 expression wasn't deleted anywhere. */
5520 if (expr->reaching_reg == NULL)
5523 /* Set when we add a copy for that expression. */
5526 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5528 if (! occr->deleted_p)
5531 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
5533 rtx insn = avail->insn;
5535 /* No need to handle this one if handled already. */
5536 if (avail->copied_p)
5539 /* Don't handle this one if it's a redundant one. */
5540 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
5543 /* Or if the expression doesn't reach the deleted one. */
5544 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
5546 BLOCK_FOR_INSN (occr->insn)))
5551 /* Copy the result of avail to reaching_reg. */
5552 pre_insert_copy_insn (expr, insn);
5553 avail->copied_p = 1;
5558 update_ld_motion_stores (expr);
5562 /* Emit move from SRC to DEST noting the equivalence with expression computed in INSN.  */
5565 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
5568 rtx set = single_set (insn), set2;
5572 /* This should never fail since we're creating a reg->reg copy
5573 we've verified to be valid. */
5575 new = emit_insn_after (gen_move_insn (dest, src), insn);
5577 /* Note the equivalence for local CSE pass. */
5578 set2 = single_set (new);
5579 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
5581 if ((note = find_reg_equal_equiv_note (insn)))
5582 eqv = XEXP (note, 0);
5584 eqv = SET_SRC (set);
5586 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
5591 /* Delete redundant computations.
5592 Deletion is done by changing the insn to copy the `reaching_reg' of
5593 the expression into the result of the SET. It is left to later passes
5594 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5596 Returns nonzero if a change is made. */
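/* An illustrative sketch (invented pseudo numbers): if LCM marks
   "r90 = r60 + r61" in some block as deletable, it is rewritten as

	r120 = r60 + r61	; computed on every incoming path,
				; possibly inserted by pre_edge_insert
	...
	r90 = r120		; the formerly redundant computation

   where r120 is expr->reaching_reg; later passes propagate or delete
   the copy.  */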
5607 for (i = 0; i < expr_hash_table.size; i++)
5608 for (expr = expr_hash_table.table[i];
5610 expr = expr->next_same_hash)
5612 int indx = expr->bitmap_index;
5614	/* We only need to search antic_occr since we require ANTLOC != 0.  */
5617 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
5619 rtx insn = occr->insn;
5621 basic_block bb = BLOCK_FOR_INSN (insn);
5623 /* We only delete insns that have a single_set. */
5624 if (TEST_BIT (pre_delete_map[bb->index], indx)
5625 && (set = single_set (insn)) != 0)
5627 /* Create a pseudo-reg to store the result of reaching
5628 expressions into. Get the mode for the new pseudo from
5629 the mode of the original destination pseudo. */
5630 if (expr->reaching_reg == NULL)
5632 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5634 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
5636 occr->deleted_p = 1;
5637 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
5644 "PRE: redundant insn %d (expression %d) in ",
5645 INSN_UID (insn), indx);
5646 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
5647 bb->index, REGNO (expr->reaching_reg));
5656 /* Perform GCSE optimizations using PRE.
5657 This is called by one_pre_gcse_pass after all the dataflow analysis
5660    This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5661 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5662 Compiler Design and Implementation.
5664 ??? A new pseudo reg is created to hold the reaching expression. The nice
5665 thing about the classical approach is that it would try to use an existing
5666 reg. If the register can't be adequately optimized [i.e. we introduce
5667 reload problems], one could add a pass here to propagate the new register
5670 ??? We don't handle single sets in PARALLELs because we're [currently] not
5671 able to copy the rest of the parallel when we insert copies to create full
5672 redundancies from partial redundancies. However, there's no reason why we
5673    can't handle PARALLELs in the cases where there are no partial redundancies.  */
5680 int did_insert, changed;
5681 struct expr **index_map;
5684 /* Compute a mapping from expression number (`bitmap_index') to
5685 hash table entry. */
5687 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
5688 for (i = 0; i < expr_hash_table.size; i++)
5689 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
5690 index_map[expr->bitmap_index] = expr;
5692 /* Reset bitmap used to track which insns are redundant. */
5693 pre_redundant_insns = sbitmap_alloc (max_cuid);
5694 sbitmap_zero (pre_redundant_insns);
5696 /* Delete the redundant insns first so that
5697 - we know what register to use for the new insns and for the other
5698 ones with reaching expressions
5699 - we know which insns are redundant when we go to create copies */
5701 changed = pre_delete ();
5703 did_insert = pre_edge_insert (edge_list, index_map);
5705 /* In other places with reaching expressions, copy the expression to the
5706 specially allocated pseudo-reg that reaches the redundant expr. */
5707 pre_insert_copies ();
5710 commit_edge_insertions ();
5715 sbitmap_free (pre_redundant_insns);
5719 /* Top level routine to perform one PRE GCSE pass.
5721 Return nonzero if a change was made. */
5724 one_pre_gcse_pass (int pass)
5728 gcse_subst_count = 0;
5729 gcse_create_count = 0;
5731 alloc_hash_table (max_cuid, &expr_hash_table, 0);
5732 add_noreturn_fake_exit_edges ();
5734 compute_ld_motion_mems ();
5736 compute_hash_table (&expr_hash_table);
5737 trim_ld_motion_mems ();
5739 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
5741 if (expr_hash_table.n_elems > 0)
5743 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
5744 compute_pre_data ();
5745 changed |= pre_gcse ();
5746 free_edge_list (edge_list);
5751 remove_fake_edges ();
5752 free_hash_table (&expr_hash_table);
5756 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5757 current_function_name, pass, bytes_used);
5758 fprintf (gcse_file, "%d substs, %d insns created\n",
5759 gcse_subst_count, gcse_create_count);
5765 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5766 If notes are added to an insn which references a CODE_LABEL, the
5767 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5768 because the following loop optimization pass requires them. */
5770 /* ??? This is very similar to the loop.c add_label_notes function. We
5771 could probably share code here. */
5773 /* ??? If there was a jump optimization pass after gcse and before loop,
5774 then we would not need to do this here, because jump would add the
5775 necessary REG_LABEL notes. */
5778 add_label_notes (rtx x, rtx insn)
5780 enum rtx_code code = GET_CODE (x);
5784 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
5786 /* This code used to ignore labels that referred to dispatch tables to
5787 avoid flow generating (slightly) worse code.
5789 We no longer ignore such label references (see LABEL_REF handling in
5790 mark_jump_label for additional information). */
5792 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
5794 if (LABEL_P (XEXP (x, 0)))
5795 LABEL_NUSES (XEXP (x, 0))++;
5799 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
5802 add_label_notes (XEXP (x, i), insn);
5803 else if (fmt[i] == 'E')
5804 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5805 add_label_notes (XVECEXP (x, i, j), insn);
5809 /* Compute transparent outgoing information for each block.
5811 An expression is transparent to an edge unless it is killed by
5812 the edge itself. This can only happen with abnormal control flow,
5813 when the edge is traversed through a call. This happens with
5814 non-local labels and exceptions.
5816 This would not be necessary if we split the edge. While this is
5817 normally impossible for abnormal critical edges, with some effort
5818 it should be possible with exception handling, since we still have
5819 control over which handler should be invoked. But due to increased
5820 EH table sizes, this may not be worthwhile. */
5823 compute_transpout (void)
5829 sbitmap_vector_ones (transpout, last_basic_block);
5833      /* Note that flow inserted a nop at the end of basic blocks that
5834	 end in call instructions for reasons other than abnormal control flow.  */
5836 if (GET_CODE (bb->end) != CALL_INSN)
5839 for (i = 0; i < expr_hash_table.size; i++)
5840 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
5841 if (GET_CODE (expr->expr) == MEM)
5843 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
5844 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
5847 /* ??? Optimally, we would use interprocedural alias
5848		 analysis to determine if this mem is actually killed by this call.  */
5850 RESET_BIT (transpout[bb->index], expr->bitmap_index);
5855 /* Removal of useless null pointer checks */
5857 /* Called via note_stores. X is set by SETTER. If X is a register we must
5858 invalidate nonnull_local and set nonnull_killed. DATA is really a
5859 `null_pointer_info *'.
5861 We ignore hard registers. */
5864 invalidate_nonnull_info (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
5867 struct null_pointer_info *npi = (struct null_pointer_info *) data;
5869 while (GET_CODE (x) == SUBREG)
5872 /* Ignore anything that is not a register or is a hard register. */
5873 if (GET_CODE (x) != REG
5874 || REGNO (x) < npi->min_reg
5875 || REGNO (x) >= npi->max_reg)
5878 regno = REGNO (x) - npi->min_reg;
5880 RESET_BIT (npi->nonnull_local[npi->current_block->index], regno);
5881 SET_BIT (npi->nonnull_killed[npi->current_block->index], regno);
5884 /* Do null-pointer check elimination for the registers indicated in
5885 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5886 they are not our responsibility to free. */
5889 delete_null_pointer_checks_1 (unsigned int *block_reg, sbitmap *nonnull_avin,
5890 sbitmap *nonnull_avout,
5891 struct null_pointer_info *npi)
5893 basic_block bb, current_block;
5894 sbitmap *nonnull_local = npi->nonnull_local;
5895 sbitmap *nonnull_killed = npi->nonnull_killed;
5896 int something_changed = 0;
5898 /* Compute local properties, nonnull and killed. A register will have
5899 the nonnull property if at the end of the current block its value is
5900 known to be nonnull. The killed property indicates that somewhere in
5901 the block any information we had about the register is killed.
5903 Note that a register can have both properties in a single block. That
5904      indicates that it's killed, then later in the block a new value is computed.  */
5906 sbitmap_vector_zero (nonnull_local, last_basic_block);
5907 sbitmap_vector_zero (nonnull_killed, last_basic_block);
5909 FOR_EACH_BB (current_block)
5911 rtx insn, stop_insn;
5913 /* Set the current block for invalidate_nonnull_info. */
5914 npi->current_block = current_block;
5916      /* Scan each insn in the basic block looking for memory references and register sets.  */
5918 stop_insn = NEXT_INSN (current_block->end);
5919 for (insn = current_block->head;
5921 insn = NEXT_INSN (insn))
5926 /* Ignore anything that is not a normal insn. */
5927 if (! INSN_P (insn))
5930 /* Basically ignore anything that is not a simple SET. We do have
5931 to make sure to invalidate nonnull_local and set nonnull_killed
5932 for such insns though. */
5933 set = single_set (insn);
5936 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5940 /* See if we've got a usable memory load. We handle it first
5941 in case it uses its address register as a dest (which kills
5942 the nonnull property). */
5943 if (GET_CODE (SET_SRC (set)) == MEM
5944 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5945 && REGNO (reg) >= npi->min_reg
5946 && REGNO (reg) < npi->max_reg)
5947 SET_BIT (nonnull_local[current_block->index],
5948 REGNO (reg) - npi->min_reg);
5950 /* Now invalidate stuff clobbered by this insn. */
5951 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5953 /* And handle stores, we do these last since any sets in INSN can
5954 not kill the nonnull property if it is derived from a MEM
5955 appearing in a SET_DEST. */
5956 if (GET_CODE (SET_DEST (set)) == MEM
5957 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5958 && REGNO (reg) >= npi->min_reg
5959 && REGNO (reg) < npi->max_reg)
5960 SET_BIT (nonnull_local[current_block->index],
5961 REGNO (reg) - npi->min_reg);
5965 /* Now compute global properties based on the local properties. This
5966 is a classic global availability algorithm. */
5967 compute_available (nonnull_local, nonnull_killed,
5968 nonnull_avout, nonnull_avin);
5970   /* Now look at each bb and see if it ends with a compare of a value against zero.  */
5974 rtx last_insn = bb->end;
5975 rtx condition, earliest;
5976 int compare_and_branch;
5978 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5979 since BLOCK_REG[BB] is zero if this block did not end with a
5980 comparison against zero, this condition works. */
5981 if (block_reg[bb->index] < npi->min_reg
5982 || block_reg[bb->index] >= npi->max_reg)
5985 /* LAST_INSN is a conditional jump. Get its condition. */
5986 condition = get_condition (last_insn, &earliest, false);
5988 /* If we can't determine the condition then skip. */
5992 /* Is the register known to have a nonzero value? */
5993 if (!TEST_BIT (nonnull_avout[bb->index], block_reg[bb->index] - npi->min_reg))
5996 /* Try to compute whether the compare/branch at the loop end is one or
5997 two instructions. */
5998 if (earliest == last_insn)
5999 compare_and_branch = 1;
6000 else if (earliest == prev_nonnote_insn (last_insn))
6001 compare_and_branch = 2;
6005 /* We know the register in this comparison is nonnull at exit from
6006 this block. We can optimize this comparison. */
6007 if (GET_CODE (condition) == NE)
6011 new_jump = emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn)),
6013 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
6014 LABEL_NUSES (JUMP_LABEL (new_jump))++;
6015 emit_barrier_after (new_jump);
6018 something_changed = 1;
6019 delete_insn (last_insn);
6020 if (compare_and_branch == 2)
6021 delete_insn (earliest);
6022 purge_dead_edges (bb);
6024 /* Don't check this block again. (Note that BLOCK_END is
6025	     invalid here; we deleted the last instruction in the block.)  */
6027 block_reg[bb->index] = 0;
6030 return something_changed;
6033 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6036 This is conceptually similar to global constant/copy propagation and
6037 classic global CSE (it even uses the same dataflow equations as cprop).
6039 If a register is used as memory address with the form (mem (reg)), then we
6040 know that REG can not be zero at that point in the program. Any instruction
6041 which sets REG "kills" this property.
6043 So, if every path leading to a conditional branch has an available memory
6044 reference of that form, then we know the register can not have the value
6045 zero at the conditional branch.
6047 So we merely need to compute the local properties and propagate that data
6048 around the cfg, then optimize where possible.
6050    We run this pass twice: once before CSE, then again after CSE.  This
6051 has proven to be the most profitable approach. It is rare for new
6052 optimization opportunities of this nature to appear after the first CSE
6055 This could probably be integrated with global cprop with a little work. */
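/* A source-level sketch of the transformation (hypothetical code):

	tmp = *p;		(mem (reg p)) proves p is nonnull here
	...
	if (p == 0)		available on every path: known false
	  abort ();

   For an EQ comparison the conditional jump is simply deleted; for NE
   it is replaced by an unconditional jump.  */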
6058 delete_null_pointer_checks (rtx f ATTRIBUTE_UNUSED)
6060 sbitmap *nonnull_avin, *nonnull_avout;
6061 unsigned int *block_reg;
6065 int max_reg = max_reg_num ();
6066 struct null_pointer_info npi;
6067 int something_changed = 0;
6069 /* If we have only a single block, or it is too expensive, give up. */
6070 if (n_basic_blocks <= 1
6071 || is_too_expensive (_ ("NULL pointer checks disabled")))
6074   /* We need four bitmaps, each with a bit for each register in each basic block.  */
6076 regs_per_pass = get_bitmap_width (4, last_basic_block, max_reg);
6078 /* Allocate bitmaps to hold local and global properties. */
6079 npi.nonnull_local = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6080 npi.nonnull_killed = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6081 nonnull_avin = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6082 nonnull_avout = sbitmap_vector_alloc (last_basic_block, regs_per_pass);
6084 /* Go through the basic blocks, seeing whether or not each block
6085 ends with a conditional branch whose condition is a comparison
6086 against zero. Record the register compared in BLOCK_REG. */
6087 block_reg = xcalloc (last_basic_block, sizeof (int));
6090 rtx last_insn = bb->end;
6091 rtx condition, earliest, reg;
6093 /* We only want conditional branches. */
6094 if (GET_CODE (last_insn) != JUMP_INSN
6095 || !any_condjump_p (last_insn)
6096 || !onlyjump_p (last_insn))
6099 /* LAST_INSN is a conditional jump. Get its condition. */
6100 condition = get_condition (last_insn, &earliest, false);
6102 /* If we were unable to get the condition, or it is not an equality
6103 comparison against zero then there's nothing we can do. */
6105 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
6106 || GET_CODE (XEXP (condition, 1)) != CONST_INT
6107 || (XEXP (condition, 1)
6108 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
6111 /* We must be checking a register against zero. */
6112 reg = XEXP (condition, 0);
6113 if (GET_CODE (reg) != REG)
6116 block_reg[bb->index] = REGNO (reg);
6119 /* Go through the algorithm for each block of registers. */
6120 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
6123 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
6124 something_changed |= delete_null_pointer_checks_1 (block_reg,
6130 /* Free the table of registers compared at the end of every block. */
6134 sbitmap_vector_free (npi.nonnull_local);
6135 sbitmap_vector_free (npi.nonnull_killed);
6136 sbitmap_vector_free (nonnull_avin);
6137 sbitmap_vector_free (nonnull_avout);
6139 return something_changed;
6142 /* Code Hoisting variables and subroutines. */
6144 /* Very busy expressions. */
6145 static sbitmap *hoist_vbein;
6146 static sbitmap *hoist_vbeout;
6148 /* Hoistable expressions. */
6149 static sbitmap *hoist_exprs;
6151 /* Dominator bitmaps. */
6152 dominance_info dominators;
6154 /* ??? We could compute post dominators and run this algorithm in
6155    reverse to perform tail merging; doing so would probably be
6156 more effective than the tail merging code in jump.c.
6158 It's unclear if tail merging could be run in parallel with
6159 code hoisting. It would be nice. */
6161 /* Allocate vars used for code hoisting analysis. */
6164 alloc_code_hoist_mem (int n_blocks, int n_exprs)
6166 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
6167 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
6168 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
6170 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
6171 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
6172 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
6173 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
6176 /* Free vars used for code hoisting analysis. */
6179 free_code_hoist_mem (void)
6181 sbitmap_vector_free (antloc);
6182 sbitmap_vector_free (transp);
6183 sbitmap_vector_free (comp);
6185 sbitmap_vector_free (hoist_vbein);
6186 sbitmap_vector_free (hoist_vbeout);
6187 sbitmap_vector_free (hoist_exprs);
6188 sbitmap_vector_free (transpout);
6190 free_dominance_info (dominators);
6193 /* Compute the very busy expressions at entry/exit from each block.
6195 An expression is very busy if all paths from a given point
6196 compute the expression. */
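/* A sketch of the dataflow solved below (bitmap names match the code):

	hoist_vbein[bb]  = antloc[bb] | (hoist_vbeout[bb] & transp[bb])
	hoist_vbeout[bb] = intersection of hoist_vbein over the
			   successors of bb (taken as empty for the
			   last block, since nothing is very busy at
			   function exit)

   iterated to a fixed point, scanning the blocks in reverse order.  */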
6199 compute_code_hoist_vbeinout (void)
6201 int changed, passes;
6204 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
6205 sbitmap_vector_zero (hoist_vbein, last_basic_block);
6214      /* We scan the blocks in the reverse order to speed up the convergence.  */
6216 FOR_EACH_BB_REVERSE (bb)
6218 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
6219 hoist_vbeout[bb->index], transp[bb->index]);
6220 if (bb->next_bb != EXIT_BLOCK_PTR)
6221 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
6228 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
6231 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6234 compute_code_hoist_data (void)
6236 compute_local_properties (transp, comp, antloc, &expr_hash_table);
6237 compute_transpout ();
6238 compute_code_hoist_vbeinout ();
6239 dominators = calculate_dominance_info (CDI_DOMINATORS);
6241 fprintf (gcse_file, "\n");
6244 /* Determine if the expression identified by EXPR_INDEX would
6245    reach BB unimpaired if it was placed at the end of EXPR_BB.
6247    It's unclear exactly what Muchnick meant by "unimpaired".  It seems
6248 to me that the expression must either be computed or transparent in
6249 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6250 would allow the expression to be hoisted out of loops, even if
6251 the expression wasn't a loop invariant.
6253 Contrast this to reachability for PRE where an expression is
6254    considered reachable if *any* path reaches it instead of *all* paths.  */
6258 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
6261 int visited_allocated_locally = 0;
6264 if (visited == NULL)
6266 visited_allocated_locally = 1;
6267 visited = xcalloc (last_basic_block, 1);
6270 for (pred = bb->pred; pred != NULL; pred = pred->pred_next)
6272 basic_block pred_bb = pred->src;
6274 if (pred->src == ENTRY_BLOCK_PTR)
6276 else if (pred_bb == expr_bb)
6278 else if (visited[pred_bb->index])
6281 /* Does this predecessor generate this expression? */
6282 else if (TEST_BIT (comp[pred_bb->index], expr_index))
6284 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
6290 visited[pred_bb->index] = 1;
6291 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
6296 if (visited_allocated_locally)
6299 return (pred == NULL);
6302 /* Actually perform code hoisting. */
6307 basic_block bb, dominated;
6309 unsigned int domby_len;
6311 struct expr **index_map;
6314 sbitmap_vector_zero (hoist_exprs, last_basic_block);
6316 /* Compute a mapping from expression number (`bitmap_index') to
6317 hash table entry. */
6319 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
6320 for (i = 0; i < expr_hash_table.size; i++)
6321 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
6322 index_map[expr->bitmap_index] = expr;
6324 /* Walk over each basic block looking for potentially hoistable
6325      expressions; nothing gets hoisted from the entry block.  */
6329 int insn_inserted_p;
6331 domby_len = get_dominated_by (dominators, bb, &domby);
6332 /* Examine each expression that is very busy at the exit of this
6333 block. These are the potentially hoistable expressions. */
6334 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
6338 if (TEST_BIT (hoist_vbeout[bb->index], i)
6339 && TEST_BIT (transpout[bb->index], i))
6341 /* We've found a potentially hoistable expression, now
6342 we look at every block BB dominates to see if it
6343 computes the expression. */
6344 for (j = 0; j < domby_len; j++)
6346 dominated = domby[j];
6347 /* Ignore self dominance. */
6348 if (bb == dominated)
6350 /* We've found a dominated block, now see if it computes
6351 the busy expression and whether or not moving that
6352 expression to the "beginning" of that block is safe. */
6353 if (!TEST_BIT (antloc[dominated->index], i))
6356 /* Note if the expression would reach the dominated block
6357		 unimpaired if it was placed at the end of BB.
6359 Keep track of how many times this expression is hoistable
6360 from a dominated block into BB. */
6361 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6365 /* If we found more than one hoistable occurrence of this
6366 expression, then note it in the bitmap of expressions to
6367 hoist. It makes no sense to hoist things which are computed
6368 in only one BB, and doing so tends to pessimize register
6369 allocation. One could increase this value to try harder
6370 to avoid any possible code expansion due to register
6371 allocation issues; however experiments have shown that
6372 the vast majority of hoistable expressions are only movable
6373 from two successors, so raising this threshold is likely
6374 to nullify any benefit we get from code hoisting. */
6377 SET_BIT (hoist_exprs[bb->index], i);
6382 /* If we found nothing to hoist, then quit now. */
6389 /* Loop over all the hoistable expressions. */
6390 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
6392 /* We want to insert the expression into BB only once, so
6393 note when we've inserted it. */
6394 insn_inserted_p = 0;
6396 /* These tests should be the same as the tests above. */
6397 if (TEST_BIT (hoist_vbeout[bb->index], i))
6399 /* We've found a potentially hoistable expression, now
6400 we look at every block BB dominates to see if it
6401 computes the expression. */
6402 for (j = 0; j < domby_len; j++)
6404 dominated = domby[j];
6405 /* Ignore self dominance. */
6406 if (bb == dominated)
6409 /* We've found a dominated block, now see if it computes
6410 the busy expression and whether or not moving that
6411 expression to the "beginning" of that block is safe. */
6412 if (!TEST_BIT (antloc[dominated->index], i))
6415 /* The expression is computed in the dominated block and
6416 it would be safe to compute it at the start of the
6417 dominated block. Now we have to determine if the
6418 expression would reach the dominated block if it was
6419 placed at the end of BB. */
6420 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
6422 struct expr *expr = index_map[i];
6423 struct occr *occr = expr->antic_occr;
6427 /* Find the right occurrence of this expression. */
/* Test OCCR for NULL before dereferencing it.  */
while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
6431 /* Should never happen. */
6437 set = single_set (insn);
6441 /* Create a pseudo-reg to store the result of reaching
6442 expressions into. Get the mode for the new pseudo
6443 from the mode of the original destination pseudo. */
6444 if (expr->reaching_reg == NULL)
6446 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
6448 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
6450 occr->deleted_p = 1;
6451 if (!insn_inserted_p)
6453 insert_insn_end_bb (index_map[i], bb, 0);
6454 insn_inserted_p = 1;
6466 /* Top level routine to perform one code hoisting (aka unification) pass
6468 Return nonzero if a change was made. */
6471 one_code_hoisting_pass (void)
6475 alloc_hash_table (max_cuid, &expr_hash_table, 0);
6476 compute_hash_table (&expr_hash_table);
dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);
6480 if (expr_hash_table.n_elems > 0)
6482 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
6483 compute_code_hoist_data ();
6485 free_code_hoist_mem ();
6488 free_hash_table (&expr_hash_table);
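/* As a source-level illustration of the unification performed by the pass
   above (a sketch of the general technique, not output generated by this
   pass):

     if (c)
       x = a + b;
     else
       y = a + b;

   becomes

     t = a + b;
     if (c)
       x = t;
     else
       y = t;

   The expression a + b is very busy at the exit of the block ending in the
   branch, so a single copy is computed there and stored in a new pseudo
   register ('t' above), which both arms then reuse.  */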
/* Here we provide the things required to do store motion towards
the exit.  In order for this to be effective, gcse also needed to
be taught how to move a load when it is killed only by a store to itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }

'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
the load out since it is live around the loop, and stored at the bottom
of the loop.

The 'Load Motion' referred to and implemented in this file is
an enhancement to gcse which, when using edge-based LCM, recognizes
this situation and allows gcse to move the load out of the loop.

Once gcse has hoisted the load, store motion can then push this
load towards the exit, and we end up with no loads or stores of 'i'
in the loop.  */
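/* For illustration only (a sketch of the net effect, not compiler output):
   after load motion and store motion, the loop above behaves as if it had
   been written

	    void foo(float scale)
	    {
	      int i_reg;		-- pseudo register holding 'i'
	      for (i_reg=0; i_reg<10; i_reg++)
		a[i_reg] *= scale;
	      i = i_reg;		-- single store, sunk past the loop
	    }

   with every load and store of 'i' inside the loop replaced by uses of the
   register.  */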
6518 /* This will search the ldst list for a matching expression. If it
6519 doesn't find one, we create one and initialize it. */
6521 static struct ls_expr *
6524 int do_not_record_p = 0;
6525 struct ls_expr * ptr;
6528 hash = hash_expr_1 (x, GET_MODE (x), & do_not_record_p);
6530 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6531 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
6534 ptr = xmalloc (sizeof (struct ls_expr));
6536 ptr->next = pre_ldst_mems;
6539 ptr->pattern_regs = NULL_RTX;
6540 ptr->loads = NULL_RTX;
6541 ptr->stores = NULL_RTX;
6542 ptr->reaching_reg = NULL_RTX;
6545 ptr->hash_index = hash;
6546 pre_ldst_mems = ptr;
6551 /* Free up an individual ldst entry. */
6554 free_ldst_entry (struct ls_expr * ptr)
6556 free_INSN_LIST_list (& ptr->loads);
6557 free_INSN_LIST_list (& ptr->stores);
6562 /* Free up all memory associated with the ldst list. */
6565 free_ldst_mems (void)
6567 while (pre_ldst_mems)
6569 struct ls_expr * tmp = pre_ldst_mems;
6571 pre_ldst_mems = pre_ldst_mems->next;
6573 free_ldst_entry (tmp);
6576 pre_ldst_mems = NULL;
6579 /* Dump debugging info about the ldst list. */
6582 print_ldst_list (FILE * file)
6584 struct ls_expr * ptr;
6586 fprintf (file, "LDST list: \n");
6588 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
6590 fprintf (file, " Pattern (%3d): ", ptr->index);
6592 print_rtl (file, ptr->pattern);
6594 fprintf (file, "\n Loads : ");
6597 print_rtl (file, ptr->loads);
6599 fprintf (file, "(nil)");
6601 fprintf (file, "\n Stores : ");
6604 print_rtl (file, ptr->stores);
6606 fprintf (file, "(nil)");
6608 fprintf (file, "\n\n");
6611 fprintf (file, "\n");
/* Return the entry in the ldst list whose pattern matches X and has not
been invalidated, or NULL if there is none.  */
6616 static struct ls_expr *
6617 find_rtx_in_ldst (rtx x)
6619 struct ls_expr * ptr;
6621 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6622 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
6628 /* Assign each element of the list of mems a monotonically increasing value. */
6631 enumerate_ldsts (void)
6633 struct ls_expr * ptr;
6636 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
6642 /* Return first item in the list. */
6644 static inline struct ls_expr *
6645 first_ls_expr (void)
6647 return pre_ldst_mems;
6650 /* Return the next item in the list after the specified one. */
6652 static inline struct ls_expr *
6653 next_ls_expr (struct ls_expr * ptr)
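/* The two accessors above give the usual traversal idiom for the ldst
   list, as used by print_ldst_list and the other walkers in this file:

     struct ls_expr * ptr;

     for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
       ... process PTR ...  */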
6658 /* Load Motion for loads which only kill themselves. */
/* Return true if X is a simple MEM operation, with no registers or
side effects.  These are the types of loads we consider for the
ld_motion list; otherwise we let the usual aliasing take care of it.  */
6667 if (GET_CODE (x) != MEM)
6670 if (MEM_VOLATILE_P (x))
6673 if (GET_MODE (x) == BLKmode)
/* If we are handling exceptions, we must be careful with memory references
that may trap.  If we are not, the behavior is undefined, so we may just
continue.  */
6679 if (flag_non_call_exceptions && may_trap_p (x))
6682 if (side_effects_p (x))
6685 /* Do not consider function arguments passed on stack. */
6686 if (reg_mentioned_p (stack_pointer_rtx, x))
6689 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
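/* For instance (an illustrative sketch, not an exhaustive list): a direct
   reference to a symbol such as

     (mem:SI (symbol_ref:SI ("i")))

   passes the checks above, while a volatile or BLKmode MEM, or a reference
   mentioning the stack pointer such as

     (mem:SI (plus:SI (reg:SI sp) (const_int 4)))

   is rejected.  */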
/* Make sure there isn't a buried reference in this pattern anywhere.
If there is, invalidate the entry for it since we're not capable
of fixing it up just yet.  We have to be sure we know about ALL
loads since the aliasing code will allow all entries in the
ld_motion list to be treated as non-aliasing.  If we miss a load, we
will get the wrong value since gcse might common it and we won't know to
fix it up.  */
6704 invalidate_any_buried_refs (rtx x)
6708 struct ls_expr * ptr;
6710 /* Invalidate it in the list. */
6711 if (GET_CODE (x) == MEM && simple_mem (x))
6713 ptr = ldst_entry (x);
6717 /* Recursively process the insn. */
6718 fmt = GET_RTX_FORMAT (GET_CODE (x));
6720 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6723 invalidate_any_buried_refs (XEXP (x, i));
6724 else if (fmt[i] == 'E')
6725 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6726 invalidate_any_buried_refs (XVECEXP (x, i, j));
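/* As an illustration (a hypothetical pattern, not taken from a dump): in

     (set (reg:SI 100)
	  (plus:SI (mem:SI (symbol_ref:SI ("a"))) (const_int 1)))

   the MEM is buried inside the PLUS rather than being the entire SET_SRC,
   so the recursive walk above finds it and invalidates its ldst entry.  */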
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  'Simple'
means MEM loads from and stores to symbols, with no side effects and no
registers in the expression.  For a MEM destination, we also check that the
insn is still valid if we replace the destination with a REG, as is done in
update_ld_motion_stores.  If there are any uses/defs which don't match these
criteria, they are invalidated and trimmed out later.  */
6739 compute_ld_motion_mems (void)
6741 struct ls_expr * ptr;
6745 pre_ldst_mems = NULL;
6749 for (insn = bb->head;
6750 insn && insn != NEXT_INSN (bb->end);
6751 insn = NEXT_INSN (insn))
6755 if (GET_CODE (PATTERN (insn)) == SET)
6757 rtx src = SET_SRC (PATTERN (insn));
6758 rtx dest = SET_DEST (PATTERN (insn));
6760 /* Check for a simple LOAD... */
6761 if (GET_CODE (src) == MEM && simple_mem (src))
6763 ptr = ldst_entry (src);
6764 if (GET_CODE (dest) == REG)
6765 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
6771 /* Make sure there isn't a buried load somewhere. */
6772 invalidate_any_buried_refs (src);
/* Check for stores.  Don't worry about aliased ones; they
will block any movement we might do later.  We only care
about this exact pattern since those are the only
circumstances in which we will ignore the aliasing info.  */
6779 if (GET_CODE (dest) == MEM && simple_mem (dest))
6781 ptr = ldst_entry (dest);
6783 if (GET_CODE (src) != MEM
6784 && GET_CODE (src) != ASM_OPERANDS
6785 /* Check for REG manually since want_to_gcse_p
6786 returns 0 for all REGs. */
6787 && (REG_P (src) || want_to_gcse_p (src)))
6788 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
6794 invalidate_any_buried_refs (PATTERN (insn));
/* Remove any references that have either been invalidated or are not in the
expression list for pre gcse.  */
6804 trim_ld_motion_mems (void)
6806 struct ls_expr * * last = & pre_ldst_mems;
6807 struct ls_expr * ptr = pre_ldst_mems;
6813 /* Delete if entry has been made invalid. */
6816 /* Delete if we cannot find this mem in the expression list. */
6817 unsigned int hash = ptr->hash_index % expr_hash_table.size;
6819 for (expr = expr_hash_table.table[hash];
6821 expr = expr->next_same_hash)
6822 if (expr_equiv_p (expr->expr, ptr->pattern))
6826 expr = (struct expr *) 0;
6830 /* Set the expression field if we are keeping it. */
6838 free_ldst_entry (ptr);
6843 /* Show the world what we've found. */
6844 if (gcse_file && pre_ldst_mems != NULL)
6845 print_ldst_list (gcse_file);
/* This routine will take an expression which we are replacing with
a reaching register, and update any stores that are needed if
that expression is in the ld_motion list.  Stores are updated by
copying their SRC to the reaching register, and then storing
the reaching register into the store location.  This keeps the
correct value in the reaching register for the loads.  */
6856 update_ld_motion_stores (struct expr * expr)
6858 struct ls_expr * mem_ptr;
6860 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
/* We can try to find just the REACHED stores, but it shouldn't
matter to set the reaching reg everywhere...  some might be
dead and should be eliminated later.  */
6866 /* We replace (set mem expr) with (set reg expr) (set mem reg)
6867 where reg is the reaching reg used in the load. We checked in
6868 compute_ld_motion_mems that we can replace (set mem expr) with
6869 (set reg expr) in that insn. */
6870 rtx list = mem_ptr->stores;
6872 for ( ; list != NULL_RTX; list = XEXP (list, 1))
6874 rtx insn = XEXP (list, 0);
6875 rtx pat = PATTERN (insn);
6876 rtx src = SET_SRC (pat);
6877 rtx reg = expr->reaching_reg;
6880 /* If we've already copied it, continue. */
6881 if (expr->reaching_reg == src)
6886 fprintf (gcse_file, "PRE: store updated with reaching reg ");
6887 print_rtl (gcse_file, expr->reaching_reg);
6888 fprintf (gcse_file, ":\n ");
6889 print_inline_rtx (gcse_file, insn, 8);
6890 fprintf (gcse_file, "\n");
6893 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
6894 new = emit_insn_before (copy, insn);
6895 record_one_set (REGNO (reg), new);
6896 SET_SRC (pat) = reg;
/* Un-recognize this pattern since it's probably different now.  */
6899 INSN_CODE (insn) = -1;
6900 gcse_create_count++;
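/* Concretely (an illustrative sketch; register numbers are hypothetical):
   a store such as

     (set (mem:SI (symbol_ref:SI ("i"))) (plus:SI (reg:SI 101) (const_int 1)))

   becomes the pair

     (set (reg:SI 105) (plus:SI (reg:SI 101) (const_int 1)))
     (set (mem:SI (symbol_ref:SI ("i"))) (reg:SI 105))

   where (reg:SI 105) stands for the reaching register of the expression.  */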
6905 /* Store motion code. */
6907 #define ANTIC_STORE_LIST(x) ((x)->loads)
6908 #define AVAIL_STORE_LIST(x) ((x)->stores)
6909 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
/* This is used to communicate the target array we want to use in the
reg_set_info routine when called via the note_stores mechanism.  */
6913 static int * regvec;
6915 /* And current insn, for the same routine. */
6916 static rtx compute_store_table_current_insn;
6918 /* Used in computing the reverse edge graph bit vectors. */
6919 static sbitmap * st_antloc;
6921 /* Global holding the number of store expressions we are dealing with. */
6922 static int num_stores;
/* Record, in the array pointed to by REGVEC, the UID of the insn that
sets register DEST.  Called via note_stores.  */
6927 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
6928 void *data ATTRIBUTE_UNUSED)
6930 if (GET_CODE (dest) == SUBREG)
6931 dest = SUBREG_REG (dest);
6933 if (GET_CODE (dest) == REG)
6934 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
/* Return zero if some of the registers in list X are killed
due to a set recorded in REGS_SET.  */
6941 store_ops_ok (rtx x, int *regs_set)
6945 for (; x; x = XEXP (x, 1))
6948 if (regs_set[REGNO(reg)])
6955 /* Returns a list of registers mentioned in X. */
6957 extract_mentioned_regs (rtx x)
6959 return extract_mentioned_regs_helper (x, NULL_RTX);
/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
registers.  */
6965 extract_mentioned_regs_helper (rtx x, rtx accum)
/* The `repeat' label is used to turn tail-recursion into iteration.  */
6977 code = GET_CODE (x);
6981 return alloc_EXPR_LIST (0, x, accum);
6991 /* We do not run this function with arguments having side effects. */
7010 i = GET_RTX_LENGTH (code) - 1;
7011 fmt = GET_RTX_FORMAT (code);
7017 rtx tem = XEXP (x, i);
7019 /* If we are about to do the last recursive call
7020 needed at this level, change it into iteration. */
7027 accum = extract_mentioned_regs_helper (tem, accum);
7029 else if (fmt[i] == 'E')
7033 for (j = 0; j < XVECLEN (x, i); j++)
7034 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
/* Determine whether INSN is a MEM store pattern that we will consider moving.
REGS_SET_BEFORE is a bitmap of registers set before (and including) the
current insn; REGS_SET_AFTER is a bitmap of registers set after (and
including) the insn in this basic block.  We must be passing through BB
from head to end, as we are using this fact to speed things up.

The results are stored this way:

-- the first anticipatable expression is added into ANTIC_STORE_LIST
-- if the processed expression is not anticipatable, NULL_RTX is added
there instead, so that we can use it as an indicator that no further
expression of this type may be anticipatable
-- if the expression is available, it is added as head of AVAIL_STORE_LIST;
consequently, all of them but this head are dead and may be deleted.
-- if the expression is not available, the insn that causes it to fail to be
available is stored in reaching_reg.

Things are complicated a bit by the fact that there may already be stores
to the same MEM from other blocks; also, the caller must take care of the
necessary cleanup of the temporary markers after the end of the basic block.
A small worked example follows this comment.  */
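/* The promised example (a hypothetical block, for illustration only):

     bb:
       *p = a;   -- anticipatable: nothing kills *p earlier in the block
       b = *p;   -- a load of *p
       *p = c;   -- available: nothing kills *p later in the block, but not
                    anticipatable, because of the load just above

   Only the first store may be moved up across the block entry, and only
   the last one may be moved down past its exit.  */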
7064 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
7066 struct ls_expr * ptr;
7068 int check_anticipatable, check_available;
7069 basic_block bb = BLOCK_FOR_INSN (insn);
7071 set = single_set (insn);
7075 dest = SET_DEST (set);
7077 if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
7078 || GET_MODE (dest) == BLKmode)
7081 if (side_effects_p (dest))
/* If we are handling exceptions, we must be careful with memory references
that may trap.  If we are not, the behavior is undefined, so we may just
continue.  */
7087 if (flag_non_call_exceptions && may_trap_p (dest))
7090 ptr = ldst_entry (dest);
7091 if (!ptr->pattern_regs)
7092 ptr->pattern_regs = extract_mentioned_regs (dest);
7094 /* Do not check for anticipatability if we either found one anticipatable
7095 store already, or tested for one and found out that it was killed. */
7096 check_anticipatable = 0;
7097 if (!ANTIC_STORE_LIST (ptr))
7098 check_anticipatable = 1;
7101 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
7103 && BLOCK_FOR_INSN (tmp) != bb)
7104 check_anticipatable = 1;
7106 if (check_anticipatable)
7108 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
7112 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
7113 ANTIC_STORE_LIST (ptr));
/* It is not necessary to check whether the store is available if we did
it successfully before; if we failed before, do not bother to check
until we reach the insn that caused us to fail.  */
7119 check_available = 0;
7120 if (!AVAIL_STORE_LIST (ptr))
7121 check_available = 1;
7124 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
7125 if (BLOCK_FOR_INSN (tmp) != bb)
7126 check_available = 1;
7128 if (check_available)
/* Check whether we have already reached the insn at which the check
failed last time.  */
7132 if (LAST_AVAIL_CHECK_FAILURE (ptr))
7135 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
7136 tmp = PREV_INSN (tmp))
7139 check_available = 0;
7142 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
7144 &LAST_AVAIL_CHECK_FAILURE (ptr));
7146 if (!check_available)
7147 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
7150 /* Find available and anticipatable stores. */
7153 compute_store_table (void)
7159 int *last_set_in, *already_set;
7160 struct ls_expr * ptr, **prev_next_ptr_ptr;
7162 max_gcse_regno = max_reg_num ();
7164 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
7166 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
7168 last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
7169 already_set = xmalloc (sizeof (int) * max_gcse_regno);
7171 /* Find all the stores we care about. */
7174 /* First compute the registers set in this block. */
7175 memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
7176 regvec = last_set_in;
7178 for (insn = bb->head;
7179 insn != NEXT_INSN (bb->end);
7180 insn = NEXT_INSN (insn))
7182 if (! INSN_P (insn))
7185 if (GET_CODE (insn) == CALL_INSN)
7187 bool clobbers_all = false;
7188 #ifdef NON_SAVING_SETJMP
7189 if (NON_SAVING_SETJMP
7190 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7191 clobbers_all = true;
7194 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7196 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7197 last_set_in[regno] = INSN_UID (insn);
7200 pat = PATTERN (insn);
7201 compute_store_table_current_insn = insn;
7202 note_stores (pat, reg_set_info, NULL);
7205 /* Record the set registers. */
7206 for (regno = 0; regno < max_gcse_regno; regno++)
7207 if (last_set_in[regno])
7208 SET_BIT (reg_set_in_block[bb->index], regno);
7210 /* Now find the stores. */
7211 memset (already_set, 0, sizeof (int) * max_gcse_regno);
7212 regvec = already_set;
7213 for (insn = bb->head;
7214 insn != NEXT_INSN (bb->end);
7215 insn = NEXT_INSN (insn))
7217 if (! INSN_P (insn))
7220 if (GET_CODE (insn) == CALL_INSN)
7222 bool clobbers_all = false;
7223 #ifdef NON_SAVING_SETJMP
7224 if (NON_SAVING_SETJMP
7225 && find_reg_note (insn, REG_SETJMP, NULL_RTX))
7226 clobbers_all = true;
7229 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7231 || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
7232 already_set[regno] = 1;
7235 pat = PATTERN (insn);
7236 note_stores (pat, reg_set_info, NULL);
7238 /* Now that we've marked regs, look for stores. */
7239 find_moveable_store (insn, already_set, last_set_in);
7241 /* Unmark regs that are no longer set. */
7242 for (regno = 0; regno < max_gcse_regno; regno++)
7243 if (last_set_in[regno] == INSN_UID (insn))
7244 last_set_in[regno] = 0;
7247 /* Clear temporary marks. */
7248 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7250 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
7251 if (ANTIC_STORE_LIST (ptr)
7252 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
7253 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
7257 /* Remove the stores that are not available anywhere, as there will
7258 be no opportunity to optimize them. */
7259 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
7261 ptr = *prev_next_ptr_ptr)
7263 if (!AVAIL_STORE_LIST (ptr))
7265 *prev_next_ptr_ptr = ptr->next;
7266 free_ldst_entry (ptr);
7269 prev_next_ptr_ptr = &ptr->next;
7272 ret = enumerate_ldsts ();
7276 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
7277 print_ldst_list (gcse_file);
/* Check to see if the load X is aliased with STORE_PATTERN.
AFTER is true if we are checking the case when STORE_PATTERN occurs
after X.  */
7290 load_kills_store (rtx x, rtx store_pattern, int after)
7293 return anti_dependence (x, store_pattern);
7295 return true_dependence (store_pattern, GET_MODE (store_pattern), x,
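/* For example (illustration only): with STORE_PATTERN (mem:SI (reg:SI 100)),
   a load from (mem:SI (reg:SI 100)) in a preceding insn forms an anti
   (write-after-read) dependence with the store, and in a following insn a
   true (read-after-write) dependence; either one kills the store.  */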
7299 /* Go through the entire insn X, looking for any loads which might alias
7300 STORE_PATTERN. Return true if found.
7301 AFTER is true if we are checking the case when STORE_PATTERN occurs
7302 after the insn X. */
7305 find_loads (rtx x, rtx store_pattern, int after)
7314 if (GET_CODE (x) == SET)
7317 if (GET_CODE (x) == MEM)
7319 if (load_kills_store (x, store_pattern, after))
7323 /* Recursively process the insn. */
7324 fmt = GET_RTX_FORMAT (GET_CODE (x));
7326 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
7329 ret |= find_loads (XEXP (x, i), store_pattern, after);
7330 else if (fmt[i] == 'E')
7331 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7332 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
/* Check if INSN kills the store pattern X (is aliased with it).
AFTER is true if we are checking the case when store X occurs
after the insn.  Return true if it does.  */
7342 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
7344 rtx reg, base, note;
7349 if (GET_CODE (insn) == CALL_INSN)
7351 /* A normal or pure call might read from pattern,
7352 but a const call will not. */
7353 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
/* But even a const call reads its parameters.  Check whether the
base of some register used in the mem is the stack pointer.  */
7358 for (reg = x_regs; reg; reg = XEXP (reg, 1))
7360 base = find_base_term (XEXP (reg, 0));
7362 || (GET_CODE (base) == ADDRESS
7363 && GET_MODE (base) == Pmode
7364 && XEXP (base, 0) == stack_pointer_rtx))
7371 if (GET_CODE (PATTERN (insn)) == SET)
7373 rtx pat = PATTERN (insn);
7374 rtx dest = SET_DEST (pat);
7376 if (GET_CODE (dest) == SIGN_EXTRACT
7377 || GET_CODE (dest) == ZERO_EXTRACT)
7378 dest = XEXP (dest, 0);
7380 /* Check for memory stores to aliased objects. */
7381 if (GET_CODE (dest) == MEM
7382 && !expr_equiv_p (dest, x))
7386 if (output_dependence (dest, x))
7391 if (output_dependence (x, dest))
7395 if (find_loads (SET_SRC (pat), x, after))
7398 else if (find_loads (PATTERN (insn), x, after))
7401 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
7402 location aliased with X, then this insn kills X. */
7403 note = find_reg_equal_equiv_note (insn);
7406 note = XEXP (note, 0);
7408 /* However, if the note represents a must alias rather than a may
7409 alias relationship, then it does not kill X. */
7410 if (expr_equiv_p (note, x))
7413 /* See if there are any aliased loads in the note. */
7414 return find_loads (note, x, after);
/* Returns true if the expression X is loaded or clobbered on or after INSN
within basic block BB.  REGS_SET_AFTER is a bitmap of registers set in
or after the insn.  X_REGS is a list of registers mentioned in X.  If the
store is killed, return in FAIL_INSN the last insn in which that occurs.  */
7423 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
7424 int *regs_set_after, rtx *fail_insn)
7426 rtx last = bb->end, act;
7428 if (!store_ops_ok (x_regs, regs_set_after))
7430 /* We do not know where it will happen. */
7432 *fail_insn = NULL_RTX;
7436 /* Scan from the end, so that fail_insn is determined correctly. */
7437 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
7438 if (store_killed_in_insn (x, x_regs, act, false))
/* Returns true if the expression X is loaded or clobbered on or before INSN
within basic block BB.  X_REGS is a list of registers mentioned in X.
REGS_SET_BEFORE is a bitmap of registers set before or in this insn.  */
7452 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
7453 int *regs_set_before)
7455 rtx first = bb->head;
7457 if (!store_ops_ok (x_regs, regs_set_before))
7460 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
7461 if (store_killed_in_insn (x, x_regs, insn, true))
7467 /* Fill in available, anticipatable, transparent and kill vectors in
7468 STORE_DATA, based on lists of available and anticipatable stores. */
7470 build_store_vectors (void)
7473 int *regs_set_in_block;
7475 struct ls_expr * ptr;
7478 /* Build the gen_vector. This is any store in the table which is not killed
7479 by aliasing later in its block. */
7480 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
7481 sbitmap_vector_zero (ae_gen, last_basic_block);
7483 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
7484 sbitmap_vector_zero (st_antloc, last_basic_block);
7486 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7488 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7490 insn = XEXP (st, 0);
7491 bb = BLOCK_FOR_INSN (insn);
/* If we've already seen an available expression in this block,
we can delete this one (it occurs earlier in the block).  We'll
copy the SRC expression to an unused register in case there
are any side effects.  */
7497 if (TEST_BIT (ae_gen[bb->index], ptr->index))
7499 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
7501 fprintf (gcse_file, "Removing redundant store:\n");
7502 replace_store_insn (r, XEXP (st, 0), bb, ptr);
7505 SET_BIT (ae_gen[bb->index], ptr->index);
7508 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
7510 insn = XEXP (st, 0);
7511 bb = BLOCK_FOR_INSN (insn);
7512 SET_BIT (st_antloc[bb->index], ptr->index);
7516 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
7517 sbitmap_vector_zero (ae_kill, last_basic_block);
7519 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
7520 sbitmap_vector_zero (transp, last_basic_block);
7521 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
7525 for (regno = 0; regno < max_gcse_regno; regno++)
7526 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
7528 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7530 if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
7531 bb, regs_set_in_block, NULL))
7533 /* It should not be necessary to consider the expression
7534 killed if it is both anticipatable and available. */
7535 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
7536 || !TEST_BIT (ae_gen[bb->index], ptr->index))
7537 SET_BIT (ae_kill[bb->index], ptr->index);
7540 SET_BIT (transp[bb->index], ptr->index);
7544 free (regs_set_in_block);
7548 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
7549 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
7550 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
7551 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
7555 /* Insert an instruction at the beginning of a basic block, and update
7556 the BLOCK_HEAD if needed. */
7559 insert_insn_start_bb (rtx insn, basic_block bb)
7561 /* Insert at start of successor block. */
7562 rtx prev = PREV_INSN (bb->head);
7563 rtx before = bb->head;
7566 if (GET_CODE (before) != CODE_LABEL
7567 && (GET_CODE (before) != NOTE
7568 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
7571 if (prev == bb->end)
7573 before = NEXT_INSN (before);
7576 insn = emit_insn_after (insn, prev);
7580 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
7582 print_inline_rtx (gcse_file, insn, 6);
7583 fprintf (gcse_file, "\n");
7587 /* This routine will insert a store on an edge. EXPR is the ldst entry for
7588 the memory reference, and E is the edge to insert it on. Returns nonzero
7589 if an edge insertion was performed. */
7592 insert_store (struct ls_expr * expr, edge e)
/* We did all the deletes before this insert, so if we didn't delete a
store, then we haven't set the reaching reg yet either.  */
7600 if (expr->reaching_reg == NULL_RTX)
7603 if (e->flags & EDGE_FAKE)
7606 reg = expr->reaching_reg;
7607 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
/* If we are inserting this expression on ALL predecessor edges of a BB,
insert it at the start of the BB, and reset the insert bits on the
other edges so we don't try to insert it there as well.  */
7613 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7614 if (!(tmp->flags & EDGE_FAKE))
7616 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7617 if (index == EDGE_INDEX_NO_EDGE)
7619 if (! TEST_BIT (pre_insert_map[index], expr->index))
/* If tmp is NULL, we found an insertion on every edge; blank the
insertion vector for these edges, and insert at the start of the BB.  */
7625 if (!tmp && bb != EXIT_BLOCK_PTR)
7627 for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
7629 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
7630 RESET_BIT (pre_insert_map[index], expr->index);
7632 insert_insn_start_bb (insn, bb);
/* We can't insert on this edge, so we'll insert at the head of the
successor block.  See Morgan, sec 10.5.  */
7638 if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
7640 insert_insn_start_bb (insn, bb);
7644 insert_insn_on_edge (insn, e);
7648 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
7649 e->src->index, e->dest->index);
7650 print_inline_rtx (gcse_file, insn, 6);
7651 fprintf (gcse_file, "\n");
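/* A sketch of the all-predecessor case handled above: if block B has
   exactly two predecessors P1 and P2, and pre_insert_map requests the
   store on both (P1->B) and (P2->B), then a single copy at the start of B
   is equivalent and avoids splitting either edge; the bits for both edges
   are cleared so the store is not inserted twice.  */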
7657 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
7658 memory location in SMEXPR set in basic block BB.
7660 This could be rather expensive. */
7663 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
7665 edge *stack = xmalloc (sizeof (edge) * n_basic_blocks), act;
7666 sbitmap visited = sbitmap_alloc (last_basic_block);
7668 rtx last, insn, note;
7669 rtx mem = smexpr->pattern;
7671 sbitmap_zero (visited);
7681 sbitmap_free (visited);
7684 act = stack[--stack_top];
7688 if (bb == EXIT_BLOCK_PTR
7689 || TEST_BIT (visited, bb->index)
7690 || TEST_BIT (ae_kill[bb->index], smexpr->index))
7692 act = act->succ_next;
7695 SET_BIT (visited, bb->index);
7697 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
7699 for (last = ANTIC_STORE_LIST (smexpr);
7700 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
7701 last = XEXP (last, 1))
7703 last = XEXP (last, 0);
7706 last = NEXT_INSN (bb->end);
7708 for (insn = bb->head; insn != last; insn = NEXT_INSN (insn))
7711 note = find_reg_equal_equiv_note (insn);
7712 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7716 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7718 remove_note (insn, note);
7720 act = act->succ_next;
7724 stack[stack_top++] = act;
7730 /* This routine will replace a store with a SET to a specified register. */
7733 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
7735 rtx insn, mem, note, set, ptr;
7737 mem = smexpr->pattern;
7738 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
7739 insn = emit_insn_after (insn, del);
7744 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
7745 print_inline_rtx (gcse_file, del, 6);
7746 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
7747 print_inline_rtx (gcse_file, insn, 6);
7748 fprintf (gcse_file, "\n");
7751 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
7752 if (XEXP (ptr, 0) == del)
7754 XEXP (ptr, 0) = insn;
/* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
they are no longer accurate if they are reached by this definition,
so drop them.  */
7762 for (; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
7765 set = single_set (insn);
7768 if (expr_equiv_p (SET_DEST (set), mem))
7770 note = find_reg_equal_equiv_note (insn);
7771 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
7775 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
7777 remove_note (insn, note);
7779 remove_reachable_equiv_notes (bb, smexpr);
7783 /* Delete a store, but copy the value that would have been stored into
7784 the reaching_reg for later storing. */
7787 delete_store (struct ls_expr * expr, basic_block bb)
7791 if (expr->reaching_reg == NULL_RTX)
7792 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
7794 reg = expr->reaching_reg;
7796 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
7799 if (BLOCK_FOR_INSN (del) == bb)
7801 /* We know there is only one since we deleted redundant
7802 ones during the available computation. */
7803 replace_store_insn (reg, del, bb, expr);
7809 /* Free memory used by store motion. */
7812 free_store_memory (void)
7817 sbitmap_vector_free (ae_gen);
7819 sbitmap_vector_free (ae_kill);
7821 sbitmap_vector_free (transp);
7823 sbitmap_vector_free (st_antloc);
7825 sbitmap_vector_free (pre_insert_map);
7827 sbitmap_vector_free (pre_delete_map);
7828 if (reg_set_in_block)
7829 sbitmap_vector_free (reg_set_in_block);
7831 ae_gen = ae_kill = transp = st_antloc = NULL;
7832 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
7835 /* Perform store motion. Much like gcse, except we move expressions the
7836 other way by looking at the flowgraph in reverse. */
7843 struct ls_expr * ptr;
7844 int update_flow = 0;
7848 fprintf (gcse_file, "before store motion\n");
7849 print_rtl (gcse_file, get_insns ());
7852 init_alias_analysis ();
7854 /* Find all the available and anticipatable stores. */
7855 num_stores = compute_store_table ();
7856 if (num_stores == 0)
7858 sbitmap_vector_free (reg_set_in_block);
7859 end_alias_analysis ();
7863 /* Now compute kill & transp vectors. */
7864 build_store_vectors ();
7865 add_noreturn_fake_exit_edges ();
7866 connect_infinite_loops_to_exit ();
7868 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
7869 st_antloc, ae_kill, &pre_insert_map,
7872 /* Now we want to insert the new stores which are going to be needed. */
7873 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
7876 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
7877 delete_store (ptr, bb);
7879 for (x = 0; x < NUM_EDGES (edge_list); x++)
7880 if (TEST_BIT (pre_insert_map[x], ptr->index))
7881 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
7885 commit_edge_insertions ();
7887 free_store_memory ();
7888 free_edge_list (edge_list);
7889 remove_fake_edges ();
7890 end_alias_analysis ();
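/* As a source-level illustration (a sketch of the net effect, not compiler
   output): assuming nothing in the loop reads or clobbers *p, store motion
   transforms

     while (test ())
       {
	 ...
	 *p = compute ();	-- a store executed on every iteration
       }

   into

     while (test ())
       {
	 ...
	 tmp = compute ();	-- value kept in the reaching register
       }
     *p = tmp;			-- a single store, moved towards the exit

   where 'tmp' stands for the reaching register.  */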
7894 /* Entry point for jump bypassing optimization pass. */
7897 bypass_jumps (FILE *file)
7901 /* We do not construct an accurate cfg in functions which call
7902 setjmp, so just punt to be safe. */
7903 if (current_function_calls_setjmp)
7906 /* For calling dump_foo fns from gdb. */
7907 debug_stderr = stderr;
7910 /* Identify the basic block information for this function, including
7911 successors and predecessors. */
7912 max_gcse_regno = max_reg_num ();
7915 dump_flow_info (file);
/* Return if there's nothing to do, or it is too expensive.  */
7918 if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
7921 gcc_obstack_init (&gcse_obstack);
7924 /* We need alias. */
7925 init_alias_analysis ();
/* Record where pseudo-registers are set.  This data is kept accurate
during each pass.  ??? We could also record hard-reg information here
[since it's unchanging], however it is currently done during hash table
computation.

It may be tempting to compute MEM set information here too, but MEM sets
will be subject to code motion one day and thus we need to compute
information about memory sets when we build the hash tables.  */
7936 alloc_reg_set_mem (max_gcse_regno);
7937 compute_sets (get_insns ());
7939 max_gcse_regno = max_reg_num ();
7940 alloc_gcse_mem (get_insns ());
7941 changed = one_cprop_pass (1, 1, 1);
7946 fprintf (file, "BYPASS of %s: %d basic blocks, ",
7947 current_function_name, n_basic_blocks);
7948 fprintf (file, "%d bytes\n\n", bytes_used);
7951 obstack_free (&gcse_obstack, NULL);
7952 free_reg_set_mem ();
7954 /* We are finished with alias. */
7955 end_alias_analysis ();
7956 allocate_reg_info (max_reg_num (), FALSE, FALSE);
7961 /* Return true if the graph is too expensive to optimize. PASS is the
7962 optimization about to be performed. */
7965 is_too_expensive (const char *pass)
/* Trying to perform global optimizations on flow graphs which have
a high connectivity will take a long time and is unlikely to be
particularly useful.

In normal circumstances a cfg should have about twice as many
edges as blocks.  But we do not want to punish small functions
which have a couple of switch statements.  Rather than simply
threshold the number of blocks, use something with a more
graceful degradation.  */
7976 if (n_edges > 20000 + n_basic_blocks * 4)
7978 if (warn_disabled_optimization)
7979 warning ("%s: %d basic blocks and %d edges/basic block",
7980 pass, n_basic_blocks, n_edges / n_basic_blocks);
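/* Worked numbers for the threshold above (illustrative): a function with
   1000 basic blocks is allowed up to 20000 + 4 * 1000 = 24000 edges
   before the pass is skipped, i.e. an average of 24 edges per block,
   well above the roughly two edges per block of a typical cfg.  */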
7985 /* If allocating memory for the cprop bitmap would take up too much
7986 storage it's better just to disable the optimization. */
7988 * SBITMAP_SET_SIZE (max_reg_num ())
7989 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
7991 if (warn_disabled_optimization)
7992 warning ("%s: %d basic blocks and %d registers",
7993 pass, n_basic_blocks, max_reg_num ());
8001 #include "gt-gcse.h"