/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "function.h"

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
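
   For illustration: if a, b and x live in pseudo-regs 58, 59 and 60, the
   statement x = a + b appears as the insn

       (set (reg:SI 60) (plus:SI (reg:SI 58) (reg:SI 59)))

   and it is the (plus ...) source that gets entered in the expression
   hash table.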

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
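
   For illustration, suppose only one arm of a diamond computes a + b:

       if (cond)
         x = a + b;
       ...
       y = a + b;      <- partially redundant

   Deletion replaces y = a + b by y = t for a new pseudo t; insertion then
   places t = a + b on the arm that did not compute it and t = x after the
   arm that did, so t reaches the former redundancy on every path.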

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much, as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */

static FILE *debug_stderr;
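
/* For example, one can print the expression hash table from a debugger
   with a call such as (hypothetical GDB session):

     (gdb) call dump_hash_table (debug_stderr, "expr", &expr_hash_table)  */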

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Nonzero if this is the assignment (copy propagation) hash table,
     zero if it is the expression hash table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
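
/* For illustration: given the insn (set (reg 60) (plus (reg 58) (reg 59))),
   the expression table records the (plus ...) source, hashed on its
   operands, while the set table records the whole SET, hashed on register
   60 alone; see hash_expr and hash_set below.  */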

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
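
/* For illustration: if the insn stream is insn 5, note 8, insn 12, then
   uid_cuid[5] == 0 and uid_cuid[12] == 1; the note gets the cuid that the
   following real insn will receive.  cuid_insn[] maps 0 and 1 back to the
   two real insns, giving a dense ordering usable for first_set/last_set
   comparisons within a block.  */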

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
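
/* Illustration only (hypothetical helper, not part of the pass): with the
   representation above, asking whether pseudo REGNO is set in block
   BB_INDEX iterates over the number of times the register is set, not
   over the number of basic blocks.  */
#if 0
static int
reg_set_in_bb_p (int regno, int bb_index)
{
  reg_set *r;

  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    if (r->bb_index == bb_index)
      return 1;
  return 0;
}
#endif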

/* This is a list of expressions which are MEMs and will be used by load
   motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself. (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance. (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen;

static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (rtx);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, int, int);
static bool constprop_register (rtx, rtx, rtx, int);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, int, rtx*);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx*);
static void local_cprop_pass (int);
static bool is_too_expensive (const char *);

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

int
gcse_main (rtx f, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();
  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, 0, 0);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
        /* Do nothing.  */ ;
      else
        {
          timevar_push (TV_PRE);
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets (f);
          run_jump_opt_after_gcse = 1;
          timevar_pop (TV_PRE);
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
        {
          timevar_push (TV_HOIST);
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem (f);
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
          timevar_pop (TV_HOIST);
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, 1, 0);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;
  end_sequence ();
}

/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
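
/* Illustration only (hypothetical usage): callers guard reg/reg copies on
   this predicate, e.g.

     if (can_copy_p (GET_MODE (dest)))
       ...consider DEST's SET for GCSE...

   as hash_scan_set does below.  */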

/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (rtx f)
{
  int i;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        uid_cuid[INSN_UID (insn)] = i++;
      else
        uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
}

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
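
/* For illustration: in a block whose only insns are

       r5 = r1 + r2
       r1 = r3

   the expression r1 + r2 is locally anticipatable (computed before its
   operands change), not computed/available (r1 is modified before the
   block end), and not transparent (the block kills it).  */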

static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (rtx f)
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;

/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}

/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}

/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */

  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */

static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  while (list_entry)
    {
      rtx setter;
      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;

      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}

/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}

/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
                      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        antic_occr = NULL;

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          antic_occr->insn = insn;
          antic_occr->next = cur_expr->antic_occr;
          antic_occr->deleted_p = 0;
          cur_expr->antic_occr = antic_occr;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
        {
          /* Found another instance of the expression in the same basic block.
             Prefer this occurrence to the currently recorded one.  We want
             the last one in the block and the block is scanned from start
             to end.  */
          avail_occr->insn = insn;
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          avail_occr->insn = insn;
          avail_occr->next = cur_expr->avail_occr;
          avail_occr->deleted_p = 0;
          cur_expr->avail_occr = avail_occr;
        }
    }
}

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}

/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
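
/* For illustration: (compare (reg:SI 60) (reg:SI 60)) always evaluates the
   same way, so it may be propagated like a constant; the FLOAT_MODE_P
   checks above exclude floating point modes, where x == x can be false
   for NaNs.  */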

/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && gcse_constant_p (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point, only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means address of parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((REG_P (src)
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BB_END (BLOCK_FOR_INSN (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
          /* Only record sets of pseudo-regs in the hash table.  */
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instructions after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && want_to_gcse_p (dest)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point, only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means address of parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* Stores are never anticipatable.  */
          int antic_p = 0;
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = oprs_available_p (dest, insn)
                        && ! JUMP_P (insn);

          /* Record the memory expression (DEST) in the hash table.  */
          insert_expr_in_table (dest, GET_MODE (dest), insn,
                                antic_p, avail_p, table);
        }
    }
}

static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
1889 /* Record register first/last/block set information for REGNO in INSN.
1891 first_set records the first place in the block where the register
1892 is set and is used to compute "anticipatability".
1894 last_set records the last place in the block where the register
1895 is set and is used to compute "availability".
1897 last_bb records the block for which first_set and last_set are
1898 valid, as a quick test to invalidate them.
1900 reg_set_in_block records whether the register is set in the block
1901 and is used to compute "transparency". */
1904 record_last_reg_set_info (rtx insn, int regno)
1906 struct reg_avail_info *info = ®_avail_info[regno];
1907 int cuid = INSN_CUID (insn);
1909 info->last_set = cuid;
1910 if (info->last_bb != current_bb)
1912 info->last_bb = current_bb;
1913 info->first_set = cuid;
1914 SET_BIT (reg_set_in_block[current_bb->index], regno);
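/* A sketch of how these fields are used (hypothetical CUIDs): if reg 100
   is set at CUID 3 and again at CUID 8 within a block, then first_set is
   3, so an expression using reg 100 is locally anticipatable only when it
   occurs before CUID 3; last_set is 8, so an occurrence computes an
   available value only when it appears after CUID 8; and the
   reg_set_in_block bit makes expressions mentioning reg 100
   non-transparent in this block.  */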
1919 /* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
1920 Note we store a pair of elements in the list, so they have to be
1921 taken off pairwise. */
1924 canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
1927 rtx dest_addr, insn;
1930 while (GET_CODE (dest) == SUBREG
1931 || GET_CODE (dest) == ZERO_EXTRACT
1932 || GET_CODE (dest) == STRICT_LOW_PART)
1933 dest = XEXP (dest, 0);
/* If DEST is not a MEM, then it will not conflict with a load.  Note
   that function calls are assumed to clobber memory, but are handled
   elsewhere.  */
1942 dest_addr = get_addr (XEXP (dest, 0));
1943 dest_addr = canon_rtx (dest_addr);
1944 insn = (rtx) v_insn;
1945 bb = BLOCK_NUM (insn);
1947 canon_modify_mem_list[bb] =
1948 alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
1949 canon_modify_mem_list[bb] =
1950 alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
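/* Resulting list layout, illustrated (hypothetical entries): after two
   stores the list reads

       canon_modify_mem_list[bb]: dest_2 -> addr_2 -> dest_1 -> addr_1

   i.e. each store contributes a (canonical MEM, canonical address) pair,
   newest first, and compute_transp below takes them off pairwise.  */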
1953 /* Record memory modification information for INSN. We do not actually care
1954 about the memory location(s) that are set, or even how they are set (consider
1955 a CALL_INSN). We merely need to record which insns modify memory. */
1958 record_last_mem_set_info (rtx insn)
1960 int bb = BLOCK_NUM (insn);
/* load_killed_in_block_p will handle the case of calls clobbering
   everything.  */
1964 modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
1965 bitmap_set_bit (modify_mem_list_set, bb);
/* Note that traversals of this loop (other than for freeing)
1970 will break after encountering a CALL_INSN. So, there's no
1971 need to insert a pair of items, as canon_list_insert does. */
1972 canon_modify_mem_list[bb] =
1973 alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
1979 /* Called from compute_hash_table via note_stores to handle one
1980 SET or CLOBBER in an insn. DATA is really the instruction in which
1981 the SET is taking place. */
1984 record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
1986 rtx last_set_insn = (rtx) data;
1988 if (GET_CODE (dest) == SUBREG)
1989 dest = SUBREG_REG (dest);
1992 record_last_reg_set_info (last_set_insn, REGNO (dest));
1993 else if (MEM_P (dest)
1994 /* Ignore pushes, they clobber nothing. */
1995 && ! push_operand (dest, GET_MODE (dest)))
1996 record_last_mem_set_info (last_set_insn);
1999 /* Top level function to create an expression or assignment hash table.
2001 Expression entries are placed in the hash table if
2002 - they are of the form (set (pseudo-reg) src),
2003 - src is something we want to perform GCSE on,
2004 - none of the operands are subsequently modified in the block
2006 Assignment entries are placed in the hash table if
2007 - they are of the form (set (pseudo-reg) src),
2008 - src is something we want to perform const/copy propagation on,
2009 - none of the operands or target are subsequently modified in the block
2011 Currently src must be a pseudo-reg or a const_int.
2013 TABLE is the table computed. */
2016 compute_hash_table_work (struct hash_table *table)
2020 /* While we compute the hash table we also compute a bit array of which
2021 registers are set in which blocks.
??? This isn't needed during const/copy propagation, but it's cheap to
   compute.  Later.  */
2024 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
/* Re-cache any INSN_LIST nodes we have allocated.  */
2027 clear_modify_mem_tables ();
2028 /* Some working arrays used to track first and last set in each block. */
2029 reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));
2031 for (i = 0; i < max_gcse_regno; ++i)
2032 reg_avail_info[i].last_bb = NULL;
2034 FOR_EACH_BB (current_bb)
2038 int in_libcall_block;
2040 /* First pass over the instructions records information used to
2041 determine when registers and memory are first and last set.
2042 ??? hard-reg reg_set_in_block computation
2043 could be moved to compute_sets since they currently don't change. */
2045 for (insn = BB_HEAD (current_bb);
2046 insn && insn != NEXT_INSN (BB_END (current_bb));
2047 insn = NEXT_INSN (insn))
2049 if (! INSN_P (insn))
2054 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2055 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2056 record_last_reg_set_info (insn, regno);
2061 note_stores (PATTERN (insn), record_last_set_info, insn);
2064 /* Insert implicit sets in the hash table. */
2066 && implicit_sets[current_bb->index] != NULL_RTX)
2067 hash_scan_set (implicit_sets[current_bb->index],
2068 BB_HEAD (current_bb), table);
2070 /* The next pass builds the hash table. */
2072 for (insn = BB_HEAD (current_bb), in_libcall_block = 0;
2073 insn && insn != NEXT_INSN (BB_END (current_bb));
2074 insn = NEXT_INSN (insn))
2077 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2078 in_libcall_block = 1;
2079 else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2080 in_libcall_block = 0;
2081 hash_scan_insn (insn, table, in_libcall_block);
2082 if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
2083 in_libcall_block = 0;
2087 free (reg_avail_info);
2088 reg_avail_info = NULL;
2091 /* Allocate space for the set/expr hash TABLE.
2092 N_INSNS is the number of instructions in the function.
2093 It is used to determine the number of buckets to use.
SET_P determines whether the set or the expression table will
   be created.  */
2098 alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
2102 table->size = n_insns / 4;
2103 if (table->size < 11)
2106 /* Attempt to maintain efficient use of hash table.
2107 Making it an odd number is simplest for now.
2108 ??? Later take some measurements. */
2110 n = table->size * sizeof (struct expr *);
2111 table->table = gmalloc (n);
2112 table->set_p = set_p;
2115 /* Free things allocated by alloc_hash_table. */
2118 free_hash_table (struct hash_table *table)
2120 free (table->table);
/* Compute the hash TABLE, which is used either for copy/const
   propagation or as the expression hash table.  */
2127 compute_hash_table (struct hash_table *table)
2129 /* Initialize count of number of entries in hash table. */
2131 memset (table->table, 0, table->size * sizeof (struct expr *));
2133 compute_hash_table_work (table);
2136 /* Expression tracking support. */
2138 /* Lookup REGNO in the set TABLE. The result is a pointer to the
2139 table entry, or NULL if not found. */
2141 static struct expr *
2142 lookup_set (unsigned int regno, struct hash_table *table)
2144 unsigned int hash = hash_set (regno, table->size);
2147 expr = table->table[hash];
2149 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2150 expr = expr->next_same_hash;
2155 /* Return the next entry for REGNO in list EXPR. */
2157 static struct expr *
2158 next_set (unsigned int regno, struct expr *expr)
2161 expr = expr->next_same_hash;
2162 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
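/* Typical use of the two routines above, as in find_avail_set and
   find_bypass_set below (a sketch, using this file's own tables):

       struct expr *set = lookup_set (regno, &set_hash_table);
       while (set && ! TEST_BIT (cprop_avin[bb], set->bitmap_index))
         set = next_set (regno, set);

   i.e. walk the chain of sets of REGNO until one satisfies the
   availability test of interest.  */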
2167 /* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
2168 types may be mixed. */
2171 free_insn_expr_list_list (rtx *listp)
2175 for (list = *listp; list ; list = next)
2177 next = XEXP (list, 1);
2178 if (GET_CODE (list) == EXPR_LIST)
2179 free_EXPR_LIST_node (list);
2181 free_INSN_LIST_node (list);
2187 /* Clear canon_modify_mem_list and modify_mem_list tables. */
2189 clear_modify_mem_tables (void)
2194 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
2196 free_INSN_LIST_list (modify_mem_list + i);
2197 free_insn_expr_list_list (canon_modify_mem_list + i);
2199 bitmap_clear (modify_mem_list_set);
2202 /* Release memory used by modify_mem_list_set. */
2205 free_modify_mem_tables (void)
2207 clear_modify_mem_tables ();
2208 free (modify_mem_list);
2209 free (canon_modify_mem_list);
2210 modify_mem_list = 0;
2211 canon_modify_mem_list = 0;
2214 /* Reset tables used to keep track of what's still available [since the
2215 start of the block]. */
2218 reset_opr_set_tables (void)
/* Maintain a bitmap of which regs have been set since the beginning of
   the block.  */
2222 CLEAR_REG_SET (reg_set_bitmap);
2224 /* Also keep a record of the last instruction to modify memory.
2225 For now this is very trivial, we only record whether any memory
2226 location has been modified. */
2227 clear_modify_mem_tables ();
2230 /* Return nonzero if the operands of X are not set before INSN in
2231 INSN's basic block. */
2234 oprs_not_set_p (rtx x, rtx insn)
2243 code = GET_CODE (x);
2259 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
2260 INSN_CUID (insn), x, 0))
2263 return oprs_not_set_p (XEXP (x, 0), insn);
2266 return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
2272 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2276 /* If we are about to do the last recursive call
2277 needed at this level, change it into iteration.
2278 This function is called enough to be worth it. */
2280 return oprs_not_set_p (XEXP (x, i), insn);
2282 if (! oprs_not_set_p (XEXP (x, i), insn))
2285 else if (fmt[i] == 'E')
2286 for (j = 0; j < XVECLEN (x, i); j++)
2287 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2294 /* Mark things set by a CALL. */
2297 mark_call (rtx insn)
2299 if (! CONST_OR_PURE_CALL_P (insn))
2300 record_last_mem_set_info (insn);
2303 /* Mark things set by a SET. */
2306 mark_set (rtx pat, rtx insn)
2308 rtx dest = SET_DEST (pat);
2310 while (GET_CODE (dest) == SUBREG
2311 || GET_CODE (dest) == ZERO_EXTRACT
2312 || GET_CODE (dest) == STRICT_LOW_PART)
2313 dest = XEXP (dest, 0);
2316 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
2317 else if (MEM_P (dest))
2318 record_last_mem_set_info (insn);
2320 if (GET_CODE (SET_SRC (pat)) == CALL)
2324 /* Record things set by a CLOBBER. */
2327 mark_clobber (rtx pat, rtx insn)
2329 rtx clob = XEXP (pat, 0);
2331 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2332 clob = XEXP (clob, 0);
2335 SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
2337 record_last_mem_set_info (insn);
2340 /* Record things set by INSN.
2341 This data is used by oprs_not_set_p. */
2344 mark_oprs_set (rtx insn)
2346 rtx pat = PATTERN (insn);
2349 if (GET_CODE (pat) == SET)
2350 mark_set (pat, insn);
2351 else if (GET_CODE (pat) == PARALLEL)
2352 for (i = 0; i < XVECLEN (pat, 0); i++)
2354 rtx x = XVECEXP (pat, 0, i);
2356 if (GET_CODE (x) == SET)
2358 else if (GET_CODE (x) == CLOBBER)
2359 mark_clobber (x, insn);
2360 else if (GET_CODE (x) == CALL)
2364 else if (GET_CODE (pat) == CLOBBER)
2365 mark_clobber (pat, insn);
2366 else if (GET_CODE (pat) == CALL)
2371 /* Compute copy/constant propagation working variables. */
2373 /* Local properties of assignments. */
2374 static sbitmap *cprop_pavloc;
2375 static sbitmap *cprop_absaltered;
2377 /* Global properties of assignments (computed from the local properties). */
2378 static sbitmap *cprop_avin;
2379 static sbitmap *cprop_avout;
2381 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
2382 basic blocks. N_SETS is the number of sets. */
2385 alloc_cprop_mem (int n_blocks, int n_sets)
2387 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
2388 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
2390 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
2391 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
2394 /* Free vars used by copy/const propagation. */
2397 free_cprop_mem (void)
2399 sbitmap_vector_free (cprop_pavloc);
2400 sbitmap_vector_free (cprop_absaltered);
2401 sbitmap_vector_free (cprop_avin);
2402 sbitmap_vector_free (cprop_avout);
2405 /* For each block, compute whether X is transparent. X is either an
2406 expression or an assignment [though we don't care which, for this context
2407 an assignment is treated as an expression]. For each block where an
element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */
2412 compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
2420 /* repeat is used to turn tail-recursion into iteration since GCC
2421 can't do it when there's no return value. */
2427 code = GET_CODE (x);
2433 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2436 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2437 SET_BIT (bmap[bb->index], indx);
2441 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2442 SET_BIT (bmap[r->bb_index], indx);
2447 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
2450 if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
2451 RESET_BIT (bmap[bb->index], indx);
2455 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
2456 RESET_BIT (bmap[r->bb_index], indx);
2465 rtx list_entry = canon_modify_mem_list[bb->index];
2469 rtx dest, dest_addr;
2471 if (CALL_P (XEXP (list_entry, 0)))
2474 SET_BIT (bmap[bb->index], indx);
2476 RESET_BIT (bmap[bb->index], indx);
2479 /* LIST_ENTRY must be an INSN of some kind that sets memory.
2480 Examine each hunk of memory that is modified. */
2482 dest = XEXP (list_entry, 0);
2483 list_entry = XEXP (list_entry, 1);
2484 dest_addr = XEXP (list_entry, 0);
2486 if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
2487 x, rtx_addr_varies_p))
2490 SET_BIT (bmap[bb->index], indx);
2492 RESET_BIT (bmap[bb->index], indx);
2495 list_entry = XEXP (list_entry, 1);
2518 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2522 /* If we are about to do the last recursive call
2523 needed at this level, change it into iteration.
2524 This function is called enough to be worth it. */
2531 compute_transp (XEXP (x, i), indx, bmap, set_p);
2533 else if (fmt[i] == 'E')
2534 for (j = 0; j < XVECLEN (x, i); j++)
2535 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
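/* Worked example (hypothetical): for X = (plus (reg 105) (mem (reg 106))),
   X is non-transparent in every block that sets reg 105 or reg 106, in
   every block whose recorded memory list contains a CALL_INSN (calls
   clobber memory), and in every block with a recorded store whose
   canonical address may alias the MEM per canon_true_dependence.  */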
2539 /* Top level routine to do the dataflow analysis needed by copy/const
2543 compute_cprop_data (void)
2545 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, &set_hash_table);
2546 compute_available (cprop_pavloc, cprop_absaltered,
2547 cprop_avout, cprop_avin);
2550 /* Copy/constant propagation. */
2552 /* Maximum number of register uses in an insn that we handle. */
2555 /* Table of uses found in an insn.
2556 Allocated statically to avoid alloc/free complexity and overhead. */
2557 static struct reg_use reg_use_table[MAX_USES];
2559 /* Index into `reg_use_table' while building it. */
2560 static int reg_use_count;
2562 /* Set up a list of register numbers used in INSN. The found uses are stored
2563 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
2564 and contains the number of uses in the table upon exit.
2566 ??? If a register appears multiple times we will record it multiple times.
2567 This doesn't hurt anything but it will slow things down. */
2570 find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
2577 /* repeat is used to turn tail-recursion into iteration since GCC
2578 can't do it when there's no return value. */
2583 code = GET_CODE (x);
2586 if (reg_use_count == MAX_USES)
2589 reg_use_table[reg_use_count].reg_rtx = x;
2593 /* Recursively scan the operands of this expression. */
2595 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2599 /* If we are about to do the last recursive call
2600 needed at this level, change it into iteration.
2601 This function is called enough to be worth it. */
2608 find_used_regs (&XEXP (x, i), data);
2610 else if (fmt[i] == 'E')
2611 for (j = 0; j < XVECLEN (x, i); j++)
2612 find_used_regs (&XVECEXP (x, i, j), data);
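/* The "repeat" idiom used above, in miniature (illustrative only): since
   GCC cannot turn a tail call into a jump by itself when the function
   returns void, instead of

       find_used_regs (&XEXP (x, 0), data);
       return;

   the last operand is handled along the lines of

       xptr = &XEXP (x, 0);
       goto repeat;

   reusing the current stack frame.  */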
2616 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
Returns nonzero if successful.  */
2620 try_replace_reg (rtx from, rtx to, rtx insn)
2622 rtx note = find_reg_equal_equiv_note (insn);
2625 rtx set = single_set (insn);
2627 validate_replace_src_group (from, to, insn);
2628 if (num_changes_pending () && apply_change_group ())
2631 /* Try to simplify SET_SRC if we have substituted a constant. */
2632 if (success && set && CONSTANT_P (to))
2634 src = simplify_rtx (SET_SRC (set));
2637 validate_change (insn, &SET_SRC (set), src, 0);
/* If there is already a NOTE, update the expression in it with our
   replacement.  */
2643 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
2645 if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
2647 /* If above failed and this is a single set, try to simplify the source of
2648 the set given our substitution. We could perhaps try this for multiple
2649 SETs, but it probably won't buy us anything. */
2650 src = simplify_replace_rtx (SET_SRC (set), from, to);
2652 if (!rtx_equal_p (src, SET_SRC (set))
2653 && validate_change (insn, &SET_SRC (set), src, 0))
2656 /* If we've failed to do replacement, have a single SET, don't already
2657 have a note, and have no special SET, add a REG_EQUAL note to not
2658 lose information. */
2659 if (!success && note == 0 && set != 0
2660 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT)
2661 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
/* A REG_EQUAL note may get simplified into a plain register.  We
   don't allow that, so remove the note in that case.  This ought not
   to happen, because the code above should have synthesized a
   reg-reg move, but be on the safe side.  */
2668 if (note && REG_P (XEXP (note, 0)))
2669 remove_note (insn, note);
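/* For illustration (hypothetical RTL): substituting (const_int 4) for
   (reg 101) in

       (set (reg 100) (plus (reg 101) (const_int 8)))

   lets simplify_rtx fold the source to (const_int 12); and if the folded
   form cannot be validated into the insn, the REG_EQUAL handling above
   still preserves the information.  */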
2674 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
NULL if no such set is found.  */
2677 static struct expr *
2678 find_avail_set (int regno, rtx insn)
2680 /* SET1 contains the last set found that can be returned to the caller for
2681 use in a substitution. */
2682 struct expr *set1 = 0;
2684 /* Loops are not possible here. To get a loop we would need two sets
2685 available at the start of the block containing INSN. i.e. we would
2686 need two sets like this available at the start of the block:
2688 (set (reg X) (reg Y))
2689 (set (reg Y) (reg X))
This cannot happen since the set of (reg Y) would have killed the
2692 set of (reg X) making it unavailable at the start of this block. */
2696 struct expr *set = lookup_set (regno, &set_hash_table);
2698 /* Find a set that is available at the start of the block
2699 which contains INSN. */
2702 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
2704 set = next_set (regno, set);
2707 /* If no available set was found we've reached the end of the
2708 (possibly empty) copy chain. */
2712 gcc_assert (GET_CODE (set->expr) == SET);
2714 src = SET_SRC (set->expr);
2716 /* We know the set is available.
2717 Now check that SRC is ANTLOC (i.e. none of the source operands
2718 have changed since the start of the block).
2720 If the source operand changed, we may still use it for the next
2721 iteration of this loop, but we may not use it for substitutions. */
2723 if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
2726 /* If the source of the set is anything except a register, then
2727 we have reached the end of the copy chain. */
2731 /* Follow the copy chain, i.e. start another iteration of the loop
2732 and see if we have an available copy into SRC. */
2733 regno = REGNO (src);
/* SET1 holds the last set that was available and anticipatable at
   INSN.  */
2741 /* Subroutine of cprop_insn that tries to propagate constants into
2742 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
2743 it is the instruction that immediately precedes JUMP, and must be a
2744 single SET of a register. FROM is what we will try to replace,
2745 SRC is the constant we will try to substitute for it. Returns nonzero
2746 if a change was made. */
2749 cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
2751 rtx new, set_src, note_src;
2752 rtx set = pc_set (jump);
2753 rtx note = find_reg_equal_equiv_note (jump);
2757 note_src = XEXP (note, 0);
2758 if (GET_CODE (note_src) == EXPR_LIST)
2759 note_src = NULL_RTX;
2761 else note_src = NULL_RTX;
2763 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
2764 set_src = note_src ? note_src : SET_SRC (set);
2766 /* First substitute the SETCC condition into the JUMP instruction,
then substitute the given values into this expanded JUMP.  */
2768 if (setcc != NULL_RTX
2769 && !modified_between_p (from, setcc, jump)
2770 && !modified_between_p (src, setcc, jump))
2773 rtx setcc_set = single_set (setcc);
2774 rtx setcc_note = find_reg_equal_equiv_note (setcc);
2775 setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
2776 ? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
2777 set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
2783 new = simplify_replace_rtx (set_src, from, src);
2785 /* If no simplification can be made, then try the next register. */
2786 if (rtx_equal_p (new, SET_SRC (set)))
/* If this is now a no-op, delete it; otherwise it must be a valid insn.  */
/* Ensure the value computed inside the jump insn is equivalent
   to the one computed by setcc.  */
2796 if (setcc && modified_in_p (new, setcc))
2798 if (! validate_change (jump, &SET_SRC (set), new, 0))
2800 /* When (some) constants are not valid in a comparison, and there
2801 are two registers to be replaced by constants before the entire
2802 comparison can be folded into a constant, we need to keep
2803 intermediate information in REG_EQUAL notes. For targets with
2804 separate compare insns, such notes are added by try_replace_reg.
2805 When we have a combined compare-and-branch instruction, however,
2806 we need to attach a note to the branch itself to make this
2807 optimization work. */
2809 if (!rtx_equal_p (new, note_src))
2810 set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
2814 /* Remove REG_EQUAL note after simplification. */
2816 remove_note (jump, note);
2818 /* If this has turned into an unconditional jump,
2819 then put a barrier after it so that the unreachable
2820 code will be deleted. */
2821 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
2822 emit_barrier_after (jump);
2826 /* Delete the cc0 setter. */
2827 if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
2828 delete_insn (setcc);
2831 run_jump_opt_after_gcse = 1;
2833 global_const_prop_count++;
2834 if (gcse_file != NULL)
2837 "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
2838 REGNO (from), INSN_UID (jump));
2839 print_rtl (gcse_file, src);
2840 fprintf (gcse_file, "\n");
2842 purge_dead_edges (bb);
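/* Example of a successful propagation (hypothetical RTL): with (reg 100)
   known to be (const_int 0),

       (set (pc) (if_then_else (eq (reg 100) (const_int 0))
                               (label_ref 23) (pc)))

   simplifies to (set (pc) (label_ref 23)), an unconditional jump, and
   the barrier emitted above lets the unreachable arm be deleted.  */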
2848 constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
2852 /* Check for reg or cc0 setting instructions followed by
2853 conditional branch instructions first. */
2855 && (sset = single_set (insn)) != NULL
2857 && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
2859 rtx dest = SET_DEST (sset);
2860 if ((REG_P (dest) || CC0_P (dest))
2861 && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn), from, to))
2865 /* Handle normal insns next. */
2866 if (NONJUMP_INSN_P (insn)
2867 && try_replace_reg (from, to, insn))
2870 /* Try to propagate a CONST_INT into a conditional jump.
2871 We're pretty specific about what we will handle in this
2872 code, we can extend this as necessary over time.
2874 Right now the insn in question must look like
2875 (set (pc) (if_then_else ...)) */
2876 else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
2877 return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
2881 /* Perform constant and copy propagation on INSN.
2882 The result is nonzero if a change was made. */
2885 cprop_insn (rtx insn, int alter_jumps)
2887 struct reg_use *reg_used;
2895 note_uses (&PATTERN (insn), find_used_regs, NULL);
2897 note = find_reg_equal_equiv_note (insn);
2899 /* We may win even when propagating constants into notes. */
2901 find_used_regs (&XEXP (note, 0), NULL);
for (reg_used = &reg_use_table[0]; reg_use_count > 0;
2904 reg_used++, reg_use_count--)
2906 unsigned int regno = REGNO (reg_used->reg_rtx);
2910 /* Ignore registers created by GCSE.
2911 We do this because ... */
2912 if (regno >= max_gcse_regno)
2915 /* If the register has already been set in this block, there's
2916 nothing we can do. */
2917 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
2920 /* Find an assignment that sets reg_used and is available
2921 at the start of the block. */
2922 set = find_avail_set (regno, insn);
2927 /* ??? We might be able to handle PARALLELs. Later. */
2928 gcc_assert (GET_CODE (pat) == SET);
2930 src = SET_SRC (pat);
2932 /* Constant propagation. */
2933 if (gcse_constant_p (src))
2935 if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
2938 global_const_prop_count++;
2939 if (gcse_file != NULL)
2941 fprintf (gcse_file, "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
2942 fprintf (gcse_file, "insn %d with constant ", INSN_UID (insn));
2943 print_rtl (gcse_file, src);
2944 fprintf (gcse_file, "\n");
2946 if (INSN_DELETED_P (insn))
2950 else if (REG_P (src)
2951 && REGNO (src) >= FIRST_PSEUDO_REGISTER
2952 && REGNO (src) != regno)
2954 if (try_replace_reg (reg_used->reg_rtx, src, insn))
2957 global_copy_prop_count++;
2958 if (gcse_file != NULL)
2960 fprintf (gcse_file, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
2961 regno, INSN_UID (insn));
2962 fprintf (gcse_file, " with reg %d\n", REGNO (src));
2965 /* The original insn setting reg_used may or may not now be
2966 deletable. We leave the deletion to flow. */
2967 /* FIXME: If it turns out that the insn isn't deletable,
2968 then we may have unnecessarily extended register lifetimes
2969 and made things worse. */
2977 /* Like find_used_regs, but avoid recording uses that appear in
2978 input-output contexts such as zero_extract or pre_dec. This
2979 restricts the cases we consider to those for which local cprop
2980 can legitimately make replacements. */
2983 local_cprop_find_used_regs (rtx *xptr, void *data)
2990 switch (GET_CODE (x))
2994 case STRICT_LOW_PART:
3003 /* Can only legitimately appear this early in the context of
3004 stack pushes for function arguments, but handle all of the
3005 codes nonetheless. */
3009 /* Setting a subreg of a register larger than word_mode leaves
3010 the non-written words unchanged. */
3011 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
3019 find_used_regs (xptr, data);
3022 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3023 their REG_EQUAL notes need updating. */
3026 do_local_cprop (rtx x, rtx insn, int alter_jumps, rtx *libcall_sp)
3028 rtx newreg = NULL, newcnst = NULL;
3030 /* Rule out USE instructions and ASM statements as we don't want to
3031 change the hard registers mentioned. */
3033 && (REGNO (x) >= FIRST_PSEUDO_REGISTER
3034 || (GET_CODE (PATTERN (insn)) != USE
3035 && asm_noperands (PATTERN (insn)) < 0)))
3037 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
3038 struct elt_loc_list *l;
3042 for (l = val->locs; l; l = l->next)
3044 rtx this_rtx = l->loc;
3047 /* Don't CSE non-constant values out of libcall blocks. */
3048 if (l->in_libcall && ! CONSTANT_P (this_rtx))
3051 if (gcse_constant_p (this_rtx))
3053 if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
/* Don't copy propagate if the register has an attached REG_EQUIV
   note.  At this point only function parameters should have
   REG_EQUIV notes, and if the argument slot is used somewhere
   explicitly, the address of the parameter has been taken, so we
   should not extend the lifetime of the pseudo.  */
3059 && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
3060 || ! MEM_P (XEXP (note, 0))))
3063 if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
/* If we find a case where we can't fix the retval REG_EQUAL notes to
   match the new register, we either have to abandon this replacement
   or fix delete_trivially_dead_insns to preserve the setting insn, or
   make it delete the REG_EQUAL note, and fix up all passes that
   require the REG_EQUAL note there.  */
3072 adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
3073 gcc_assert (adjusted);
3075 if (gcse_file != NULL)
3077 fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
3079 fprintf (gcse_file, "insn %d with constant ",
3081 print_rtl (gcse_file, newcnst);
3082 fprintf (gcse_file, "\n");
3084 local_const_prop_count++;
3087 else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
3089 adjust_libcall_notes (x, newreg, insn, libcall_sp);
3090 if (gcse_file != NULL)
3093 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
3094 REGNO (x), INSN_UID (insn));
3095 fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
3097 local_copy_prop_count++;
3104 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
3105 their REG_EQUAL notes need updating to reflect that OLDREG has been
replaced with NEWVAL in INSN.  Return true if all substitutions could
   be performed.  */
3109 adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
3113 while ((end = *libcall_sp++))
3115 rtx note = find_reg_equal_equiv_note (end);
3122 if (reg_set_between_p (newval, PREV_INSN (insn), end))
3126 note = find_reg_equal_equiv_note (end);
3129 if (reg_mentioned_p (newval, XEXP (note, 0)))
3132 while ((end = *libcall_sp++));
3136 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
3142 #define MAX_NESTED_LIBCALLS 9
3145 local_cprop_pass (int alter_jumps)
3148 struct reg_use *reg_used;
3149 rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
3150 bool changed = false;
3152 cselib_init (false);
3153 libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
3155 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3159 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
3163 gcc_assert (libcall_sp != libcall_stack);
3164 *--libcall_sp = XEXP (note, 0);
3166 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3169 note = find_reg_equal_equiv_note (insn);
3173 note_uses (&PATTERN (insn), local_cprop_find_used_regs, NULL);
3175 local_cprop_find_used_regs (&XEXP (note, 0), NULL);
for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3178 reg_used++, reg_use_count--)
3179 if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
3185 if (INSN_DELETED_P (insn))
3188 while (reg_use_count);
3190 cselib_process_insn (insn);
3193 /* Global analysis may get into infinite loops for unreachable blocks. */
3194 if (changed && alter_jumps)
3196 delete_unreachable_blocks ();
3197 free_reg_set_mem ();
3198 alloc_reg_set_mem (max_reg_num ());
3199 compute_sets (get_insns ());
3203 /* Forward propagate copies. This includes copies and constants. Return
3204 nonzero if a change was made. */
3207 cprop (int alter_jumps)
3213 /* Note we start at block 1. */
3214 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3216 if (gcse_file != NULL)
3217 fprintf (gcse_file, "\n");
3222 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR, next_bb)
3224 /* Reset tables used to keep track of what's still valid [since the
3225 start of the block]. */
3226 reset_opr_set_tables ();
3228 for (insn = BB_HEAD (bb);
3229 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3230 insn = NEXT_INSN (insn))
3233 changed |= cprop_insn (insn, alter_jumps);
3235 /* Keep track of everything modified by this insn. */
3236 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
3237 call mark_oprs_set if we turned the insn into a NOTE. */
3238 if (! NOTE_P (insn))
3239 mark_oprs_set (insn);
3243 if (gcse_file != NULL)
3244 fprintf (gcse_file, "\n");
3249 /* Similar to get_condition, only the resulting condition must be
3250 valid at JUMP, instead of at EARLIEST.
3252 This differs from noce_get_condition in ifcvt.c in that we prefer not to
3253 settle for the condition variable in the jump instruction being integral.
3254 We prefer to be able to record the value of a user variable, rather than
3255 the value of a temporary used in a condition. This could be solved by
recording the value of *every* register scanned by canonicalize_condition,
3257 but this would require some code reorganization. */
3260 fis_get_condition (rtx jump)
3262 return get_condition (jump, NULL, false, true);
3265 /* Check the comparison COND to see if we can safely form an implicit set from
3266 it. COND is either an EQ or NE comparison. */
3269 implicit_set_cond_p (rtx cond)
3271 enum machine_mode mode = GET_MODE (XEXP (cond, 0));
3272 rtx cst = XEXP (cond, 1);
3274 /* We can't perform this optimization if either operand might be or might
3275 contain a signed zero. */
3276 if (HONOR_SIGNED_ZEROS (mode))
3278 /* It is sufficient to check if CST is or contains a zero. We must
3279 handle float, complex, and vector. If any subpart is a zero, then
3280 the optimization can't be performed. */
3281 /* ??? The complex and vector checks are not implemented yet. We just
3282 always return zero for them. */
3283 if (GET_CODE (cst) == CONST_DOUBLE)
3286 REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
3287 if (REAL_VALUES_EQUAL (d, dconst0))
3294 return gcse_constant_p (cst);
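/* Example of the signed-zero hazard (illustrative): following
   "if (x == 0.0)", x may be either +0.0 or -0.0 in the THEN block, so
   recording the implicit set "x = 0.0" would be unsound whenever
   HONOR_SIGNED_ZEROS holds for the mode.  */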
3297 /* Find the implicit sets of a function. An "implicit set" is a constraint
3298 on the value of a variable, implied by a conditional jump. For example,
3299 following "if (x == 2)", the then branch may be optimized as though the
3300 conditional performed an "explicit set", in this example, "x = 2". This
function records the set patterns that are implicit at the start of each
   basic block.  */
3305 find_implicit_sets (void)
3307 basic_block bb, dest;
3313 /* Check for more than one successor. */
3314 if (EDGE_COUNT (bb->succs) > 1)
3316 cond = fis_get_condition (BB_END (bb));
3319 && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
3320 && REG_P (XEXP (cond, 0))
3321 && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
3322 && implicit_set_cond_p (cond))
3324 dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
3325 : FALLTHRU_EDGE (bb)->dest;
3327 if (dest && EDGE_COUNT (dest->preds) == 1
3328 && dest != EXIT_BLOCK_PTR)
3330 new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
3332 implicit_sets[dest->index] = new;
fprintf (gcse_file, "Implicit set of reg %d in ",
         REGNO (XEXP (cond, 0)));
fprintf (gcse_file, "basic block %d\n", dest->index);
3345 fprintf (gcse_file, "Found %d implicit sets\n", count);
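/* Continuing the example above (hypothetical RTL): for "if (x == 2)"
   with x in reg 100, the THEN destination gets the recorded pattern

       (set (reg 100) (const_int 2))

   which compute_hash_table_work scans as though it were the first insn
   of that block (see the hash_scan_set call on implicit_sets there).  */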
3348 /* Perform one copy/constant propagation pass.
3349 PASS is the pass count. If CPROP_JUMPS is true, perform constant
3350 propagation into conditional jumps. If BYPASS_JUMPS is true,
3351 perform conditional jump bypassing optimizations. */
3354 one_cprop_pass (int pass, int cprop_jumps, int bypass_jumps)
3358 global_const_prop_count = local_const_prop_count = 0;
3359 global_copy_prop_count = local_copy_prop_count = 0;
3361 local_cprop_pass (cprop_jumps);
3363 /* Determine implicit sets. */
3364 implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
3365 find_implicit_sets ();
3367 alloc_hash_table (max_cuid, &set_hash_table, 1);
3368 compute_hash_table (&set_hash_table);
3370 /* Free implicit_sets before peak usage. */
3371 free (implicit_sets);
3372 implicit_sets = NULL;
3375 dump_hash_table (gcse_file, "SET", &set_hash_table);
3376 if (set_hash_table.n_elems > 0)
3378 alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
3379 compute_cprop_data ();
3380 changed = cprop (cprop_jumps);
3382 changed |= bypass_conditional_jumps ();
3386 free_hash_table (&set_hash_table);
3390 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
3391 current_function_name (), pass, bytes_used);
3392 fprintf (gcse_file, "%d local const props, %d local copy props\n\n",
3393 local_const_prop_count, local_copy_prop_count);
3394 fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
3395 global_const_prop_count, global_copy_prop_count);
3397 /* Global analysis may get into infinite loops for unreachable blocks. */
3398 if (changed && cprop_jumps)
3399 delete_unreachable_blocks ();
3404 /* Bypass conditional jumps. */
3406 /* The value of last_basic_block at the beginning of the jump_bypass
3407 pass. The use of redirect_edge_and_branch_force may introduce new
3408 basic blocks, but the data flow analysis is only valid for basic
3409 block indices less than bypass_last_basic_block. */
3411 static int bypass_last_basic_block;
3413 /* Find a set of REGNO to a constant that is available at the end of basic
block BB.  Returns NULL if no such set is found.  Based heavily upon
   find_avail_set.  */
3417 static struct expr *
3418 find_bypass_set (int regno, int bb)
3420 struct expr *result = 0;
3425 struct expr *set = lookup_set (regno, &set_hash_table);
3429 if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
3431 set = next_set (regno, set);
3437 gcc_assert (GET_CODE (set->expr) == SET);
3439 src = SET_SRC (set->expr);
3440 if (gcse_constant_p (src))
3446 regno = REGNO (src);
3452 /* Subroutine of bypass_block that checks whether a pseudo is killed by
3453 any of the instructions inserted on an edge. Jump bypassing places
3454 condition code setters on CFG edges using insert_insn_on_edge. This
3455 function is required to check that our data flow analysis is still
3456 valid prior to commit_edge_insertions. */
3459 reg_killed_on_edge (rtx reg, edge e)
3463 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
3464 if (INSN_P (insn) && reg_set_p (reg, insn))
3470 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
3471 basic block BB which has more than one predecessor. If not NULL, SETCC
3472 is the first instruction of BB, which is immediately followed by JUMP_INSN
3473 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
3474 Returns nonzero if a change was made.
3476 During the jump bypassing pass, we may place copies of SETCC instructions
3477 on CFG edges. The following routine must be careful to pay attention to
3478 these inserted insns when performing its transformations. */
3481 bypass_block (basic_block bb, rtx setcc, rtx jump)
3486 int may_be_loop_header;
3490 insn = (setcc != NULL) ? setcc : jump;
3492 /* Determine set of register uses in INSN. */
3494 note_uses (&PATTERN (insn), find_used_regs, NULL);
3495 note = find_reg_equal_equiv_note (insn);
3497 find_used_regs (&XEXP (note, 0), NULL);
3499 may_be_loop_header = false;
3500 FOR_EACH_EDGE (e, ei, bb->preds)
3501 if (e->flags & EDGE_DFS_BACK)
3503 may_be_loop_header = true;
3508 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3512 if (e->flags & EDGE_COMPLEX)
3518 /* We can't redirect edges from new basic blocks. */
3519 if (e->src->index >= bypass_last_basic_block)
/* The irreducible loops created by redirecting edges entering the
   loop from outside would decrease the effectiveness of some of the
   following optimizations, so prevent this.  */
3528 if (may_be_loop_header
3529 && !(e->flags & EDGE_DFS_BACK))
3535 for (i = 0; i < reg_use_count; i++)
struct reg_use *reg_used = &reg_use_table[i];
3538 unsigned int regno = REGNO (reg_used->reg_rtx);
3539 basic_block dest, old_dest;
3543 if (regno >= max_gcse_regno)
3546 set = find_bypass_set (regno, e->src->index);
3551 /* Check the data flow is valid after edge insertions. */
3552 if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
3555 src = SET_SRC (pc_set (jump));
3558 src = simplify_replace_rtx (src,
3559 SET_DEST (PATTERN (setcc)),
3560 SET_SRC (PATTERN (setcc)));
3562 new = simplify_replace_rtx (src, reg_used->reg_rtx,
3563 SET_SRC (set->expr));
3565 /* Jump bypassing may have already placed instructions on
3566 edges of the CFG. We can't bypass an outgoing edge that
3567 has instructions associated with it, as these insns won't
3568 get executed if the incoming edge is redirected. */
3572 edest = FALLTHRU_EDGE (bb);
3573 dest = edest->insns.r ? NULL : edest->dest;
3575 else if (GET_CODE (new) == LABEL_REF)
3579 dest = BLOCK_FOR_INSN (XEXP (new, 0));
3580 /* Don't bypass edges containing instructions. */
3581 FOR_EACH_EDGE (edest, ei2, bb->succs)
3582 if (edest->dest == dest && edest->insns.r)
3591 /* Avoid unification of the edge with other edges from original
branch.  We would end up emitting the instruction on "both"
   edges.  */
3595 if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
3600 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3601 if (e2->dest == dest)
3611 && dest != EXIT_BLOCK_PTR)
3613 redirect_edge_and_branch_force (e, dest);
3615 /* Copy the register setter to the redirected edge.
Don't copy CC0 setters, as CC0 is dead after the jump.  */
3619 rtx pat = PATTERN (setcc);
3620 if (!CC0_P (SET_DEST (pat)))
3621 insert_insn_on_edge (copy_insn (pat), e);
3624 if (gcse_file != NULL)
3626 fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
3627 "in jump_insn %d equals constant ",
3628 regno, INSN_UID (jump));
3629 print_rtl (gcse_file, SET_SRC (set->expr));
3630 fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
3631 e->src->index, old_dest->index, dest->index);
3644 /* Find basic blocks with more than one predecessor that only contain a
3645 single conditional jump. If the result of the comparison is known at
3646 compile-time from any incoming edge, redirect that edge to the
3647 appropriate target. Returns nonzero if a change was made.
3649 This function is now mis-named, because we also handle indirect jumps. */
3652 bypass_conditional_jumps (void)
3660 /* Note we start at block 1. */
3661 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
3664 bypass_last_basic_block = last_basic_block;
3665 mark_dfs_back_edges ();
3668 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
3669 EXIT_BLOCK_PTR, next_bb)
3671 /* Check for more than one predecessor. */
3672 if (EDGE_COUNT (bb->preds) > 1)
3675 for (insn = BB_HEAD (bb);
3676 insn != NULL && insn != NEXT_INSN (BB_END (bb));
3677 insn = NEXT_INSN (insn))
3678 if (NONJUMP_INSN_P (insn))
3682 if (GET_CODE (PATTERN (insn)) != SET)
3685 dest = SET_DEST (PATTERN (insn));
3686 if (REG_P (dest) || CC0_P (dest))
3691 else if (JUMP_P (insn))
3693 if ((any_condjump_p (insn) || computed_jump_p (insn))
3694 && onlyjump_p (insn))
3695 changed |= bypass_block (bb, setcc, insn);
3698 else if (INSN_P (insn))
3703 /* If we bypassed any register setting insns, we inserted a
3704 copy on the redirected edge. These need to be committed. */
commit_edge_insertions ();
3711 /* Compute PRE+LCM working variables. */
3713 /* Local properties of expressions. */
3714 /* Nonzero for expressions that are transparent in the block. */
3715 static sbitmap *transp;
3717 /* Nonzero for expressions that are transparent at the end of the block.
This is only zero for expressions killed by an abnormal critical edge
   created by a call.  */
3720 static sbitmap *transpout;
3722 /* Nonzero for expressions that are computed (available) in the block. */
3723 static sbitmap *comp;
3725 /* Nonzero for expressions that are locally anticipatable in the block. */
3726 static sbitmap *antloc;
/* Nonzero for expressions where this block is an optimal computation
   point.  */
3730 static sbitmap *pre_optimal;
3732 /* Nonzero for expressions which are redundant in a particular block. */
3733 static sbitmap *pre_redundant;
3735 /* Nonzero for expressions which should be inserted on a specific edge. */
3736 static sbitmap *pre_insert_map;
3738 /* Nonzero for expressions which should be deleted in a specific block. */
3739 static sbitmap *pre_delete_map;
3741 /* Contains the edge_list returned by pre_edge_lcm. */
3742 static struct edge_list *edge_list;
3744 /* Redundant insns. */
3745 static sbitmap pre_redundant_insns;
3747 /* Allocate vars used for PRE analysis. */
3750 alloc_pre_mem (int n_blocks, int n_exprs)
3752 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
3753 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
3754 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
3757 pre_redundant = NULL;
3758 pre_insert_map = NULL;
3759 pre_delete_map = NULL;
3760 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
3762 /* pre_insert and pre_delete are allocated later. */
3765 /* Free vars used for PRE analysis. */
3770 sbitmap_vector_free (transp);
3771 sbitmap_vector_free (comp);
3773 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
3776 sbitmap_vector_free (pre_optimal);
3778 sbitmap_vector_free (pre_redundant);
3780 sbitmap_vector_free (pre_insert_map);
3782 sbitmap_vector_free (pre_delete_map);
3784 transp = comp = NULL;
3785 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
3788 /* Top level routine to do the dataflow analysis needed by PRE. */
3791 compute_pre_data (void)
3793 sbitmap trapping_expr;
3797 compute_local_properties (transp, comp, antloc, &expr_hash_table);
3798 sbitmap_vector_zero (ae_kill, last_basic_block);
3800 /* Collect expressions which might trap. */
3801 trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
3802 sbitmap_zero (trapping_expr);
3803 for (ui = 0; ui < expr_hash_table.size; ui++)
3806 for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
3807 if (may_trap_p (e->expr))
3808 SET_BIT (trapping_expr, e->bitmap_index);
/* Compute ae_kill for each basic block using:

   ~(TRANSP | COMP)  */
3821 /* If the current block is the destination of an abnormal edge, we
3822 kill all trapping expressions because we won't be able to properly
3823 place the instruction on the edge. So make them neither
3824 anticipatable nor transparent. This is fairly conservative. */
3825 FOR_EACH_EDGE (e, ei, bb->preds)
3826 if (e->flags & EDGE_ABNORMAL)
3828 sbitmap_difference (antloc[bb->index], antloc[bb->index], trapping_expr);
3829 sbitmap_difference (transp[bb->index], transp[bb->index], trapping_expr);
3833 sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
3834 sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
3837 edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp, antloc,
3838 ae_kill, &pre_insert_map, &pre_delete_map);
3839 sbitmap_vector_free (antloc);
3841 sbitmap_vector_free (ae_kill);
3843 sbitmap_free (trapping_expr);
/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.
3851 VISITED is a pointer to a working buffer for tracking which BB's have
3852 been visited. It is NULL for the top-level call.
3854 We treat reaching expressions that go through blocks containing the same
3855 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3856 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3857 2 as not reaching. The intent is to improve the probability of finding
3858 only one reaching expression and to reduce register lifetimes by picking
3859 the closest such expression. */
3862 pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr, basic_block bb, char *visited)
3867 FOR_EACH_EDGE (pred, ei, bb->preds)
3869 basic_block pred_bb = pred->src;
3871 if (pred->src == ENTRY_BLOCK_PTR
/* Has this predecessor already been visited?  */
3873 || visited[pred_bb->index])
3874 ;/* Nothing to do. */
3876 /* Does this predecessor generate this expression? */
3877 else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
3879 /* Is this the occurrence we're looking for?
3880 Note that there's only one generating occurrence per block
3881 so we just need to check the block number. */
3882 if (occr_bb == pred_bb)
3885 visited[pred_bb->index] = 1;
3887 /* Ignore this predecessor if it kills the expression. */
3888 else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
3889 visited[pred_bb->index] = 1;
3891 /* Neither gen nor kill. */
3894 visited[pred_bb->index] = 1;
3895 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
3900 /* All paths have been checked. */
3904 /* The wrapper for pre_expr_reaches_here_work that ensures that any
3905 memory allocated for that function is returned. */
3908 pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr, basic_block bb)
3911 char *visited = xcalloc (last_basic_block, 1);
3913 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
3920 /* Given an expr, generate RTL which we can insert at the end of a BB,
or on an edge.  Set the block number of any insns generated to
   the value of BB.  */
3925 process_insert_insn (struct expr *expr)
3927 rtx reg = expr->reaching_reg;
3928 rtx exp = copy_rtx (expr->expr);
3933 /* If the expression is something that's an operand, like a constant,
3934 just copy it to a register. */
3935 if (general_operand (exp, GET_MODE (reg)))
3936 emit_move_insn (reg, exp);
3938 /* Otherwise, make a new insn to compute this expression and make sure the
3939 insn will be recognized (this also adds any needed CLOBBERs). Copy the
3940 expression to make sure we don't have any sharing issues. */
3943 rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));
3945 if (insn_invalid_p (insn))
3956 /* Add EXPR to the end of basic block BB.
3958 This is used by both the PRE and code hoisting.
3960 For PRE, we want to verify that the expr is either transparent
3961 or locally anticipatable in the target block. This check makes
3962 no sense for code hoisting. */
3965 insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
3967 rtx insn = BB_END (bb);
3969 rtx reg = expr->reaching_reg;
3970 int regno = REGNO (reg);
3973 pat = process_insert_insn (expr);
3974 gcc_assert (pat && INSN_P (pat));
3977 while (NEXT_INSN (pat_end) != NULL_RTX)
3978 pat_end = NEXT_INSN (pat_end);
3980 /* If the last insn is a jump, insert EXPR in front [taking care to
handle cc0, etc. properly].  Similarly we need to take care of trapping
   instructions in the presence of non-call exceptions.  */
3985 || (NONJUMP_INSN_P (insn)
3986 && (EDGE_COUNT (bb->succs) > 1
3987 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL)))
3992 /* It should always be the case that we can put these instructions
anywhere in the basic block when performing PRE optimizations.
   Check this.  */
3995 gcc_assert (!NONJUMP_INSN_P (insn) || !pre
3996 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
3997 || TEST_BIT (transp[bb->index], expr->bitmap_index));
3999 /* If this is a jump table, then we can't insert stuff here. Since
4000 we know the previous real insn must be the tablejump, we insert
4001 the new instruction just before the tablejump. */
4002 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4003 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4004 insn = prev_real_insn (insn);
4007 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4008 if cc0 isn't set. */
4009 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4011 insn = XEXP (note, 0);
4014 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4015 if (maybe_cc0_setter
4016 && INSN_P (maybe_cc0_setter)
4017 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4018 insn = maybe_cc0_setter;
4021 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4022 new_insn = emit_insn_before_noloc (pat, insn);
4025 /* Likewise if the last insn is a call, as will happen in the presence
4026 of exception handling. */
4027 else if (CALL_P (insn)
4028 && (EDGE_COUNT (bb->succs) > 1 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
4030 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4031 we search backward and place the instructions before the first
4032 parameter is loaded. Do this for everyone for consistency and a
4033 presumption that we'll get better code elsewhere as well.
4035 It should always be the case that we can put these instructions
anywhere in the basic block when performing PRE optimizations.
   Check this.  */
4040 || TEST_BIT (antloc[bb->index], expr->bitmap_index)
4041 || TEST_BIT (transp[bb->index], expr->bitmap_index));
4043 /* Since different machines initialize their parameter registers
4044 in different orders, assume nothing. Collect the set of all
4045 parameter registers. */
4046 insn = find_first_parameter_load (insn, BB_HEAD (bb));
4048 /* If we found all the parameter loads, then we want to insert
4049 before the first parameter load.
4051 If we did not find all the parameter loads, then we might have
4052 stopped on the head of the block, which could be a CODE_LABEL.
4053 If we inserted before the CODE_LABEL, then we would be putting
4054 the insn in the wrong basic block. In that case, put the insn
4055 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4056 while (LABEL_P (insn)
4057 || NOTE_INSN_BASIC_BLOCK_P (insn))
4058 insn = NEXT_INSN (insn);
4060 new_insn = emit_insn_before_noloc (pat, insn);
4063 new_insn = emit_insn_after_noloc (pat, insn);
4069 add_label_notes (PATTERN (pat), new_insn);
4070 note_stores (PATTERN (pat), record_set_info, pat);
4074 pat = NEXT_INSN (pat);
4077 gcse_create_count++;
4081 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4082 bb->index, INSN_UID (new_insn));
4083 fprintf (gcse_file, "copying expression %d to reg %d\n",
4084 expr->bitmap_index, regno);
4088 /* Insert partially redundant expressions on edges in the CFG to make
4089 the expressions fully redundant. */
4092 pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
4094 int e, i, j, num_edges, set_size, did_insert = 0;
4097 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4098 if it reaches any of the deleted expressions. */
4100 set_size = pre_insert_map[0]->size;
4101 num_edges = NUM_EDGES (edge_list);
4102 inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
4103 sbitmap_vector_zero (inserted, num_edges);
4105 for (e = 0; e < num_edges; e++)
4108 basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);
4110 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4112 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4114 for (j = indx; insert && j < (int) expr_hash_table.n_elems; j++, insert >>= 1)
4115 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4117 struct expr *expr = index_map[j];
4120 /* Now look at each deleted occurrence of this expression. */
4121 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4123 if (! occr->deleted_p)
/* Insert this expression on this edge if it would
4127 reach the deleted occurrence in BB. */
4128 if (!TEST_BIT (inserted[e], j))
4131 edge eg = INDEX_EDGE (edge_list, e);
4133 /* We can't insert anything on an abnormal and
4134 critical edge, so we insert the insn at the end of
4135 the previous block. There are several alternatives
detailed in Morgan's book, p. 277 (sec 10.5), for
   handling this situation.  This one is easiest for
   now.  */
4140 if (eg->flags & EDGE_ABNORMAL)
4141 insert_insn_end_bb (index_map[j], bb, 0);
4144 insn = process_insert_insn (index_map[j]);
4145 insert_insn_on_edge (insn, eg);
4150 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4152 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4153 fprintf (gcse_file, "copy expression %d\n",
4154 expr->bitmap_index);
4157 update_ld_motion_stores (expr);
4158 SET_BIT (inserted[e], j);
4160 gcse_create_count++;
4167 sbitmap_vector_free (inserted);
4171 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4172 Given "old_reg <- expr" (INSN), instead of adding after it
4173 reaching_reg <- old_reg
4174 it's better to do the following:
4175 reaching_reg <- expr
4176 old_reg <- reaching_reg
4177 because this way copy propagation can discover additional PRE
4178 opportunities. But if this fails, we try the old way.
4179 When "expr" is a store, i.e.
4180 given "MEM <- old_reg", instead of adding after it
4181 reaching_reg <- old_reg
4182 it's better to add it before as follows:
4183 reaching_reg <- old_reg
4184 MEM <- reaching_reg. */
4187 pre_insert_copy_insn (struct expr *expr, rtx insn)
4189 rtx reg = expr->reaching_reg;
4190 int regno = REGNO (reg);
4191 int indx = expr->bitmap_index;
4192 rtx pat = PATTERN (insn);
4197 /* This block matches the logic in hash_scan_insn. */
4198 switch (GET_CODE (pat))
4205 /* Search through the parallel looking for the set whose
4206 source was the expression that we're interested in. */
4208 for (i = 0; i < XVECLEN (pat, 0); i++)
4210 rtx x = XVECEXP (pat, 0, i);
4211 if (GET_CODE (x) == SET
4212 && expr_equiv_p (SET_SRC (x), expr->expr))
4224 if (REG_P (SET_DEST (set)))
4226 old_reg = SET_DEST (set);
4227 /* Check if we can modify the set destination in the original insn. */
4228 if (validate_change (insn, &SET_DEST (set), reg, 0))
4230 new_insn = gen_move_insn (old_reg, reg);
4231 new_insn = emit_insn_after (new_insn, insn);
4233 /* Keep register set table up to date. */
4234 record_one_set (regno, insn);
4238 new_insn = gen_move_insn (reg, old_reg);
4239 new_insn = emit_insn_after (new_insn, insn);
4241 /* Keep register set table up to date. */
4242 record_one_set (regno, new_insn);
4245 else /* This is possible only in case of a store to memory. */
4247 old_reg = SET_SRC (set);
4248 new_insn = gen_move_insn (reg, old_reg);
4250 /* Check if we can modify the set source in the original insn. */
4251 if (validate_change (insn, &SET_SRC (set), reg, 0))
4252 new_insn = emit_insn_before (new_insn, insn);
4254 new_insn = emit_insn_after (new_insn, insn);
4256 /* Keep register set table up to date. */
4257 record_one_set (regno, new_insn);
4260 gcse_create_count++;
4264 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4265 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4266 INSN_UID (insn), regno);
4269 /* Copy available expressions that reach the redundant expression
4270 to `reaching_reg'. */
4273 pre_insert_copies (void)
4275 unsigned int i, added_copy;
4280 /* For each available expression in the table, copy the result to
4281 `reaching_reg' if the expression reaches a deleted one.
4283 ??? The current algorithm is rather brute force.
4284 Need to do some profiling. */
4286 for (i = 0; i < expr_hash_table.size; i++)
4287 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4289 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4290 we don't want to insert a copy here because the expression may not
4291 really be redundant. So only insert an insn if the expression was
4292 deleted. This test also avoids further processing if the
4293 expression wasn't deleted anywhere. */
4294 if (expr->reaching_reg == NULL)
4297 /* Set when we add a copy for that expression. */
4300 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4302 if (! occr->deleted_p)
4305 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4307 rtx insn = avail->insn;
4309 /* No need to handle this one if handled already. */
4310 if (avail->copied_p)
4313 /* Don't handle this one if it's a redundant one. */
4314 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4317 /* Or if the expression doesn't reach the deleted one. */
4318 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail->insn),
4320 BLOCK_FOR_INSN (occr->insn)))
4325 /* Copy the result of avail to reaching_reg. */
4326 pre_insert_copy_insn (expr, insn);
4327 avail->copied_p = 1;
4332 update_ld_motion_stores (expr);
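/* An illustrative sketch (not taken from any particular test case) of
   what pre_delete, pre_edge_insert and pre_insert_copies accomplish
   together for a partially redundant expression "a + b", writing `t'
   for the expression's reaching_reg:

	if (p)				if (p)
	  x = a + b;	    ==>		  { t = a + b; x = t; }   <- copy added here
	else				else
	  ;				  t = a + b;		  <- inserted on the edge
	y = a + b;			y = t;			  <- redundant insn deleted  */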
/* Emit move from SRC to DEST noting the equivalence with expression computed
   in INSN.  */
4339 gcse_emit_move_after (rtx src, rtx dest, rtx insn)
4342 rtx set = single_set (insn), set2;
4346 /* This should never fail since we're creating a reg->reg copy
4347 we've verified to be valid. */
4349 new = emit_insn_after (gen_move_insn (dest, src), insn);
4351 /* Note the equivalence for local CSE pass. */
4352 set2 = single_set (new);
4353 if (!set2 || !rtx_equal_p (SET_DEST (set2), dest))
4355 if ((note = find_reg_equal_equiv_note (insn)))
4356 eqv = XEXP (note, 0);
4358 eqv = SET_SRC (set);
4360 set_unique_reg_note (new, REG_EQUAL, copy_insn_1 (eqv));
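/* For instance (a sketch): when pre_delete replaces "r5 <- r1 + r2"
   with a copy from reaching register r9, this routine emits

	(set (reg 5) (reg 9))

   and attaches a REG_EQUAL note holding (plus (reg 1) (reg 2)), so a
   later cprop/cse pass still knows which value r5 carries.  The
   register numbers are illustrative only.  */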
4365 /* Delete redundant computations.
4366 Deletion is done by changing the insn to copy the `reaching_reg' of
4367 the expression into the result of the SET. It is left to later passes
4368 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4370 Returns nonzero if a change is made. */
4381 for (i = 0; i < expr_hash_table.size; i++)
4382 for (expr = expr_hash_table.table[i];
4384 expr = expr->next_same_hash)
4386 int indx = expr->bitmap_index;
/* We only need to search antic_occr since we require
	 ANTLOC != 0.  */
4391 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4393 rtx insn = occr->insn;
4395 basic_block bb = BLOCK_FOR_INSN (insn);
4397 /* We only delete insns that have a single_set. */
4398 if (TEST_BIT (pre_delete_map[bb->index], indx)
4399 && (set = single_set (insn)) != 0)
4401 /* Create a pseudo-reg to store the result of reaching
4402 expressions into. Get the mode for the new pseudo from
4403 the mode of the original destination pseudo. */
4404 if (expr->reaching_reg == NULL)
expr->reaching_reg
		  = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4408 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4410 occr->deleted_p = 1;
4411 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4418 "PRE: redundant insn %d (expression %d) in ",
4419 INSN_UID (insn), indx);
4420 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4421 bb->index, REGNO (expr->reaching_reg));
4430 /* Perform GCSE optimizations using PRE.
This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.
This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4435 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4436 Compiler Design and Implementation.
4438 ??? A new pseudo reg is created to hold the reaching expression. The nice
4439 thing about the classical approach is that it would try to use an existing
4440 reg. If the register can't be adequately optimized [i.e. we introduce
4441 reload problems], one could add a pass here to propagate the new register
4444 ??? We don't handle single sets in PARALLELs because we're [currently] not
4445 able to copy the rest of the parallel when we insert copies to create full
4446 redundancies from partial redundancies. However, there's no reason why we
can't handle PARALLELs in the cases where there are no partial
   redundancies.
4454 int did_insert, changed;
4455 struct expr **index_map;
4458 /* Compute a mapping from expression number (`bitmap_index') to
4459 hash table entry. */
4461 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4462 for (i = 0; i < expr_hash_table.size; i++)
4463 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4464 index_map[expr->bitmap_index] = expr;
4466 /* Reset bitmap used to track which insns are redundant. */
4467 pre_redundant_insns = sbitmap_alloc (max_cuid);
4468 sbitmap_zero (pre_redundant_insns);
4470 /* Delete the redundant insns first so that
4471 - we know what register to use for the new insns and for the other
4472 ones with reaching expressions
4473 - we know which insns are redundant when we go to create copies */
4475 changed = pre_delete ();
4477 did_insert = pre_edge_insert (edge_list, index_map);
4479 /* In other places with reaching expressions, copy the expression to the
4480 specially allocated pseudo-reg that reaches the redundant expr. */
4481 pre_insert_copies ();
4484 commit_edge_insertions ();
4489 sbitmap_free (pre_redundant_insns);
4493 /* Top level routine to perform one PRE GCSE pass.
4495 Return nonzero if a change was made. */
4498 one_pre_gcse_pass (int pass)
4502 gcse_subst_count = 0;
4503 gcse_create_count = 0;
4505 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4506 add_noreturn_fake_exit_edges ();
4508 compute_ld_motion_mems ();
4510 compute_hash_table (&expr_hash_table);
4511 trim_ld_motion_mems ();
4513 dump_hash_table (gcse_file, "Expression", &expr_hash_table);
4515 if (expr_hash_table.n_elems > 0)
4517 alloc_pre_mem (last_basic_block, expr_hash_table.n_elems);
4518 compute_pre_data ();
4519 changed |= pre_gcse ();
4520 free_edge_list (edge_list);
4525 remove_fake_exit_edges ();
4526 free_hash_table (&expr_hash_table);
4530 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4531 current_function_name (), pass, bytes_used);
4532 fprintf (gcse_file, "%d substs, %d insns created\n",
4533 gcse_subst_count, gcse_create_count);
4539 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4540 If notes are added to an insn which references a CODE_LABEL, the
4541 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4542 because the following loop optimization pass requires them. */
4544 /* ??? This is very similar to the loop.c add_label_notes function. We
4545 could probably share code here. */
4547 /* ??? If there was a jump optimization pass after gcse and before loop,
4548 then we would not need to do this here, because jump would add the
4549 necessary REG_LABEL notes. */
4552 add_label_notes (rtx x, rtx insn)
4554 enum rtx_code code = GET_CODE (x);
4558 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4560 /* This code used to ignore labels that referred to dispatch tables to
4561 avoid flow generating (slightly) worse code.
4563 We no longer ignore such label references (see LABEL_REF handling in
4564 mark_jump_label for additional information). */
REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
4568 if (LABEL_P (XEXP (x, 0)))
4569 LABEL_NUSES (XEXP (x, 0))++;
4573 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4576 add_label_notes (XEXP (x, i), insn);
4577 else if (fmt[i] == 'E')
4578 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4579 add_label_notes (XVECEXP (x, i, j), insn);
4583 /* Compute transparent outgoing information for each block.
4585 An expression is transparent to an edge unless it is killed by
4586 the edge itself. This can only happen with abnormal control flow,
4587 when the edge is traversed through a call. This happens with
4588 non-local labels and exceptions.
4590 This would not be necessary if we split the edge. While this is
4591 normally impossible for abnormal critical edges, with some effort
4592 it should be possible with exception handling, since we still have
4593 control over which handler should be invoked. But due to increased
4594 EH table sizes, this may not be worthwhile. */
4597 compute_transpout (void)
4603 sbitmap_vector_ones (transpout, last_basic_block);
/* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
4610 if (! CALL_P (BB_END (bb)))
4613 for (i = 0; i < expr_hash_table.size; i++)
4614 for (expr = expr_hash_table.table[i]; expr ; expr = expr->next_same_hash)
4615 if (MEM_P (expr->expr))
4617 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4618 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4621 /* ??? Optimally, we would use interprocedural alias
4622 analysis to determine if this mem is actually killed
4624 RESET_BIT (transpout[bb->index], expr->bitmap_index);
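/* Example (illustrative): a reference such as (mem:SI (symbol_ref "x"))
   is marked not transparent out of a block ending in a call, since the
   callee may store to "x" before an abnormal edge (non-local label,
   exception) transfers control.  A constant pool reference is read-only
   and therefore stays transparent.  */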
4629 /* Code Hoisting variables and subroutines. */
4631 /* Very busy expressions. */
4632 static sbitmap *hoist_vbein;
4633 static sbitmap *hoist_vbeout;
4635 /* Hoistable expressions. */
4636 static sbitmap *hoist_exprs;
4638 /* ??? We could compute post dominators and run this algorithm in
4639 reverse to perform tail merging, doing so would probably be
4640 more effective than the tail merging code in jump.c.
4642 It's unclear if tail merging could be run in parallel with
4643 code hoisting. It would be nice. */
4645 /* Allocate vars used for code hoisting analysis. */
4648 alloc_code_hoist_mem (int n_blocks, int n_exprs)
4650 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4651 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4652 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4654 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
4655 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
4656 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
4657 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4660 /* Free vars used for code hoisting analysis. */
4663 free_code_hoist_mem (void)
4665 sbitmap_vector_free (antloc);
4666 sbitmap_vector_free (transp);
4667 sbitmap_vector_free (comp);
4669 sbitmap_vector_free (hoist_vbein);
4670 sbitmap_vector_free (hoist_vbeout);
4671 sbitmap_vector_free (hoist_exprs);
4672 sbitmap_vector_free (transpout);
4674 free_dominance_info (CDI_DOMINATORS);
4677 /* Compute the very busy expressions at entry/exit from each block.
4679 An expression is very busy if all paths from a given point
4680 compute the expression. */
4683 compute_code_hoist_vbeinout (void)
4685 int changed, passes;
4688 sbitmap_vector_zero (hoist_vbeout, last_basic_block);
4689 sbitmap_vector_zero (hoist_vbein, last_basic_block);
/* We scan the blocks in the reverse order to speed up
	 the convergence.  */
4700 FOR_EACH_BB_REVERSE (bb)
4702 changed |= sbitmap_a_or_b_and_c_cg (hoist_vbein[bb->index], antloc[bb->index],
4703 hoist_vbeout[bb->index], transp[bb->index]);
4704 if (bb->next_bb != EXIT_BLOCK_PTR)
4705 sbitmap_intersection_of_succs (hoist_vbeout[bb->index], hoist_vbein, bb->index);
4712 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
4715 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4718 compute_code_hoist_data (void)
4720 compute_local_properties (transp, comp, antloc, &expr_hash_table);
4721 compute_transpout ();
4722 compute_code_hoist_vbeinout ();
4723 calculate_dominance_info (CDI_DOMINATORS);
4725 fprintf (gcse_file, "\n");
4728 /* Determine if the expression identified by EXPR_INDEX would
reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
4732 to me that the expression must either be computed or transparent in
4733 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4734 would allow the expression to be hoisted out of loops, even if
4735 the expression wasn't a loop invariant.
4737 Contrast this to reachability for PRE where an expression is
considered reachable if *any* path reaches instead of *all* paths.  */
4742 hoist_expr_reaches_here_p (basic_block expr_bb, int expr_index, basic_block bb, char *visited)
4746 int visited_allocated_locally = 0;
4749 if (visited == NULL)
4751 visited_allocated_locally = 1;
4752 visited = xcalloc (last_basic_block, 1);
4755 FOR_EACH_EDGE (pred, ei, bb->preds)
4757 basic_block pred_bb = pred->src;
4759 if (pred->src == ENTRY_BLOCK_PTR)
4761 else if (pred_bb == expr_bb)
4763 else if (visited[pred_bb->index])
4766 /* Does this predecessor generate this expression? */
4767 else if (TEST_BIT (comp[pred_bb->index], expr_index))
4769 else if (! TEST_BIT (transp[pred_bb->index], expr_index))
4775 visited[pred_bb->index] = 1;
4776 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
4781 if (visited_allocated_locally)
4784 return (pred == NULL);
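/* A sketch of the property checked above: for

	BB0:  if (p) goto BB1; else goto BB2;
	BB1:  ...   goto BB3;
	BB2:  ...   goto BB3;
	BB3:  x = a + b;

   "a + b" reaches BB3 from BB0 unimpaired only if BB1 and BB2 each
   either compute it or are transparent for it; a clobber of `a' in
   either block makes the walk return zero, which is what keeps
   loop-variant expressions from being hoisted out of loops.  */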
4787 /* Actually perform code hoisting. */
4792 basic_block bb, dominated;
4794 unsigned int domby_len;
4796 struct expr **index_map;
4799 sbitmap_vector_zero (hoist_exprs, last_basic_block);
4801 /* Compute a mapping from expression number (`bitmap_index') to
4802 hash table entry. */
4804 index_map = xcalloc (expr_hash_table.n_elems, sizeof (struct expr *));
4805 for (i = 0; i < expr_hash_table.size; i++)
4806 for (expr = expr_hash_table.table[i]; expr != NULL; expr = expr->next_same_hash)
4807 index_map[expr->bitmap_index] = expr;
4809 /* Walk over each basic block looking for potentially hoistable
expressions; nothing gets hoisted from the entry block.  */
4814 int insn_inserted_p;
4816 domby_len = get_dominated_by (CDI_DOMINATORS, bb, &domby);
4817 /* Examine each expression that is very busy at the exit of this
4818 block. These are the potentially hoistable expressions. */
4819 for (i = 0; i < hoist_vbeout[bb->index]->n_bits; i++)
4823 if (TEST_BIT (hoist_vbeout[bb->index], i)
4824 && TEST_BIT (transpout[bb->index], i))
4826 /* We've found a potentially hoistable expression, now
4827 we look at every block BB dominates to see if it
4828 computes the expression. */
4829 for (j = 0; j < domby_len; j++)
4831 dominated = domby[j];
4832 /* Ignore self dominance. */
4833 if (bb == dominated)
4835 /* We've found a dominated block, now see if it computes
4836 the busy expression and whether or not moving that
4837 expression to the "beginning" of that block is safe. */
4838 if (!TEST_BIT (antloc[dominated->index], i))
4841 /* Note if the expression would reach the dominated block
unimpaired if it was placed at the end of BB.
4844 Keep track of how many times this expression is hoistable
4845 from a dominated block into BB. */
4846 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4850 /* If we found more than one hoistable occurrence of this
4851 expression, then note it in the bitmap of expressions to
4852 hoist. It makes no sense to hoist things which are computed
4853 in only one BB, and doing so tends to pessimize register
4854 allocation. One could increase this value to try harder
4855 to avoid any possible code expansion due to register
allocation issues; however, experiments have shown that
4857 the vast majority of hoistable expressions are only movable
4858 from two successors, so raising this threshold is likely
4859 to nullify any benefit we get from code hoisting. */
4862 SET_BIT (hoist_exprs[bb->index], i);
4867 /* If we found nothing to hoist, then quit now. */
4874 /* Loop over all the hoistable expressions. */
4875 for (i = 0; i < hoist_exprs[bb->index]->n_bits; i++)
4877 /* We want to insert the expression into BB only once, so
4878 note when we've inserted it. */
4879 insn_inserted_p = 0;
4881 /* These tests should be the same as the tests above. */
4882 if (TEST_BIT (hoist_vbeout[bb->index], i))
4884 /* We've found a potentially hoistable expression, now
4885 we look at every block BB dominates to see if it
4886 computes the expression. */
4887 for (j = 0; j < domby_len; j++)
4889 dominated = domby[j];
4890 /* Ignore self dominance. */
4891 if (bb == dominated)
4894 /* We've found a dominated block, now see if it computes
4895 the busy expression and whether or not moving that
4896 expression to the "beginning" of that block is safe. */
4897 if (!TEST_BIT (antloc[dominated->index], i))
4900 /* The expression is computed in the dominated block and
4901 it would be safe to compute it at the start of the
4902 dominated block. Now we have to determine if the
4903 expression would reach the dominated block if it was
4904 placed at the end of BB. */
4905 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
4907 struct expr *expr = index_map[i];
4908 struct occr *occr = expr->antic_occr;
4912 /* Find the right occurrence of this expression. */
while (occr && BLOCK_FOR_INSN (occr->insn) != dominated)
		      occr = occr->next;
4918 set = single_set (insn);
4921 /* Create a pseudo-reg to store the result of reaching
4922 expressions into. Get the mode for the new pseudo
4923 from the mode of the original destination pseudo. */
4924 if (expr->reaching_reg == NULL)
expr->reaching_reg
			    = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4928 gcse_emit_move_after (expr->reaching_reg, SET_DEST (set), insn);
4930 occr->deleted_p = 1;
4931 if (!insn_inserted_p)
4933 insert_insn_end_bb (index_map[i], bb, 0);
4934 insn_inserted_p = 1;
/* Top level routine to perform one code hoisting (aka unification) pass.
4948 Return nonzero if a change was made. */
4951 one_code_hoisting_pass (void)
4955 alloc_hash_table (max_cuid, &expr_hash_table, 0);
4956 compute_hash_table (&expr_hash_table);
4958 dump_hash_table (gcse_file, "Code Hosting Expressions", &expr_hash_table);
4960 if (expr_hash_table.n_elems > 0)
4962 alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
4963 compute_code_hoist_data ();
4965 free_code_hoist_mem ();
4968 free_hash_table (&expr_hash_table);
4973 /* Here we provide the things required to do store motion towards
4974 the exit. In order for this to be effective, gcse also needed to
be taught how to move a load when it is killed only by a store to itself.

	    int i;
	    float a[10];

	    void foo(float scale)
	    {
	      for (i=0; i<10; i++)
		a[i] *= scale;
	    }
4986 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
the load out since it's live around the loop, and stored at the bottom
   of the loop.
4990 The 'Load Motion' referred to and implemented in this file is
an enhancement to gcse which, when using edge based lcm, recognizes
4992 this situation and allows gcse to move the load out of the loop.
4994 Once gcse has hoisted the load, store motion can then push this
load towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
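/* Roughly, the combined effect on the example above is (a sketch, with
   `t' standing for the reaching register):

	    for (t = 0; t < 10; t++)
	      a[t] *= scale;
	    i = t;

   all accesses to `i' inside the loop go through `t', and the single
   store back to `i' is sunk past the loop by store motion.  */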
4998 /* This will search the ldst list for a matching expression. If it
4999 doesn't find one, we create one and initialize it. */
5001 static struct ls_expr *
5004 int do_not_record_p = 0;
5005 struct ls_expr * ptr;
5008 hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
5009 NULL, /*have_reg_qty=*/false);
5011 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5012 if (ptr->hash_index == hash && expr_equiv_p (ptr->pattern, x))
5015 ptr = xmalloc (sizeof (struct ls_expr));
5017 ptr->next = pre_ldst_mems;
5020 ptr->pattern_regs = NULL_RTX;
5021 ptr->loads = NULL_RTX;
5022 ptr->stores = NULL_RTX;
5023 ptr->reaching_reg = NULL_RTX;
5026 ptr->hash_index = hash;
5027 pre_ldst_mems = ptr;
5032 /* Free up an individual ldst entry. */
5035 free_ldst_entry (struct ls_expr * ptr)
5037 free_INSN_LIST_list (& ptr->loads);
5038 free_INSN_LIST_list (& ptr->stores);
5043 /* Free up all memory associated with the ldst list. */
5046 free_ldst_mems (void)
5048 while (pre_ldst_mems)
5050 struct ls_expr * tmp = pre_ldst_mems;
5052 pre_ldst_mems = pre_ldst_mems->next;
5054 free_ldst_entry (tmp);
5057 pre_ldst_mems = NULL;
5060 /* Dump debugging info about the ldst list. */
5063 print_ldst_list (FILE * file)
5065 struct ls_expr * ptr;
5067 fprintf (file, "LDST list: \n");
5069 for (ptr = first_ls_expr(); ptr != NULL; ptr = next_ls_expr (ptr))
5071 fprintf (file, " Pattern (%3d): ", ptr->index);
5073 print_rtl (file, ptr->pattern);
5075 fprintf (file, "\n Loads : ");
if (ptr->loads)
	print_rtl (file, ptr->loads);
      else
	fprintf (file, "(nil)");
5082 fprintf (file, "\n Stores : ");
if (ptr->stores)
	print_rtl (file, ptr->stores);
      else
	fprintf (file, "(nil)");
5089 fprintf (file, "\n\n");
5092 fprintf (file, "\n");
5095 /* Returns 1 if X is in the list of ldst only expressions. */
5097 static struct ls_expr *
5098 find_rtx_in_ldst (rtx x)
5100 struct ls_expr * ptr;
5102 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5103 if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
5109 /* Assign each element of the list of mems a monotonically increasing value. */
5112 enumerate_ldsts (void)
5114 struct ls_expr * ptr;
5117 for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
5123 /* Return first item in the list. */
5125 static inline struct ls_expr *
5126 first_ls_expr (void)
5128 return pre_ldst_mems;
5131 /* Return the next item in the list after the specified one. */
5133 static inline struct ls_expr *
5134 next_ls_expr (struct ls_expr * ptr)
5139 /* Load Motion for loads which only kill themselves. */
5141 /* Return true if x is a simple MEM operation, with no registers or
5142 side effects. These are the types of loads we consider for the
ld_motion list; otherwise we let the usual aliasing take care of it.  */
5151 if (MEM_VOLATILE_P (x))
5154 if (GET_MODE (x) == BLKmode)
5157 /* If we are handling exceptions, we must be careful with memory references
that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
5160 if (flag_non_call_exceptions && may_trap_p (x))
5163 if (side_effects_p (x))
/* Do not consider function arguments passed on the stack.  */
5167 if (reg_mentioned_p (stack_pointer_rtx, x))
5170 if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
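/* Examples (illustrative): (mem:SI (symbol_ref "x")) passes the checks
   above, while a volatile or BLKmode mem, a mem that may trap when
   -fnon-call-exceptions is in effect, or a stack argument slot such as
   (mem:SI (plus (reg sp) (const_int 8))) does not.  */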
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not alias one another.  If we miss a load, we
   will get the wrong value since gcse might common it and we won't
   know to fix it up.  */
5185 invalidate_any_buried_refs (rtx x)
5189 struct ls_expr * ptr;
5191 /* Invalidate it in the list. */
5192 if (MEM_P (x) && simple_mem (x))
5194 ptr = ldst_entry (x);
5198 /* Recursively process the insn. */
5199 fmt = GET_RTX_FORMAT (GET_CODE (x));
5201 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
5204 invalidate_any_buried_refs (XEXP (x, i));
5205 else if (fmt[i] == 'E')
5206 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5207 invalidate_any_buried_refs (XVECEXP (x, i, j));
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  'Simple'
   here means MEM loads and stores to symbols, with no side effects
5213 and no registers in the expression. For a MEM destination, we also
5214 check that the insn is still valid if we replace the destination with a
5215 REG, as is done in update_ld_motion_stores. If there are any uses/defs
which don't match these criteria, they are invalidated and trimmed out
   later.  */
5220 compute_ld_motion_mems (void)
5222 struct ls_expr * ptr;
5226 pre_ldst_mems = NULL;
5230 for (insn = BB_HEAD (bb);
5231 insn && insn != NEXT_INSN (BB_END (bb));
5232 insn = NEXT_INSN (insn))
5236 if (GET_CODE (PATTERN (insn)) == SET)
5238 rtx src = SET_SRC (PATTERN (insn));
5239 rtx dest = SET_DEST (PATTERN (insn));
5241 /* Check for a simple LOAD... */
5242 if (MEM_P (src) && simple_mem (src))
5244 ptr = ldst_entry (src);
5246 ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
5252 /* Make sure there isn't a buried load somewhere. */
5253 invalidate_any_buried_refs (src);
5256 /* Check for stores. Don't worry about aliased ones, they
5257 will block any movement we might do later. We only care
5258 about this exact pattern since those are the only
circumstances in which we will ignore the aliasing info.  */
5260 if (MEM_P (dest) && simple_mem (dest))
5262 ptr = ldst_entry (dest);
if (! MEM_P (src)
		      && GET_CODE (src) != ASM_OPERANDS
5266 /* Check for REG manually since want_to_gcse_p
5267 returns 0 for all REGs. */
5268 && can_assign_to_reg_p (src))
5269 ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
5275 invalidate_any_buried_refs (PATTERN (insn));
/* Remove any references that have either been invalidated or are not in the
5282 expression list for pre gcse. */
5285 trim_ld_motion_mems (void)
5287 struct ls_expr * * last = & pre_ldst_mems;
5288 struct ls_expr * ptr = pre_ldst_mems;
5294 /* Delete if entry has been made invalid. */
5297 /* Delete if we cannot find this mem in the expression list. */
5298 unsigned int hash = ptr->hash_index % expr_hash_table.size;
5300 for (expr = expr_hash_table.table[hash];
5302 expr = expr->next_same_hash)
5303 if (expr_equiv_p (expr->expr, ptr->pattern))
5307 expr = (struct expr *) 0;
5311 /* Set the expression field if we are keeping it. */
5319 free_ldst_entry (ptr);
5324 /* Show the world what we've found. */
5325 if (gcse_file && pre_ldst_mems != NULL)
5326 print_ldst_list (gcse_file);
5329 /* This routine will take an expression which we are replacing with
5330 a reaching register, and update any stores that are needed if
5331 that expression is in the ld_motion list. Stores are updated by
5332 copying their SRC to the reaching register, and then storing
the reaching register into the store location.  This keeps the
5334 correct value in the reaching register for the loads. */
5337 update_ld_motion_stores (struct expr * expr)
5339 struct ls_expr * mem_ptr;
5341 if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
/* We can try to find just the REACHED stores, but it shouldn't
5344 matter to set the reaching reg everywhere... some might be
5345 dead and should be eliminated later. */
5347 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5348 where reg is the reaching reg used in the load. We checked in
5349 compute_ld_motion_mems that we can replace (set mem expr) with
5350 (set reg expr) in that insn. */
5351 rtx list = mem_ptr->stores;
5353 for ( ; list != NULL_RTX; list = XEXP (list, 1))
5355 rtx insn = XEXP (list, 0);
5356 rtx pat = PATTERN (insn);
5357 rtx src = SET_SRC (pat);
5358 rtx reg = expr->reaching_reg;
5361 /* If we've already copied it, continue. */
5362 if (expr->reaching_reg == src)
5367 fprintf (gcse_file, "PRE: store updated with reaching reg ");
5368 print_rtl (gcse_file, expr->reaching_reg);
5369 fprintf (gcse_file, ":\n ");
5370 print_inline_rtx (gcse_file, insn, 8);
5371 fprintf (gcse_file, "\n");
5374 copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
5375 new = emit_insn_before (copy, insn);
5376 record_one_set (REGNO (reg), new);
5377 SET_SRC (pat) = reg;
/* Un-recognize this pattern since it's probably different now.  */
5380 INSN_CODE (insn) = -1;
5381 gcse_create_count++;
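/* For example (a sketch): with reaching register r9, a tracked store

	(set (mem:SI X) (plus:SI (reg 1) (reg 2)))

   is rewritten by the loop above into

	(set (reg 9) (plus:SI (reg 1) (reg 2)))
	(set (mem:SI X) (reg 9))

   so the loads of X that PRE rewrote in terms of r9 see the stored
   value.  The register numbers are illustrative only.  */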
5386 /* Store motion code. */
5388 #define ANTIC_STORE_LIST(x) ((x)->loads)
5389 #define AVAIL_STORE_LIST(x) ((x)->stores)
5390 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5392 /* This is used to communicate the target bitvector we want to use in the
5393 reg_set_info routine when called via the note_stores mechanism. */
5394 static int * regvec;
5396 /* And current insn, for the same routine. */
5397 static rtx compute_store_table_current_insn;
5399 /* Used in computing the reverse edge graph bit vectors. */
5400 static sbitmap * st_antloc;
5402 /* Global holding the number of store expressions we are dealing with. */
5403 static int num_stores;
/* Check to see if we need to mark a register set.  Called from
   note_stores.  */
5409 reg_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5412 sbitmap bb_reg = data;
5414 if (GET_CODE (dest) == SUBREG)
5415 dest = SUBREG_REG (dest);
5419 regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
5421 SET_BIT (bb_reg, REGNO (dest));
/* Clear any mark that says that this insn sets dest.  Called from
   note_stores.  */
5429 reg_clear_last_set (rtx dest, rtx setter ATTRIBUTE_UNUSED,
5432 int *dead_vec = data;
5434 if (GET_CODE (dest) == SUBREG)
5435 dest = SUBREG_REG (dest);
if (REG_P (dest)
      && dead_vec[REGNO (dest)] == INSN_UID (compute_store_table_current_insn))
5439 dead_vec[REGNO (dest)] = 0;
5442 /* Return zero if some of the registers in list X are killed
because a register in REGS_SET has been set.  */
5446 store_ops_ok (rtx x, int *regs_set)
5450 for (; x; x = XEXP (x, 1))
5453 if (regs_set[REGNO(reg)])
5460 /* Returns a list of registers mentioned in X. */
5462 extract_mentioned_regs (rtx x)
5464 return extract_mentioned_regs_helper (x, NULL_RTX);
/* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
   registers.  */
5470 extract_mentioned_regs_helper (rtx x, rtx accum)
5476 /* Repeat is used to turn tail-recursion into iteration. */
5482 code = GET_CODE (x);
5486 return alloc_EXPR_LIST (0, x, accum);
5496 /* We do not run this function with arguments having side effects. */
5515 i = GET_RTX_LENGTH (code) - 1;
5516 fmt = GET_RTX_FORMAT (code);
5522 rtx tem = XEXP (x, i);
5524 /* If we are about to do the last recursive call
5525 needed at this level, change it into iteration. */
5532 accum = extract_mentioned_regs_helper (tem, accum);
5534 else if (fmt[i] == 'E')
5538 for (j = 0; j < XVECLEN (x, i); j++)
5539 accum = extract_mentioned_regs_helper (XVECEXP (x, i, j), accum);
/* Determine whether INSN is a MEM store pattern that we will consider moving.
5547 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5548 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5549 including) the insn in this basic block. We must be passing through BB from
5550 head to end, as we are using this fact to speed things up.
5552 The results are stored this way:
5554 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5555 -- if the processed expression is not anticipatable, NULL_RTX is added
there instead, so that we can use it as an indicator that no further
5557 expression of this type may be anticipatable
5558 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5559 consequently, all of them but this head are dead and may be deleted.
-- if the expression is not available, the insn that causes it to fail
      to be available is stored in reaching_reg.
Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic block.
5569 find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
5571 struct ls_expr * ptr;
5573 int check_anticipatable, check_available;
5574 basic_block bb = BLOCK_FOR_INSN (insn);
5576 set = single_set (insn);
5580 dest = SET_DEST (set);
5582 if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
5583 || GET_MODE (dest) == BLKmode)
5586 if (side_effects_p (dest))
5589 /* If we are handling exceptions, we must be careful with memory references
that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
5592 if (flag_non_call_exceptions && may_trap_p (dest))
5595 /* Even if the destination cannot trap, the source may. In this case we'd
5596 need to handle updating the REG_EH_REGION note. */
5597 if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
5600 ptr = ldst_entry (dest);
5601 if (!ptr->pattern_regs)
5602 ptr->pattern_regs = extract_mentioned_regs (dest);
5604 /* Do not check for anticipatability if we either found one anticipatable
5605 store already, or tested for one and found out that it was killed. */
5606 check_anticipatable = 0;
5607 if (!ANTIC_STORE_LIST (ptr))
5608 check_anticipatable = 1;
5611 tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
5614 check_anticipatable = 1;
5616 if (check_anticipatable)
5618 if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
5622 ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
5623 ANTIC_STORE_LIST (ptr));
/* It is not necessary to check whether the store is available if we did
5627 it successfully before; if we failed before, do not bother to check
5628 until we reach the insn that caused us to fail. */
5629 check_available = 0;
5630 if (!AVAIL_STORE_LIST (ptr))
5631 check_available = 1;
5634 tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
5635 if (BLOCK_FOR_INSN (tmp) != bb)
5636 check_available = 1;
5638 if (check_available)
/* Check whether we have already reached the insn at which the check
	 failed last time.  */
5642 if (LAST_AVAIL_CHECK_FAILURE (ptr))
5644 for (tmp = BB_END (bb);
5645 tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
5646 tmp = PREV_INSN (tmp))
5649 check_available = 0;
5652 check_available = store_killed_after (dest, ptr->pattern_regs, insn,
bb, regs_set_after,
					  &LAST_AVAIL_CHECK_FAILURE (ptr));
5656 if (!check_available)
5657 AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
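/* A sketch of the classification above for a single basic block:

	MEM <- r1	; first store to MEM: anticipatable, recorded in
			; ANTIC_STORE_LIST unless killed earlier in the block
	r2  <- MEM	; a load: kills availability of the store above
	MEM <- r2	; last store to MEM: available at the block end,
			; becomes the head of AVAIL_STORE_LIST

   (Illustrative only; the precise kill tests are store_killed_before
   and store_killed_after.)  */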
5660 /* Find available and anticipatable stores. */
5663 compute_store_table (void)
5669 int *last_set_in, *already_set;
5670 struct ls_expr * ptr, **prev_next_ptr_ptr;
5672 max_gcse_regno = max_reg_num ();
5674 reg_set_in_block = sbitmap_vector_alloc (last_basic_block,
5676 sbitmap_vector_zero (reg_set_in_block, last_basic_block);
5678 last_set_in = xcalloc (max_gcse_regno, sizeof (int));
5679 already_set = xmalloc (sizeof (int) * max_gcse_regno);
5681 /* Find all the stores we care about. */
5684 /* First compute the registers set in this block. */
5685 regvec = last_set_in;
5687 for (insn = BB_HEAD (bb);
5688 insn != NEXT_INSN (BB_END (bb));
5689 insn = NEXT_INSN (insn))
5691 if (! INSN_P (insn))
5696 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5697 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5699 last_set_in[regno] = INSN_UID (insn);
5700 SET_BIT (reg_set_in_block[bb->index], regno);
5704 pat = PATTERN (insn);
5705 compute_store_table_current_insn = insn;
5706 note_stores (pat, reg_set_info, reg_set_in_block[bb->index]);
5709 /* Now find the stores. */
5710 memset (already_set, 0, sizeof (int) * max_gcse_regno);
5711 regvec = already_set;
5712 for (insn = BB_HEAD (bb);
5713 insn != NEXT_INSN (BB_END (bb));
5714 insn = NEXT_INSN (insn))
5716 if (! INSN_P (insn))
5721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5722 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
5723 already_set[regno] = 1;
5726 pat = PATTERN (insn);
5727 note_stores (pat, reg_set_info, NULL);
5729 /* Now that we've marked regs, look for stores. */
5730 find_moveable_store (insn, already_set, last_set_in);
5732 /* Unmark regs that are no longer set. */
5733 compute_store_table_current_insn = insn;
5734 note_stores (pat, reg_clear_last_set, last_set_in);
5737 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
5738 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
5739 && last_set_in[regno] == INSN_UID (insn))
5740 last_set_in[regno] = 0;
5744 #ifdef ENABLE_CHECKING
5745 /* last_set_in should now be all-zero. */
5746 for (regno = 0; regno < max_gcse_regno; regno++)
5747 gcc_assert (!last_set_in[regno]);
5750 /* Clear temporary marks. */
5751 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5753 LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
5754 if (ANTIC_STORE_LIST (ptr)
5755 && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
5756 ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
5760 /* Remove the stores that are not available anywhere, as there will
5761 be no opportunity to optimize them. */
5762 for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
5764 ptr = *prev_next_ptr_ptr)
5766 if (!AVAIL_STORE_LIST (ptr))
5768 *prev_next_ptr_ptr = ptr->next;
5769 free_ldst_entry (ptr);
5772 prev_next_ptr_ptr = &ptr->next;
5775 ret = enumerate_ldsts ();
5779 fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
5780 print_ldst_list (gcse_file);
5788 /* Check to see if the load X is aliased with STORE_PATTERN.
AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the load X.  */
5793 load_kills_store (rtx x, rtx store_pattern, int after)
5796 return anti_dependence (x, store_pattern);
return true_dependence (store_pattern, GET_MODE (store_pattern), x,
			    rtx_addr_varies_p);
5802 /* Go through the entire insn X, looking for any loads which might alias
5803 STORE_PATTERN. Return true if found.
5804 AFTER is true if we are checking the case when STORE_PATTERN occurs
5805 after the insn X. */
5808 find_loads (rtx x, rtx store_pattern, int after)
5817 if (GET_CODE (x) == SET)
5822 if (load_kills_store (x, store_pattern, after))
5826 /* Recursively process the insn. */
5827 fmt = GET_RTX_FORMAT (GET_CODE (x));
5829 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
5832 ret |= find_loads (XEXP (x, i), store_pattern, after);
5833 else if (fmt[i] == 'E')
5834 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5835 ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
5840 /* Check if INSN kills the store pattern X (is aliased with it).
5841 AFTER is true if we are checking the case when store X occurs
after the insn.  Return true if it does.  */
5845 store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)
5847 rtx reg, base, note;
5854 /* A normal or pure call might read from pattern,
5855 but a const call will not. */
5856 if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
5859 /* But even a const call reads its parameters. Check whether the
base of some of the registers used in the mem is the stack pointer.  */
5861 for (reg = x_regs; reg; reg = XEXP (reg, 1))
5863 base = find_base_term (XEXP (reg, 0));
5865 || (GET_CODE (base) == ADDRESS
5866 && GET_MODE (base) == Pmode
5867 && XEXP (base, 0) == stack_pointer_rtx))
5874 if (GET_CODE (PATTERN (insn)) == SET)
5876 rtx pat = PATTERN (insn);
5877 rtx dest = SET_DEST (pat);
5879 if (GET_CODE (dest) == ZERO_EXTRACT)
5880 dest = XEXP (dest, 0);
5882 /* Check for memory stores to aliased objects. */
if (MEM_P (dest)
	  && !expr_equiv_p (dest, x))
5888 if (output_dependence (dest, x))
5893 if (output_dependence (x, dest))
5897 if (find_loads (SET_SRC (pat), x, after))
5900 else if (find_loads (PATTERN (insn), x, after))
5903 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5904 location aliased with X, then this insn kills X. */
5905 note = find_reg_equal_equiv_note (insn);
5908 note = XEXP (note, 0);
5910 /* However, if the note represents a must alias rather than a may
5911 alias relationship, then it does not kill X. */
5912 if (expr_equiv_p (note, x))
5915 /* See if there are any aliased loads in the note. */
5916 return find_loads (note, x, after);
5919 /* Returns true if the expression X is loaded or clobbered on or after INSN
5920 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
5921 or after the insn. X_REGS is list of registers mentioned in X. If the store
is killed, return in FAIL_INSN the last insn in which that occurs.  */
5925 store_killed_after (rtx x, rtx x_regs, rtx insn, basic_block bb,
5926 int *regs_set_after, rtx *fail_insn)
5928 rtx last = BB_END (bb), act;
5930 if (!store_ops_ok (x_regs, regs_set_after))
5932 /* We do not know where it will happen. */
5934 *fail_insn = NULL_RTX;
5938 /* Scan from the end, so that fail_insn is determined correctly. */
5939 for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
5940 if (store_killed_in_insn (x, x_regs, act, false))
5950 /* Returns true if the expression X is loaded or clobbered on or before INSN
5951 within basic block BB. X_REGS is list of registers mentioned in X.
5952 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
5954 store_killed_before (rtx x, rtx x_regs, rtx insn, basic_block bb,
5955 int *regs_set_before)
5957 rtx first = BB_HEAD (bb);
5959 if (!store_ops_ok (x_regs, regs_set_before))
5962 for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
5963 if (store_killed_in_insn (x, x_regs, insn, true))
5969 /* Fill in available, anticipatable, transparent and kill vectors in
5970 STORE_DATA, based on lists of available and anticipatable stores. */
5972 build_store_vectors (void)
5975 int *regs_set_in_block;
5977 struct ls_expr * ptr;
5980 /* Build the gen_vector. This is any store in the table which is not killed
5981 by aliasing later in its block. */
5982 ae_gen = sbitmap_vector_alloc (last_basic_block, num_stores);
5983 sbitmap_vector_zero (ae_gen, last_basic_block);
5985 st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
5986 sbitmap_vector_zero (st_antloc, last_basic_block);
5988 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
5990 for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
5992 insn = XEXP (st, 0);
5993 bb = BLOCK_FOR_INSN (insn);
5995 /* If we've already seen an available expression in this block,
we can delete this one (it occurs earlier in the block).  We'll
5997 copy the SRC expression to an unused register in case there
5998 are any side effects. */
5999 if (TEST_BIT (ae_gen[bb->index], ptr->index))
6001 rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
6003 fprintf (gcse_file, "Removing redundant store:\n");
6004 replace_store_insn (r, XEXP (st, 0), bb, ptr);
6007 SET_BIT (ae_gen[bb->index], ptr->index);
6010 for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
6012 insn = XEXP (st, 0);
6013 bb = BLOCK_FOR_INSN (insn);
6014 SET_BIT (st_antloc[bb->index], ptr->index);
6018 ae_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
6019 sbitmap_vector_zero (ae_kill, last_basic_block);
6021 transp = sbitmap_vector_alloc (last_basic_block, num_stores);
6022 sbitmap_vector_zero (transp, last_basic_block);
6023 regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);
6027 for (regno = 0; regno < max_gcse_regno; regno++)
6028 regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);
6030 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
6032 if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
6033 bb, regs_set_in_block, NULL))
6035 /* It should not be necessary to consider the expression
6036 killed if it is both anticipatable and available. */
6037 if (!TEST_BIT (st_antloc[bb->index], ptr->index)
6038 || !TEST_BIT (ae_gen[bb->index], ptr->index))
6039 SET_BIT (ae_kill[bb->index], ptr->index);
6042 SET_BIT (transp[bb->index], ptr->index);
6046 free (regs_set_in_block);
6050 dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
6051 dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
6052 dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
6053 dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
6057 /* Insert an instruction at the beginning of a basic block, and update
6058 the BB_HEAD if needed. */
6061 insert_insn_start_bb (rtx insn, basic_block bb)
6063 /* Insert at start of successor block. */
6064 rtx prev = PREV_INSN (BB_HEAD (bb));
6065 rtx before = BB_HEAD (bb);
6068 if (! LABEL_P (before)
6069 && (! NOTE_P (before)
6070 || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
6073 if (prev == BB_END (bb))
6075 before = NEXT_INSN (before);
6078 insn = emit_insn_after_noloc (insn, prev);
6082 fprintf (gcse_file, "STORE_MOTION insert store at start of BB %d:\n",
6084 print_inline_rtx (gcse_file, insn, 6);
6085 fprintf (gcse_file, "\n");
6089 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6090 the memory reference, and E is the edge to insert it on. Returns nonzero
6091 if an edge insertion was performed. */
6094 insert_store (struct ls_expr * expr, edge e)
/* We did all the deletes before this insert, so if we didn't delete a
6102 store, then we haven't set the reaching reg yet either. */
6103 if (expr->reaching_reg == NULL_RTX)
6106 if (e->flags & EDGE_FAKE)
6109 reg = expr->reaching_reg;
6110 insn = gen_move_insn (copy_rtx (expr->pattern), reg);
6112 /* If we are inserting this expression on ALL predecessor edges of a BB,
6113 insert it at the start of the BB, and reset the insert bits on the other
edges so we don't try to insert it there as well.  */
6116 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6117 if (!(tmp->flags & EDGE_FAKE))
6119 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6121 gcc_assert (index != EDGE_INDEX_NO_EDGE);
6122 if (! TEST_BIT (pre_insert_map[index], expr->index))
6126 /* If tmp is NULL, we found an insertion on every edge, blank the
6127 insertion vector for these edges, and insert at the start of the BB. */
6128 if (!tmp && bb != EXIT_BLOCK_PTR)
6130 FOR_EACH_EDGE (tmp, ei, e->dest->preds)
6132 int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
6133 RESET_BIT (pre_insert_map[index], expr->index);
6135 insert_insn_start_bb (insn, bb);
6139 /* We can't put stores in the front of blocks pointed to by abnormal
edges since that may put a store where one did not exist before.  */
6141 gcc_assert (!(e->flags & EDGE_ABNORMAL));
6143 insert_insn_on_edge (insn, e);
6147 fprintf (gcse_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
6148 e->src->index, e->dest->index);
6149 print_inline_rtx (gcse_file, insn, 6);
6150 fprintf (gcse_file, "\n");
6156 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6157 memory location in SMEXPR set in basic block BB.
6159 This could be rather expensive. */
6162 remove_reachable_equiv_notes (basic_block bb, struct ls_expr *smexpr)
6164 edge_iterator *stack, ei;
6167 sbitmap visited = sbitmap_alloc (last_basic_block);
6168 rtx last, insn, note;
6169 rtx mem = smexpr->pattern;
6171 stack = xmalloc (sizeof (edge_iterator) * n_basic_blocks);
6173 ei = ei_start (bb->succs);
6175 sbitmap_zero (visited);
6177 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6185 sbitmap_free (visited);
6188 act = ei_edge (stack[--sp]);
6192 if (bb == EXIT_BLOCK_PTR
6193 || TEST_BIT (visited, bb->index))
6197 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6200 SET_BIT (visited, bb->index);
6202 if (TEST_BIT (st_antloc[bb->index], smexpr->index))
6204 for (last = ANTIC_STORE_LIST (smexpr);
6205 BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
6206 last = XEXP (last, 1))
6208 last = XEXP (last, 0);
6211 last = NEXT_INSN (BB_END (bb));
6213 for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
6216 note = find_reg_equal_equiv_note (insn);
6217 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6221 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6223 remove_note (insn, note);
6228 act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
6230 if (EDGE_COUNT (bb->succs) > 0)
6234 ei = ei_start (bb->succs);
6235 act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
6240 /* This routine will replace a store with a SET to a specified register. */
6243 replace_store_insn (rtx reg, rtx del, basic_block bb, struct ls_expr *smexpr)
6245 rtx insn, mem, note, set, ptr, pair;
6247 mem = smexpr->pattern;
6248 insn = gen_move_insn (reg, SET_SRC (single_set (del)));
6249 insn = emit_insn_after (insn, del);
6254 "STORE_MOTION delete insn in BB %d:\n ", bb->index);
6255 print_inline_rtx (gcse_file, del, 6);
6256 fprintf (gcse_file, "\nSTORE MOTION replaced with insn:\n ");
6257 print_inline_rtx (gcse_file, insn, 6);
6258 fprintf (gcse_file, "\n");
6261 for (ptr = ANTIC_STORE_LIST (smexpr); ptr; ptr = XEXP (ptr, 1))
6262 if (XEXP (ptr, 0) == del)
6264 XEXP (ptr, 0) = insn;
6268 /* Move the notes from the deleted insn to its replacement, and patch
6269 up the LIBCALL notes. */
6270 REG_NOTES (insn) = REG_NOTES (del);
6272 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
6275 pair = XEXP (note, 0);
6276 note = find_reg_note (pair, REG_LIBCALL, NULL_RTX);
6277 XEXP (note, 0) = insn;
6279 note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
6282 pair = XEXP (note, 0);
6283 note = find_reg_note (pair, REG_RETVAL, NULL_RTX);
6284 XEXP (note, 0) = insn;
/* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
6290 they are no longer accurate provided that they are reached by this
6291 definition, so drop them. */
6292 for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
6295 set = single_set (insn);
6298 if (expr_equiv_p (SET_DEST (set), mem))
6300 note = find_reg_equal_equiv_note (insn);
6301 if (!note || !expr_equiv_p (XEXP (note, 0), mem))
6305 fprintf (gcse_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6307 remove_note (insn, note);
6309 remove_reachable_equiv_notes (bb, smexpr);
6313 /* Delete a store, but copy the value that would have been stored into
6314 the reaching_reg for later storing. */
6317 delete_store (struct ls_expr * expr, basic_block bb)
6321 if (expr->reaching_reg == NULL_RTX)
6322 expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));
6324 reg = expr->reaching_reg;
6326 for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
6329 if (BLOCK_FOR_INSN (del) == bb)
6331 /* We know there is only one since we deleted redundant
6332 ones during the available computation. */
6333 replace_store_insn (reg, del, bb, expr);
6339 /* Free memory used by store motion. */
6342 free_store_memory (void)
6347 sbitmap_vector_free (ae_gen);
6349 sbitmap_vector_free (ae_kill);
6351 sbitmap_vector_free (transp);
6353 sbitmap_vector_free (st_antloc);
6355 sbitmap_vector_free (pre_insert_map);
6357 sbitmap_vector_free (pre_delete_map);
6358 if (reg_set_in_block)
6359 sbitmap_vector_free (reg_set_in_block);
6361 ae_gen = ae_kill = transp = st_antloc = NULL;
6362 pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
6365 /* Perform store motion. Much like gcse, except we move expressions the
6366 other way by looking at the flowgraph in reverse. */
6373 struct ls_expr * ptr;
6374 int update_flow = 0;
6378 fprintf (gcse_file, "before store motion\n");
6379 print_rtl (gcse_file, get_insns ());
6382 init_alias_analysis ();
6384 /* Find all the available and anticipatable stores. */
6385 num_stores = compute_store_table ();
6386 if (num_stores == 0)
6388 sbitmap_vector_free (reg_set_in_block);
6389 end_alias_analysis ();
6393 /* Now compute kill & transp vectors. */
6394 build_store_vectors ();
6395 add_noreturn_fake_exit_edges ();
6396 connect_infinite_loops_to_exit ();
6398 edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
6399 st_antloc, ae_kill, &pre_insert_map,
6402 /* Now we want to insert the new stores which are going to be needed. */
6403 for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
/* If any of the edges we have above are abnormal, we can't move this
	 store.  */
6407 for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
6408 if (TEST_BIT (pre_insert_map[x], ptr->index)
6409 && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
6414 if (gcse_file != NULL)
6416 "Can't replace store %d: abnormal edge from %d to %d\n",
6417 ptr->index, INDEX_EDGE (edge_list, x)->src->index,
6418 INDEX_EDGE (edge_list, x)->dest->index);
/* Delete the stores that have become redundant, and insert the ones
	 that are now needed.  */
6425 if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
6426 delete_store (ptr, bb);
6428 for (x = 0; x < NUM_EDGES (edge_list); x++)
6429 if (TEST_BIT (pre_insert_map[x], ptr->index))
6430 update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
6434 commit_edge_insertions ();
6436 free_store_memory ();
6437 free_edge_list (edge_list);
6438 remove_fake_exit_edges ();
6439 end_alias_analysis ();
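/* An illustrative summary (a sketch) of one store motion transformation,
   writing `t' for the expression's reaching_reg:

	for (...)			for (...)
	  {				  {
	    ...			==>	    ...
	    x = v;			    t = v;	<- delete_store
	  }				  }
					x = t;		<- insert_store on the
							   loop exit edge  */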
6443 /* Entry point for jump bypassing optimization pass. */
6446 bypass_jumps (FILE *file)
6450 /* We do not construct an accurate cfg in functions which call
6451 setjmp, so just punt to be safe. */
6452 if (current_function_calls_setjmp)
6455 /* For calling dump_foo fns from gdb. */
6456 debug_stderr = stderr;
6459 /* Identify the basic block information for this function, including
6460 successors and predecessors. */
6461 max_gcse_regno = max_reg_num ();
6464 dump_flow_info (file);
6466 /* Return if there's nothing to do, or it is too expensive. */
if (n_basic_blocks <= 1 || is_too_expensive (_("jump bypassing disabled")))
6470 gcc_obstack_init (&gcse_obstack);
6473 /* We need alias. */
6474 init_alias_analysis ();
6476 /* Record where pseudo-registers are set. This data is kept accurate
6477 during each pass. ??? We could also record hard-reg information here
[since it's unchanging], however it is currently done during hash table
     computation.  */
6481 It may be tempting to compute MEM set information here too, but MEM sets
6482 will be subject to code motion one day and thus we need to compute
6483 information about memory sets when we build the hash tables. */
6485 alloc_reg_set_mem (max_gcse_regno);
6486 compute_sets (get_insns ());
6488 max_gcse_regno = max_reg_num ();
6489 alloc_gcse_mem (get_insns ());
6490 changed = one_cprop_pass (MAX_GCSE_PASSES + 2, 1, 1);
6495 fprintf (file, "BYPASS of %s: %d basic blocks, ",
6496 current_function_name (), n_basic_blocks);
6497 fprintf (file, "%d bytes\n\n", bytes_used);
6500 obstack_free (&gcse_obstack, NULL);
6501 free_reg_set_mem ();
6503 /* We are finished with alias. */
6504 end_alias_analysis ();
6505 allocate_reg_info (max_reg_num (), FALSE, FALSE);
6510 /* Return true if the graph is too expensive to optimize. PASS is the
6511 optimization about to be performed. */
6514 is_too_expensive (const char *pass)
6516 /* Trying to perform global optimizations on flow graphs which have
6517 a high connectivity will take a long time and is unlikely to be
6518 particularly useful.
6520 In normal circumstances a cfg should have about twice as many
6521 edges as blocks. But we do not want to punish small functions
which have a couple of switch statements.  Rather than simply
     threshold the number of blocks, we use something with a more
6524 graceful degradation. */
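/* For example, a function with 1000 basic blocks is only rejected by
     this test if it has more than 24000 edges (20000 + 4 * 1000), i.e.
     an average of 24 successors per block.  */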
6525 if (n_edges > 20000 + n_basic_blocks * 4)
6527 if (warn_disabled_optimization)
6528 warning ("%s: %d basic blocks and %d edges/basic block",
6529 pass, n_basic_blocks, n_edges / n_basic_blocks);
6534 /* If allocating memory for the cprop bitmap would take up too much
6535 storage it's better just to disable the optimization. */
if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
6538 * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
6540 if (warn_disabled_optimization)
6541 warning ("%s: %d basic blocks and %d registers",
6542 pass, n_basic_blocks, max_reg_num ());
6550 #include "gt-gcse.h"