1 /* Define control and data flow tables, and regsets.
2 Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
22 #ifndef GCC_BASIC_BLOCK_H
23 #define GCC_BASIC_BLOCK_H
28 #include "partition.h"
29 #include "hard-reg-set.h"
34 /* Head of register set linked list. */
35 typedef bitmap_head regset_head;
37 /* A pointer to a regset_head. */
38 typedef bitmap regset;
40 /* Allocate a register set with oballoc. */
41 #define ALLOC_REG_SET(OBSTACK) BITMAP_ALLOC (OBSTACK)
43 /* Do any cleanup needed on a regset when it is no longer used. */
44 #define FREE_REG_SET(REGSET) BITMAP_FREE (REGSET)
46 /* Initialize a new regset. */
47 /* Initialize a new regset on the shared register-set obstack.
   The "&reg_obstack" here was mojibake-corrupted to "(R)_obstack"
   (HTML entity &reg;); restored to take the address of reg_obstack.  */
47 #define INIT_REG_SET(HEAD) bitmap_initialize (HEAD, &reg_obstack)
49 /* Clear a register set by freeing up the linked list. */
50 #define CLEAR_REG_SET(HEAD) bitmap_clear (HEAD)
52 /* Copy a register set to another register set. */
53 #define COPY_REG_SET(TO, FROM) bitmap_copy (TO, FROM)
55 /* Compare two register sets. */
56 #define REG_SET_EQUAL_P(A, B) bitmap_equal_p (A, B)
58 /* `and' a register set with a second register set. */
59 #define AND_REG_SET(TO, FROM) bitmap_and_into (TO, FROM)
61 /* `and' the complement of a register set with a register set. */
62 #define AND_COMPL_REG_SET(TO, FROM) bitmap_and_compl_into (TO, FROM)
64 /* Inclusive or a register set with a second register set. */
65 #define IOR_REG_SET(TO, FROM) bitmap_ior_into (TO, FROM)
67 /* Exclusive or a register set with a second register set. */
68 #define XOR_REG_SET(TO, FROM) bitmap_xor_into (TO, FROM)
70 /* Or into TO the register set FROM1 `and'ed with the complement of FROM2. */
71 #define IOR_AND_COMPL_REG_SET(TO, FROM1, FROM2) \
72 bitmap_ior_and_compl_into (TO, FROM1, FROM2)
74 /* Clear a single register in a register set. */
75 #define CLEAR_REGNO_REG_SET(HEAD, REG) bitmap_clear_bit (HEAD, REG)
77 /* Set a single register in a register set. */
78 #define SET_REGNO_REG_SET(HEAD, REG) bitmap_set_bit (HEAD, REG)
80 /* Return true if a register is set in a register set. */
81 #define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)
83 /* Copy the hard registers in a register set to the hard register set. */
84 extern void reg_set_to_hard_reg_set (HARD_REG_SET *, bitmap);
85 #define REG_SET_TO_HARD_REG_SET(TO, FROM) \
87 CLEAR_HARD_REG_SET (TO); \
88 reg_set_to_hard_reg_set (&TO, FROM); \
91 typedef bitmap_iterator reg_set_iterator;
93 /* Loop over all registers in REGSET, starting with MIN, setting REGNUM to the
94 register number and executing CODE for all registers that are set. */
95 #define EXECUTE_IF_SET_IN_REG_SET(REGSET, MIN, REGNUM, RSI) \
96 EXECUTE_IF_SET_IN_BITMAP (REGSET, MIN, REGNUM, RSI)
98 /* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
99 REGNUM to the register number and executing CODE for all registers that are
100 set in the first regset and not set in the second. */
101 #define EXECUTE_IF_AND_COMPL_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
102 EXECUTE_IF_AND_COMPL_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
104 /* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
105 REGNUM to the register number and executing CODE for all registers that are
106 set in both regsets. */
107 /* Loop over all registers set in both REGSET1 and REGSET2, setting REGNUM
   to each such register and using RSI as the iterator state.
   The stray trailing backslash (which made the following comment part of
   the macro expansion) has been removed.  */
107 #define EXECUTE_IF_AND_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
108 EXECUTE_IF_AND_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
110 /* Type we use to hold basic block counters. Should be at least
111 64bit. Although a counter cannot be negative, we use a signed
112 type, because erroneous negative counts can be generated when the
113 flow graph is manipulated by various optimizations. A signed type
114 makes those easy to detect. */
115 typedef HOST_WIDEST_INT gcov_type;
117 /* Control flow edge information. */
118 /* Control flow edge information.  Missing brace/terminator lines and the
   comment-closing lines (which left an unterminated comment swallowing the
   dest_idx field and the struct terminator) restored from GCC 4.3.  */
118 struct edge_def GTY(())
119 {
120 /* The two blocks at the ends of the edge. */
121 struct basic_block_def *src;
122 struct basic_block_def *dest;
124 /* Instructions queued on the edge. */
125 union edge_def_insns {
126 tree GTY ((tag ("true"))) t;
127 rtx GTY ((tag ("false"))) r;
128 } GTY ((desc ("current_ir_type () == IR_GIMPLE"))) insns;
130 /* Auxiliary info specific to a pass. */
131 PTR GTY ((skip (""))) aux;
133 /* Location of any goto implicit in the edge, during tree-ssa. */
134 source_locus goto_locus;
136 int flags; /* see EDGE_* below */
137 int probability; /* biased by REG_BR_PROB_BASE */
138 gcov_type count; /* Expected number of executions calculated
139 in profile.c */
141 /* The index number corresponding to this edge in the edge vector
142 dest->preds. */
143 unsigned int dest_idx;
144 };
146 typedef struct edge_def *edge;
147 DEF_VEC_P(edge);
148 DEF_VEC_ALLOC_P(edge,gc);
149 DEF_VEC_ALLOC_P(edge,heap);
151 /* Bits for edge_def.flags.  The comment-continuation lines that closed the
   multi-line comments were lost, so the open comments swallowed the
   EDGE_ABNORMAL_CALL and EDGE_ALL_FLAGS definitions; restored.  */
151 #define EDGE_FALLTHRU 1 /* 'Straight line' flow */
152 #define EDGE_ABNORMAL 2 /* Strange flow, like computed
153 jump, or eh */
154 #define EDGE_ABNORMAL_CALL 4 /* Call with abnormal exit
155 like an exception, or sibcall */
156 #define EDGE_EH 8 /* Exception throw */
157 #define EDGE_FAKE 16 /* Not a real edge (profile.c) */
158 #define EDGE_DFS_BACK 32 /* A backwards edge */
159 #define EDGE_CAN_FALLTHRU 64 /* Candidate for straight line
160 flow. */
161 #define EDGE_IRREDUCIBLE_LOOP 128 /* Part of irreducible loop. */
162 #define EDGE_SIBCALL 256 /* Edge from sibcall to exit. */
163 #define EDGE_LOOP_EXIT 512 /* Exit of a loop. */
164 #define EDGE_TRUE_VALUE 1024 /* Edge taken when controlling
165 predicate is nonzero. */
166 #define EDGE_FALSE_VALUE 2048 /* Edge taken when controlling
167 predicate is zero. */
168 #define EDGE_EXECUTABLE 4096 /* Edge is executable. Only
169 valid during SSA-CCP. */
170 #define EDGE_CROSSING 8192 /* Edge crosses between hot
171 and cold sections, when we
172 do partitioning. */
173 #define EDGE_ALL_FLAGS 16383
175 #define EDGE_COMPLEX (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)
177 /* Counter summary from the last set of coverage counts read by
178 profile.c. */
179 extern const struct gcov_ctr_summary *profile_info;
181 /* Declared in cfgloop.h. */
184 /* Declared in tree-flow.h. */
185 struct edge_prediction;
188 /* A basic block is a sequence of instructions with only one entry and
189 only one exit. If any one of the instructions are executed, they
190 will all be executed, and in sequence from first to last.
192 There may be COND_EXEC instructions in the basic block. The
193 COND_EXEC *instructions* will be executed -- but if the condition
194 is false the conditionally executed *expressions* will of course
195 not be executed. We don't consider the conditionally executed
196 expression (which might have side-effects) to be in a separate
197 basic block because the program counter will always be at the same
198 location after the COND_EXEC instruction, regardless of whether the
199 condition is true or not.
201 Basic blocks need not start with a label nor end with a jump insn.
202 For example, a previous basic block may just "conditionally fall"
203 into the succeeding basic block, and the last basic block need not
204 end with a jump insn. Block 0 is a descendant of the entry block.
206 A basic block beginning with two labels cannot have notes between
209 Data for jump tables are stored in jump_insns that occur in no
210 basic block even though these insns can follow or precede insns in
211 the same basic block.  */
213 /* Basic block information indexed by block number. */
214 /* Basic block information indexed by block number.  The opening brace,
   the field lines and the closing brace were lost in extraction;
   restored from GCC 4.3.  */
214 struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")))
215 {
216 /* The edges into and out of the block. */
217 VEC(edge,gc) *preds;
218 VEC(edge,gc) *succs;
220 /* Auxiliary info specific to a pass. */
221 PTR GTY ((skip (""))) aux;
223 /* Innermost loop containing the block. */
224 struct loop *loop_father;
226 /* The dominance and postdominance information node. */
227 struct et_node * GTY ((skip (""))) dom[2];
229 /* Previous and next blocks in the chain. */
230 struct basic_block_def *prev_bb;
231 struct basic_block_def *next_bb;
233 union basic_block_il_dependent {
234 struct tree_bb_info * GTY ((tag ("0"))) tree;
235 struct rtl_bb_info * GTY ((tag ("1"))) rtl;
236 } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;
238 /* Expected number of executions: calculated in profile.c. */
239 gcov_type count;
241 /* The index of this block. */
242 int index;
244 /* The loop depth of this block. */
245 int loop_depth;
247 /* Expected frequency. Normalized to be in range 0 to BB_FREQ_MAX. */
248 int frequency;
250 /* Various flags. See BB_* below. */
251 int flags;
252 };
254 /* RTL-specific per-basic-block data; braces and rtx fields restored.  */
254 struct rtl_bb_info GTY(())
255 {
256 /* The first and last insns of the block. */
257 rtx head_;
258 rtx end_;
260 /* In CFGlayout mode points to insn notes/jumptables to be placed just before
261 and after the block. */
262 rtx header;
263 rtx footer;
265 /* This field is used by the bb-reorder and tracer passes. */
266 int visited;
267 };
269 /* GIMPLE/tree-specific per-basic-block data; braces and fields restored.  */
269 struct tree_bb_info GTY(())
270 {
271 /* Pointers to the first and last trees of the block. */
272 tree stmt_list;
274 /* Chain of PHI nodes for this block. */
275 tree phi_nodes;
276 };
278 typedef struct basic_block_def *basic_block;
280 DEF_VEC_P(basic_block);
281 DEF_VEC_ALLOC_P(basic_block,gc);
282 DEF_VEC_ALLOC_P(basic_block,heap);
284 #define BB_FREQ_MAX 10000
286 /* Masks for basic_block.flags.
288 BB_HOT_PARTITION and BB_COLD_PARTITION should be preserved throughout
289 the compilation, so they are never cleared.
291 All other flags may be cleared by clear_bb_flags(). It is generally
292 a bad idea to rely on any flags being up-to-date. */
294 enum bb_flags
295 {
296 /* Only set on blocks that have just been created by create_bb. */
297 BB_NEW = 1 << 0,
299 /* Set by find_unreachable_blocks. Do not rely on this being set in any
300 particular pass. */
301 BB_REACHABLE = 1 << 1,
303 /* Set for blocks in an irreducible loop by loop analysis. */
304 BB_IRREDUCIBLE_LOOP = 1 << 2,
306 /* Set on blocks that may actually not be single-entry single-exit block. */
307 BB_SUPERBLOCK = 1 << 3,
309 /* Set on basic blocks that the scheduler should not touch. This is used
310 by SMS to prevent other schedulers from messing with the loop schedule. */
311 BB_DISABLE_SCHEDULE = 1 << 4,
313 /* Set on blocks that should be put in a hot section. */
314 BB_HOT_PARTITION = 1 << 5,
316 /* Set on blocks that should be put in a cold section. */
317 BB_COLD_PARTITION = 1 << 6,
319 /* Set on block that was duplicated. */
320 BB_DUPLICATED = 1 << 7,
322 /* Set if the label at the top of this block is the target of a non-local goto. */
323 BB_NON_LOCAL_GOTO_TARGET = 1 << 8,
325 /* Set on blocks that are in RTL format. */
326 BB_RTL = 1 << 9,
328 /* Set on blocks that are forwarder blocks.
329 Only used in cfgcleanup.c. */
330 BB_FORWARDER_BLOCK = 1 << 10,
332 /* Set on blocks that cannot be threaded through.
333 Only used in cfgcleanup.c. */
334 BB_NONTHREADABLE_BLOCK = 1 << 11
335 };
337 /* Dummy flag for convenience in the hot/cold partitioning code. */
338 #define BB_UNPARTITIONED 0
340 /* Partitions, to be used when partitioning hot and cold basic blocks into
341 separate sections. */
342 #define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
343 #define BB_SET_PARTITION(bb, part) do { \
344 basic_block bb_ = (bb); \
345 bb_->flags = ((bb_->flags & ~(BB_HOT_PARTITION|BB_COLD_PARTITION)) \
349 #define BB_COPY_PARTITION(dstbb, srcbb) \
350 BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))
352 /* A structure to group all the per-function control flow graph data.
353 The x_* prefixing is necessary because otherwise references to the
354 fields of this struct are interpreted as the defines for backward
355 source compatibility following the definition of this struct. */
356 /* Per-function control flow graph data.  Braces, the x_n_edges field and
   the profile_status enum body were lost in extraction; restored.  */
356 struct control_flow_graph GTY(())
357 {
358 /* Block pointers for the exit and entry of a function.
359 These are always the head and tail of the basic block list. */
360 basic_block x_entry_block_ptr;
361 basic_block x_exit_block_ptr;
363 /* Index by basic block number, get basic block struct info. */
364 VEC(basic_block,gc) *x_basic_block_info;
366 /* Number of basic blocks in this flow graph. */
367 int x_n_basic_blocks;
369 /* Number of edges in this flow graph. */
370 int x_n_edges;
372 /* The first free basic block number. */
373 int x_last_basic_block;
375 /* Mapping of labels to their associated blocks. At present
376 only used for the tree CFG. */
377 VEC(basic_block,gc) *x_label_to_block_map;
379 enum profile_status {
380 PROFILE_ABSENT,
381 PROFILE_GUESSED,
382 PROFILE_READ
383 } x_profile_status;
385 };
386 /* Defines for accessing the fields of the CFG structure for function FN. */
387 #define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_entry_block_ptr)
388 #define EXIT_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_exit_block_ptr)
389 #define basic_block_info_for_function(FN) ((FN)->cfg->x_basic_block_info)
390 #define n_basic_blocks_for_function(FN) ((FN)->cfg->x_n_basic_blocks)
391 #define n_edges_for_function(FN) ((FN)->cfg->x_n_edges)
392 #define last_basic_block_for_function(FN) ((FN)->cfg->x_last_basic_block)
393 #define label_to_block_map_for_function(FN) ((FN)->cfg->x_label_to_block_map)
395 #define BASIC_BLOCK_FOR_FUNCTION(FN,N) \
396 (VEC_index (basic_block, basic_block_info_for_function(FN), (N)))
398 /* Defines for textual backward source compatibility. */
399 #define ENTRY_BLOCK_PTR (cfun->cfg->x_entry_block_ptr)
400 #define EXIT_BLOCK_PTR (cfun->cfg->x_exit_block_ptr)
401 #define basic_block_info (cfun->cfg->x_basic_block_info)
402 #define n_basic_blocks (cfun->cfg->x_n_basic_blocks)
403 #define n_edges (cfun->cfg->x_n_edges)
404 #define last_basic_block (cfun->cfg->x_last_basic_block)
405 #define label_to_block_map (cfun->cfg->x_label_to_block_map)
406 #define profile_status (cfun->cfg->x_profile_status)
408 #define BASIC_BLOCK(N) (VEC_index (basic_block, basic_block_info, (N)))
409 #define SET_BASIC_BLOCK(N,BB) (VEC_replace (basic_block, basic_block_info, (N), (BB)))
411 /* For iterating over basic blocks. */
412 #define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
413 for (BB = FROM; BB != TO; BB = BB->DIR)
415 #define FOR_EACH_BB_FN(BB, FN) \
416 FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)
418 #define FOR_EACH_BB(BB) FOR_EACH_BB_FN (BB, cfun)
420 #define FOR_EACH_BB_REVERSE_FN(BB, FN) \
421 FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)
423 #define FOR_EACH_BB_REVERSE(BB) FOR_EACH_BB_REVERSE_FN(BB, cfun)
425 /* For iterating over insns in basic block. */
426 #define FOR_BB_INSNS(BB, INSN) \
427 for ((INSN) = BB_HEAD (BB); \
428 (INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
429 (INSN) = NEXT_INSN (INSN))
431 /* For iterating over insns in basic block when we might remove the
432 current insn. */
433 #define FOR_BB_INSNS_SAFE(BB, INSN, CURR) \
434 for ((INSN) = BB_HEAD (BB), (CURR) = (INSN) ? NEXT_INSN ((INSN)): NULL; \
435 (INSN) && (INSN) != NEXT_INSN (BB_END (BB)); \
436 (INSN) = (CURR), (CURR) = (INSN) ? NEXT_INSN ((INSN)) : NULL)
438 #define FOR_BB_INSNS_REVERSE(BB, INSN) \
439 for ((INSN) = BB_END (BB); \
440 (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
441 (INSN) = PREV_INSN (INSN))
443 #define FOR_BB_INSNS_REVERSE_SAFE(BB, INSN, CURR) \
444 for ((INSN) = BB_END (BB),(CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL; \
445 (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB)); \
446 (INSN) = (CURR), (CURR) = (INSN) ? PREV_INSN ((INSN)) : NULL)
448 /* Cycles through _all_ basic blocks, even the fake ones (entry and
449 exit block). */
451 #define FOR_ALL_BB(BB) \
452 for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)
454 #define FOR_ALL_BB_FN(BB, FN) \
455 for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)
457 extern bitmap_obstack reg_obstack;
460 /* Stuff for recording basic block info. */
462 #define BB_HEAD(B) (B)->il.rtl->head_
463 #define BB_END(B) (B)->il.rtl->end_
465 /* Special block numbers [markers] for entry and exit. */
466 #define ENTRY_BLOCK (0)
467 #define EXIT_BLOCK (1)
469 /* The two blocks that are always in the cfg. */
470 #define NUM_FIXED_BLOCKS (2)
473 #define BLOCK_NUM(INSN) (BLOCK_FOR_INSN (INSN)->index + 0)
474 #define set_block_for_insn(INSN, BB) (BLOCK_FOR_INSN (INSN) = BB)
476 extern void compute_bb_for_insn (void);
477 extern unsigned int free_bb_for_insn (void);
478 extern void update_bb_for_insn (basic_block);
480 extern void free_basic_block_vars (void);
482 extern void insert_insn_on_edge (rtx, edge);
483 basic_block split_edge_and_insert (edge, rtx);
485 extern void commit_edge_insertions (void);
487 extern void remove_fake_edges (void);
488 extern void remove_fake_exit_edges (void);
489 extern void add_noreturn_fake_exit_edges (void);
490 extern void connect_infinite_loops_to_exit (void);
491 extern edge unchecked_make_edge (basic_block, basic_block, int);
492 extern edge cached_make_edge (sbitmap, basic_block, basic_block, int);
493 extern edge make_edge (basic_block, basic_block, int);
494 extern edge make_single_succ_edge (basic_block, basic_block, int);
495 extern void remove_edge (edge);
496 extern void redirect_edge_succ (edge, basic_block);
497 extern edge redirect_edge_succ_nodup (edge, basic_block);
498 extern void redirect_edge_pred (edge, basic_block);
499 extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
500 extern void clear_bb_flags (void);
501 extern int post_order_compute (int *, bool, bool);
502 extern int inverted_post_order_compute (int *);
503 extern int pre_and_rev_post_order_compute (int *, int *, bool);
504 extern int dfs_enumerate_from (basic_block, int,
505 bool (*)(basic_block, void *),
506 basic_block *, int, void *);
507 extern void compute_dominance_frontiers (bitmap *);
508 extern void dump_bb_info (basic_block, bool, bool, int, const char *, FILE *);
509 extern void dump_edge_info (FILE *, edge, int);
510 extern void brief_dump_cfg (FILE *);
511 extern void clear_edges (void);
512 extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
513 extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
514 gcov_type);
516 /* Structure to group all of the information to process IF-THEN and
517 IF-THEN-ELSE blocks for the conditional execution support. This
518 needs to be in a public file in case the IFCVT macros call
519 functions passing the ce_if_block data structure. */
521 /* State for the conditional-execution (if-conversion) support.  Opening
   brace, #endif and typedef terminator restored.  */
521 typedef struct ce_if_block
522 {
523 basic_block test_bb; /* First test block. */
524 basic_block then_bb; /* THEN block. */
525 basic_block else_bb; /* ELSE block or NULL. */
526 basic_block join_bb; /* Join THEN/ELSE blocks. */
527 basic_block last_test_bb; /* Last bb to hold && or || tests. */
528 int num_multiple_test_blocks; /* # of && and || basic blocks. */
529 int num_and_and_blocks; /* # of && blocks. */
530 int num_or_or_blocks; /* # of || blocks. */
531 int num_multiple_test_insns; /* # of insns in && and || blocks. */
532 int and_and_p; /* Complex test is &&. */
533 int num_then_insns; /* # of insns in THEN block. */
534 int num_else_insns; /* # of insns in ELSE block. */
535 int pass; /* Pass number. */
537 #ifdef IFCVT_EXTRA_FIELDS
538 IFCVT_EXTRA_FIELDS /* Any machine dependent fields. */
539 #endif
540 } ce_if_block_t;
543 /* This structure maintains an edge list vector. */
544 struct edge_list
545 {
546 int num_blocks;
547 int num_edges;
548 edge *index_to_edge;
549 };
551 /* The base value for branch probability notes and edge probabilities. */
552 #define REG_BR_PROB_BASE 10000
554 /* This is the value which indicates no edge is present. */
555 #define EDGE_INDEX_NO_EDGE -1
557 /* EDGE_INDEX returns an integer index for an edge, or EDGE_INDEX_NO_EDGE
558 if there is no edge between the 2 basic blocks. */
559 #define EDGE_INDEX(el, pred, succ) (find_edge_index ((el), (pred), (succ)))
561 /* INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB return a pointer to the basic
562 block which is either the pred or succ end of the indexed edge. */
563 #define INDEX_EDGE_PRED_BB(el, index) ((el)->index_to_edge[(index)]->src)
564 #define INDEX_EDGE_SUCC_BB(el, index) ((el)->index_to_edge[(index)]->dest)
566 /* INDEX_EDGE returns a pointer to the edge. */
567 #define INDEX_EDGE(el, index) ((el)->index_to_edge[(index)])
569 /* Number of edges in the compressed edge list. */
570 #define NUM_EDGES(el) ((el)->num_edges)
572 /* BB is assumed to contain conditional jump. Return the fallthru edge. */
573 #define FALLTHRU_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
574 ? EDGE_SUCC ((bb), 0) : EDGE_SUCC ((bb), 1))
576 /* BB is assumed to contain conditional jump. Return the branch edge. */
577 #define BRANCH_EDGE(bb) (EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
578 ? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))
580 /* Return expected execution frequency of the edge E: the source block's
   frequency scaled by the edge probability, rounded to nearest.  The
   "* (e)->probability" and "/ REG_BR_PROB_BASE)" continuation lines were
   lost, leaving a dangling backslash; restored.  */
581 #define EDGE_FREQUENCY(e) (((e)->src->frequency \
582 * (e)->probability \
583 + REG_BR_PROB_BASE / 2) \
584 / REG_BR_PROB_BASE)
586 /* Return nonzero if edge is critical. */
587 #define EDGE_CRITICAL_P(e) (EDGE_COUNT ((e)->src->succs) >= 2 \
588 && EDGE_COUNT ((e)->dest->preds) >= 2)
590 #define EDGE_COUNT(ev) VEC_length (edge, (ev))
591 #define EDGE_I(ev,i) VEC_index (edge, (ev), (i))
592 #define EDGE_PRED(bb,i) VEC_index (edge, (bb)->preds, (i))
593 #define EDGE_SUCC(bb,i) VEC_index (edge, (bb)->succs, (i))
595 /* Returns true if BB has precisely one successor. */
597 static inline bool
598 single_succ_p (basic_block bb)
599 {
600 return EDGE_COUNT (bb->succs) == 1;
601 }
603 /* Returns true if BB has precisely one predecessor. */
605 static inline bool
606 single_pred_p (basic_block bb)
607 {
608 return EDGE_COUNT (bb->preds) == 1;
609 }
611 /* Returns the single successor edge of basic block BB. Aborts if
612 BB does not have exactly one successor. */
614 static inline edge
615 single_succ_edge (basic_block bb)
616 {
617 gcc_assert (single_succ_p (bb));
618 return EDGE_SUCC (bb, 0);
619 }
621 /* Returns the single predecessor edge of basic block BB. Aborts
622 if BB does not have exactly one predecessor. */
624 static inline edge
625 single_pred_edge (basic_block bb)
626 {
627 gcc_assert (single_pred_p (bb));
628 return EDGE_PRED (bb, 0);
629 }
631 /* Returns the single successor block of basic block BB. Aborts
632 if BB does not have exactly one successor. */
634 static inline basic_block
635 single_succ (basic_block bb)
636 {
637 return single_succ_edge (bb)->dest;
638 }
640 /* Returns the single predecessor block of basic block BB. Aborts
641 if BB does not have exactly one predecessor.*/
643 static inline basic_block
644 single_pred (basic_block bb)
645 {
646 return single_pred_edge (bb)->src;
647 }
649 /* Iterator object for edges. */
651 typedef struct {
652 unsigned index;
653 VEC(edge,gc) **container;
654 } edge_iterator;
656 static inline VEC(edge,gc) *
657 ei_container (edge_iterator i)
658 {
659 gcc_assert (i.container);
660 return *i.container;
661 }
663 #define ei_start(iter) ei_start_1 (&(iter))
664 #define ei_last(iter) ei_last_1 (&(iter))
666 /* Return an iterator pointing to the start of an edge vector. */
667 static inline edge_iterator
668 ei_start_1 (VEC(edge,gc) **ev)
669 {
670 edge_iterator i;
672 i.index = 0;
673 i.container = ev;
675 return i;
676 }
678 /* Return an iterator pointing to the last element of an edge
679 vector. */
680 static inline edge_iterator
681 ei_last_1 (VEC(edge,gc) **ev)
682 {
683 edge_iterator i;
685 i.index = EDGE_COUNT (*ev) - 1;
686 i.container = ev;
688 return i;
689 }
691 /* Is the iterator `i' at the end of the sequence? */
692 static inline bool
693 ei_end_p (edge_iterator i)
694 {
695 return (i.index == EDGE_COUNT (ei_container (i)));
696 }
698 /* Is the iterator `i' at one position before the end of the
699 sequence? */
700 static inline bool
701 ei_one_before_end_p (edge_iterator i)
702 {
703 return (i.index + 1 == EDGE_COUNT (ei_container (i)));
704 }
706 /* Advance the iterator to the next element. */
707 static inline void
708 ei_next (edge_iterator *i)
709 {
710 gcc_assert (i->index < EDGE_COUNT (ei_container (*i)));
711 i->index++;
712 }
714 /* Move the iterator to the previous element. */
715 static inline void
716 ei_prev (edge_iterator *i)
717 {
718 gcc_assert (i->index > 0);
719 i->index--;
720 }
722 /* Return the edge pointed to by the iterator `i'. */
723 static inline edge
724 ei_edge (edge_iterator i)
725 {
726 return EDGE_I (ei_container (i), i.index);
727 }
729 /* Return an edge pointed to by the iterator. Do it safely so that
730 NULL is returned when the iterator is pointing at the end of the
731 sequence. */
732 static inline edge
733 ei_safe_edge (edge_iterator i)
734 {
735 return !ei_end_p (i) ? ei_edge (i) : NULL;
736 }
738 /* Return 1 if we should continue to iterate. Return 0 otherwise.
739 *Edge P is set to the next edge if we are to continue to iterate
740 and NULL otherwise. */
742 static inline bool
743 ei_cond (edge_iterator ei, edge *p)
744 {
745 if (!ei_end_p (ei))
746 {
747 *p = ei_edge (ei);
748 return 1;
749 }
750 else
751 {
752 *p = NULL;
753 return 0;
754 }
755 }
757 /* This macro serves as a convenient way to iterate each edge in a
758 vector of predecessor or successor edges. It must not be used when
759 an element might be removed during the traversal, otherwise
760 elements will be missed. Instead, use a for-loop like that shown
761 in the following pseudo-code:
763 FOR (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
764 {
765 if (e != taken_edge)
766 remove_edge (e);
767 else
768 ei_next (&ei);
769 }
770 */
772 /* Iterate each edge in the vector EDGE_VEC; not safe against removal
   during traversal.  The final "ei_next (&(ITER)))" line was lost,
   leaving a dangling backslash; restored.  */
772 #define FOR_EACH_EDGE(EDGE,ITER,EDGE_VEC) \
773 for ((ITER) = ei_start ((EDGE_VEC)); \
774 ei_cond ((ITER), &(EDGE)); \
775 ei_next (&(ITER)))
777 struct edge_list * create_edge_list (void);
778 void free_edge_list (struct edge_list *);
779 void print_edge_list (FILE *, struct edge_list *);
780 void verify_edge_list (FILE *, struct edge_list *);
781 int find_edge_index (struct edge_list *, basic_block, basic_block);
782 edge find_edge (basic_block, basic_block);
784 #define CLEANUP_EXPENSIVE 1 /* Do relatively expensive optimizations
785 except for edge forwarding */
786 #define CLEANUP_CROSSJUMP 2 /* Do crossjumping. */
787 #define CLEANUP_POST_REGSTACK 4 /* We run after reg-stack and need
788 to care REG_DEAD notes. */
789 #define CLEANUP_THREADING 8 /* Do jump threading. */
790 #define CLEANUP_NO_INSN_DEL 16 /* Do not try to delete trivially dead
792 #define CLEANUP_CFGLAYOUT 32 /* Do cleanup in cfglayout mode. */
794 /* The following are ORed in on top of the CLEANUP* flags in calls to
795 struct_equiv_block_eq. */
796 #define STRUCT_EQUIV_START 64 /* Initializes the search range. */
797 #define STRUCT_EQUIV_RERUN 128 /* Rerun to find register use in
798 found equivalence. */
799 #define STRUCT_EQUIV_FINAL 256 /* Make any changes necessary to get
800 actual equivalence. */
801 #define STRUCT_EQUIV_NEED_FULL_BLOCK 512 /* struct_equiv_block_eq is required
802 to match only full blocks */
803 #define STRUCT_EQUIV_MATCH_JUMPS 1024 /* Also include the jumps at the end of the block in the comparison. */
806 /* In lcm.c.  The closing "sbitmap **);" lines of both prototypes were
   lost, leaving the declarations unterminated; restored.  */
806 extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,
807 sbitmap *, sbitmap *, sbitmap **,
808 sbitmap **);
809 extern struct edge_list *pre_edge_rev_lcm (int, sbitmap *,
810 sbitmap *, sbitmap *,
811 sbitmap *, sbitmap **,
812 sbitmap **);
813 extern void compute_available (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
816 extern bool maybe_hot_bb_p (basic_block);
817 extern bool probably_cold_bb_p (basic_block);
818 extern bool probably_never_executed_bb_p (basic_block);
819 extern bool tree_predicted_by_p (basic_block, enum br_predictor);
820 extern bool rtl_predicted_by_p (basic_block, enum br_predictor);
821 extern void tree_predict_edge (edge, enum br_predictor, int);
822 extern void rtl_predict_edge (edge, enum br_predictor, int);
823 extern void predict_edge_def (edge, enum br_predictor, enum prediction);
824 extern void guess_outgoing_edge_probabilities (basic_block);
825 extern void remove_predictions_associated_with_edge (edge);
826 extern bool edge_probability_reliable_p (edge);
827 extern bool br_prob_note_reliable_p (rtx);
830 extern void dump_regset (regset, FILE *);
831 extern void debug_regset (regset);
832 extern void init_flow (void);
833 extern void debug_bb (basic_block);
834 extern basic_block debug_bb_n (int);
835 extern void dump_regset (regset, FILE *);
836 extern void debug_regset (regset);
837 extern void expunge_block (basic_block);
838 extern void link_block (basic_block, basic_block);
839 extern void unlink_block (basic_block);
840 extern void compact_blocks (void);
841 extern basic_block alloc_block (void);
842 extern void alloc_aux_for_block (basic_block, int);
843 extern void alloc_aux_for_blocks (int);
844 extern void clear_aux_for_blocks (void);
845 extern void free_aux_for_blocks (void);
846 extern void alloc_aux_for_edge (edge, int);
847 extern void alloc_aux_for_edges (int);
848 extern void clear_aux_for_edges (void);
849 extern void free_aux_for_edges (void);
852 extern void find_unreachable_blocks (void);
853 extern bool forwarder_block_p (basic_block);
854 extern bool can_fallthru (basic_block, basic_block);
855 extern bool could_fall_through (basic_block, basic_block);
856 extern void flow_nodes_print (const char *, const sbitmap, FILE *);
857 extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
860 extern basic_block force_nonfallthru (edge);
861 extern rtx block_label (basic_block);
862 extern bool purge_all_dead_edges (void);
863 extern bool purge_dead_edges (basic_block);
866 extern void find_many_sub_basic_blocks (sbitmap);
867 extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
868 extern void find_basic_blocks (rtx);
870 /* In cfgcleanup.c. */
871 extern bool cleanup_cfg (int);
872 extern bool delete_unreachable_blocks (void);
874 extern bool mark_dfs_back_edges (void);
875 extern void set_edge_can_fallthru_flag (void);
876 extern void update_br_prob_note (basic_block);
877 extern void fixup_abnormal_edges (void);
878 extern bool inside_basic_block_p (rtx);
879 extern bool control_flow_insn_p (rtx);
880 extern rtx get_last_bb_insn (basic_block);
882 /* In bb-reorder.c */
883 extern void reorder_basic_blocks (void);
886 /* In dominance.c — missing enum headers and closers restored.  */
887 enum cdi_direction
888 {
889 CDI_DOMINATORS = 1,
890 CDI_POST_DOMINATORS = 2
891 };
893 enum dom_state
894 {
895 DOM_NONE, /* Not computed at all. */
896 DOM_NO_FAST_QUERY, /* The data is OK, but the fast query data are not usable. */
897 DOM_OK /* Everything is ok. */
898 };
900 extern enum dom_state dom_info_state (enum cdi_direction);
901 extern void set_dom_info_availability (enum cdi_direction, enum dom_state);
902 extern bool dom_info_available_p (enum cdi_direction);
903 extern void calculate_dominance_info (enum cdi_direction);
904 extern void free_dominance_info (enum cdi_direction);
905 extern basic_block nearest_common_dominator (enum cdi_direction,
906 basic_block, basic_block);
907 extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
908 bitmap);
909 extern void set_immediate_dominator (enum cdi_direction, basic_block,
910 basic_block);
911 extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
912 extern bool dominated_by_p (enum cdi_direction, basic_block, basic_block);
913 extern VEC (basic_block, heap) *get_dominated_by (enum cdi_direction, basic_block);
914 extern VEC (basic_block, heap) *get_dominated_by_region (enum cdi_direction,
915 basic_block *,
916 int);
917 extern void add_to_dominance_info (enum cdi_direction, basic_block);
918 extern void delete_from_dominance_info (enum cdi_direction, basic_block);
919 basic_block recompute_dominator (enum cdi_direction, basic_block);
920 extern void redirect_immediate_dominators (enum cdi_direction, basic_block,
921 basic_block);
922 extern void iterate_fix_dominators (enum cdi_direction,
923 VEC (basic_block, heap) *, bool);
924 extern void verify_dominators (enum cdi_direction);
925 extern basic_block first_dom_son (enum cdi_direction, basic_block);
926 extern basic_block next_dom_son (enum cdi_direction, basic_block);
927 unsigned bb_dom_dfs_in (enum cdi_direction, basic_block);
928 unsigned bb_dom_dfs_out (enum cdi_direction, basic_block);
930 extern edge try_redirect_by_replacing_jump (edge, basic_block, bool);
931 extern void break_superblocks (void);
932 extern void relink_block_chain (bool);
933 extern void check_bb_profile (basic_block, FILE *);
934 extern void update_bb_profile_for_threading (basic_block, int, gcov_type, edge);
935 extern void init_rtl_bb_info (basic_block);
937 extern void initialize_original_copy_tables (void);
938 extern void free_original_copy_tables (void);
939 extern void set_bb_original (basic_block, basic_block);
940 extern basic_block get_bb_original (basic_block);
941 extern void set_bb_copy (basic_block, basic_block);
942 extern basic_block get_bb_copy (basic_block);
943 void set_loop_copy (struct loop *, struct loop *);
944 struct loop *get_loop_copy (struct loop *);
947 extern rtx insert_insn_end_bb_new (rtx, basic_block);
949 #include "cfghooks.h"
951 /* In struct-equiv.c */
953 /* Constants used to size arrays in struct equiv_info (currently only one).
954 When these limits are exceeded, struct_equiv returns zero.
955 The maximum number of pseudo registers that are different in the two blocks,
956 but appear in equivalent places and are dead at the end (or where one of
957 a pair is dead at the end). */
958 #define STRUCT_EQUIV_MAX_LOCAL 16
959 /* The maximum number of references to an input register that struct_equiv
960 can handle. */
/* NOTE(review): struct struct_equiv_checkpoint below is incomplete in this
   extraction -- the opening '{' (original line 970), the INPUT_VALID member
   (around lines 986-987), the VERSION member and the closing '};' (around
   lines 995-997) are missing, and the introductory comment that starts at
   original line 962 is unterminated here (its closing line 968 is gone).
   Recover the full definition from the upstream basic-block.h; do not
   attempt to compile this fragment as-is.  */
962 /* Structure used to track state during struct_equiv that can be rolled
963 back when we find we can't match an insn, or if we want to match part
964 of it in a different way.
965 This information pertains to the pair of partial blocks that has been
966 matched so far. Since this pair is structurally equivalent, this is
967 conceptually just one partial block expressed in two potentially
969 struct struct_equiv_checkpoint
971 int ninsns; /* Insns are matched so far. */
972 int local_count; /* Number of block-local registers. */
973 int input_count; /* Number of inputs to the block. */
975 /* X_START and Y_START are the first insns (in insn stream order)
976 of the partial blocks that have been considered for matching so far.
977 Since we are scanning backwards, they are also the instructions that
978 are currently considered - or the last ones that have been considered -
979 for matching (Unless we tracked back to these because a preceding
980 instruction failed to match). */
981 rtx x_start, y_start;
983 /* INPUT_VALID indicates if we have actually set up X_INPUT / Y_INPUT
984 during the current pass; we keep X_INPUT / Y_INPUT around between passes
985 so that we can match REG_EQUAL / REG_EQUIV notes referring to these. */
/* NOTE(review): the member declarations documented by the comment above
   (and by the VERSION comment below) are absent from this extraction.  */
988 /* Some information would be expensive to exactly checkpoint, so we
989 merely increment VERSION any time information about local
990 registers, inputs and/or register liveness changes. When backtracking,
991 it is decremented for changes that can be undone, and if a discrepancy
992 remains, NEED_RERUN in the relevant struct equiv_info is set to indicate
993 that a new pass should be made over the entire block match to get
994 accurate register information. */
/* NOTE(review): struct equiv_info below is badly mutilated by the
   extraction.  Missing pieces include: the end of the introductory comment
   and the 'struct equiv_info {' opener (original lines 1000-1005), the
   MODE member (~1011), INPUT_COST (~1017), INPUT_REG (~1031-1033),
   COMMON_LIVE (~1037-1038), EQUIV_USED (~1049), NEED_RERUN (~1062),
   LIVE_UPDATE (~1078-1080), DYING_INPUTS (~1097), X_END/Y_END
   (~1100-1101), and the closing '};' (~1105-1106).  Several comments
   below are consequently unterminated fragments.  The surviving comments
   still document the intended members accurately per their own text, but
   the definition must be recovered from the upstream basic-block.h
   before any compilation.  Fused original line numbers must be stripped
   as well.  */
998 /* A struct equiv_info is used to pass information to struct_equiv and
999 to gather state while two basic blocks are checked for structural
1004 /* Fields set up by the caller to struct_equiv_block_eq */
1006 basic_block x_block, y_block; /* The two blocks being matched. */
1008 /* MODE carries the mode bits from cleanup_cfg if we are called from
1009 try_crossjump_to_edge, and additionally it carries the
1010 STRUCT_EQUIV_* bits described above. */
1013 /* INPUT_COST is the cost that adding an extra input to the matched blocks
1014 is supposed to have, and is taken into account when considering if the
1015 matched sequence should be extended backwards. input_cost < 0 means
1016 don't accept any inputs at all. */
1020 /* Fields to track state inside of struct_equiv_block_eq. Some of these
1021 are also outputs. */
1023 /* X_INPUT and Y_INPUT are used by struct_equiv to record a register that
1024 is used as an input parameter, i.e. where different registers are used
1025 as sources. This is only used for a register that is live at the end
1026 of the blocks, or in some identical code at the end of the blocks;
1027 Inputs that are dead at the end go into X_LOCAL / Y_LOCAL. */
1028 rtx x_input, y_input;
1029 /* When a previous pass has identified a valid input, INPUT_REG is set
1030 by struct_equiv_block_eq, and it is henceforth replaced in X_BLOCK
1034 /* COMMON_LIVE keeps track of the registers which are currently live
1035 (as we scan backwards from the end) and have the same numbers in both
1036 blocks. N.B. a register that is in common_live is unsuitable to become
1039 /* Likewise, X_LOCAL_LIVE / Y_LOCAL_LIVE keep track of registers that are
1040 local to one of the blocks; these registers must not be accepted as
1041 identical when encountered in both blocks. */
1042 regset x_local_live, y_local_live;
1044 /* EQUIV_USED indicates for which insns a REG_EQUAL or REG_EQUIV note is
1045 being used, to avoid having to backtrack in the next pass, so that we
1046 get accurate life info for this insn then. For each such insn,
1047 the bit with the number corresponding to the CUR.NINSNS value at the
1048 time of scanning is set. */
1051 /* Current state that can be saved & restored easily. */
1052 struct struct_equiv_checkpoint cur;
1053 /* BEST_MATCH is used to store the best match so far, weighing the
1054 cost of matched insns COSTS_N_INSNS (CUR.NINSNS) against the cost
1055 CUR.INPUT_COUNT * INPUT_COST of setting up the inputs. */
1056 struct struct_equiv_checkpoint best_match;
1057 /* If a checkpoint restore failed, or an input conflict newly arises,
1058 NEED_RERUN is set. This has to be tested by the caller to re-run
1059 the comparison if the match appears otherwise sound. The state kept in
1060 x_start, y_start, equiv_used and check_input_conflict ensures that
1061 we won't loop indefinitely. */
1063 /* If there is indication of an input conflict at the end,
1064 CHECK_INPUT_CONFLICT is set so that we'll check for input conflicts
1065 for each insn in the next pass. This is needed so that we won't discard
1066 a partial match if there is a longer match that has to be abandoned due
1067 to an input conflict. */
1068 bool check_input_conflict;
1069 /* HAD_INPUT_CONFLICT is set if CHECK_INPUT_CONFLICT was already set and we
1070 have passed a point where there were multiple dying inputs. This helps
1071 us decide if we should set check_input_conflict for the next pass. */
1072 bool had_input_conflict;
1074 /* LIVE_UPDATE controls if we want to change any life info at all. We
1075 set it to false during REG_EQUAL / REG_EUQIV note comparison of the final
1076 pass so that we don't introduce new registers just for the note; if we
1077 can't match the notes without the current register information, we drop
1081 /* X_LOCAL and Y_LOCAL are used to gather register numbers of register pairs
1082 that are local to X_BLOCK and Y_BLOCK, with CUR.LOCAL_COUNT being the index
1083 to the next free entry. */
1084 rtx x_local[STRUCT_EQUIV_MAX_LOCAL], y_local[STRUCT_EQUIV_MAX_LOCAL];
1085 /* LOCAL_RVALUE is nonzero if the corresponding X_LOCAL / Y_LOCAL entry
1086 was a source operand (including STRICT_LOW_PART) for the last invocation
1087 of struct_equiv mentioning it, zero if it was a destination-only operand.
1088 Since we are scanning backwards, this means the register is input/local
1089 for the (partial) block scanned so far. */
1090 bool local_rvalue[STRUCT_EQUIV_MAX_LOCAL];
1093 /* Additional fields that are computed for the convenience of the caller. */
1095 /* DYING_INPUTS is set to the number of local registers that turn out
1096 to be inputs to the (possibly partial) block. */
1098 /* X_END and Y_END are the last insns in X_BLOCK and Y_BLOCK, respectively,
1099 that are being compared. A final jump insn will not be included. */
1102 /* If we are matching tablejumps, X_LABEL in X_BLOCK corresponds to
1103 Y_LABEL in Y_BLOCK. */
1104 rtx x_label, y_label;
/* Entry points of the structural-equivalence machinery (struct-equiv.c),
   operating on the struct equiv_info state above.
   NOTE(review): original lines 1112-1113 (presumably a blank line and an
   "In cfgcleanup.c" locator comment for condjump_equiv_p) are missing
   from this extraction; verify against upstream.  */
1108 extern bool insns_match_p (rtx, rtx, struct equiv_info *);
1109 extern int struct_equiv_block_eq (int, struct equiv_info *);
1110 extern bool struct_equiv_init (int, struct equiv_info *);
1111 extern bool rtx_equiv_p (rtx *, rtx, int, struct equiv_info *);
1114 extern bool condjump_equiv_p (struct equiv_info *, bool);
1116 /* Return true when one of the predecessor edges of BB is marked with EDGE_EH. */
/* NOTE(review): this inline function is incomplete in the extraction --
   the 'static inline bool' header line (original 1117), the braces, the
   local 'edge e' / 'edge_iterator ei' declarations (~1119-1122), and the
   'return true;' / 'return false;' statements (~1124-1129) are missing.
   What survives shows the intended shape: iterate bb->preds with
   FOR_EACH_EDGE and test e->flags for EDGE_EH.  Recover the full body
   from upstream before compiling.  */
1118 bb_has_eh_pred (basic_block bb)
1123 FOR_EACH_EDGE (e, ei, bb->preds)
1125 if (e->flags & EDGE_EH)
1131 /* In cfgloopmanip.c. */
/* MFB_KJ_EDGE / mfb_keep_just: presumably the edge-filter callback pair
   used by make_forwarder_block to keep just one edge -- TODO confirm
   against cfgloopmanip.c.  */
1132 extern edge mfb_kj_edge;
1133 bool mfb_keep_just (edge);
1135 #endif /* GCC_BASIC_BLOCK_H */