1 /* Building internal representation for IRA.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "insn-config.h"
40 #include "sparseset.h"
42 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
44 static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
45 ira_loop_tree_node_t);
47 /* The root of the loop tree corresponding to the all function. */
48 ira_loop_tree_node_t ira_loop_tree_root;
50 /* Height of the loop tree. */
51 int ira_loop_tree_height;
53 /* All nodes representing basic blocks are referred through the
54 following array. We can not use basic block member `aux' for this
55 because it is used for insertion of insns on edges. */
56 ira_loop_tree_node_t ira_bb_nodes;
58 /* All nodes representing loops are referred through the following
60 ira_loop_tree_node_t ira_loop_nodes;
62 /* Map regno -> allocnos with given regno (see comments for
63 allocno member `next_regno_allocno'). */
64 ira_allocno_t *ira_regno_allocno_map;
66 /* Array of references to all allocnos. The order number of the
67 allocno corresponds to the index in the array. Removed allocnos
68 have NULL element value. */
69 ira_allocno_t *ira_allocnos;
71 /* Sizes of the previous array. */
74 /* Map conflict id -> allocno with given conflict id (see comments for
75 allocno member `conflict_id'). */
76 ira_allocno_t *ira_conflict_id_allocno_map;
78 /* Array of references to all copies. The order number of the copy
79 corresponds to the index in the array. Removed copies have NULL
81 ira_copy_t *ira_copies;
83 /* Size of the previous array. */
88 /* LAST_BASIC_BLOCK before generating additional insns because of live
89 range splitting. Emitting insns on a critical edge creates a new
91 static int last_basic_block_before_change;
93 /* The following function allocates the loop tree nodes. If LOOPS_P
94 is FALSE, the nodes corresponding to the loops (except the root
95 which corresponds the all function) will be not allocated but nodes
96 will still be allocated for basic blocks. */
/* NOTE(review): this listing is elided -- original lines are missing
   between the numbered statements below, so only partial structure is
   visible.  */
98 create_loop_tree_nodes (bool loops_p)
105 VEC (edge, heap) *edges;
/* Allocate one tree node per basic block; maps and bitmaps start out
   NULL and are created lazily elsewhere.  */
109 = ((struct ira_loop_tree_node *)
110 ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
111 last_basic_block_before_change = last_basic_block;
112 for (i = 0; i < (unsigned int) last_basic_block; i++)
114 ira_bb_nodes[i].regno_allocno_map = NULL;
115 memset (ira_bb_nodes[i].reg_pressure, 0,
116 sizeof (ira_bb_nodes[i].reg_pressure));
117 ira_bb_nodes[i].all_allocnos = NULL;
118 ira_bb_nodes[i].modified_regnos = NULL;
119 ira_bb_nodes[i].border_allocnos = NULL;
120 ira_bb_nodes[i].local_copies = NULL;
/* Allocate one tree node per loop recorded in ira_loops.  */
122 ira_loop_nodes = ((struct ira_loop_tree_node *)
123 ira_allocate (sizeof (struct ira_loop_tree_node)
124 * VEC_length (loop_p, ira_loops.larray)));
125 max_regno = max_reg_num ();
126 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
128 if (loop != ira_loops.tree_root)
130 ira_loop_nodes[i].regno_allocno_map = NULL;
/* Presumably loops entered or exited through abnormal critical edges
   are rejected as separate regions (insns cannot be emitted on such
   edges) -- confirm against the elided lines.  */
134 FOR_EACH_EDGE (e, ei, loop->header->preds)
135 if (e->src != loop->latch
136 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
143 edges = get_loop_exit_edges (loop);
144 for (j = 0; VEC_iterate (edge, edges, j, e); j++)
145 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
150 VEC_free (edge, heap, edges);
/* The loop is kept as a region: allocate and zero its regno->allocno
   map and create its bitmaps.  */
154 ira_loop_nodes[i].regno_allocno_map
155 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
156 memset (ira_loop_nodes[i].regno_allocno_map, 0,
157 sizeof (ira_allocno_t) * max_regno);
158 memset (ira_loop_nodes[i].reg_pressure, 0,
159 sizeof (ira_loop_nodes[i].reg_pressure));
160 ira_loop_nodes[i].all_allocnos = ira_allocate_bitmap ();
161 ira_loop_nodes[i].modified_regnos = ira_allocate_bitmap ();
162 ira_loop_nodes[i].border_allocnos = ira_allocate_bitmap ();
163 ira_loop_nodes[i].local_copies = ira_allocate_bitmap ();
167 /* The function returns TRUE if there are more one allocation
/* I.e. some initialized loop node (regno_allocno_map != NULL) other
   than the tree root exists.  */
170 more_one_region_p (void)
175 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
176 if (ira_loop_nodes[i].regno_allocno_map != NULL
177 && ira_loop_tree_root != &ira_loop_nodes[i])
182 /* Free the loop tree node of a loop. */
184 finish_loop_tree_node (ira_loop_tree_node_t loop)
/* A non-NULL regno_allocno_map marks an initialized loop node; the map
   is reset to NULL afterwards so a repeated call is harmless.  */
186 if (loop->regno_allocno_map != NULL)
188 ira_assert (loop->bb == NULL);
189 ira_free_bitmap (loop->local_copies);
190 ira_free_bitmap (loop->border_allocnos);
191 ira_free_bitmap (loop->modified_regnos);
192 ira_free_bitmap (loop->all_allocnos);
193 ira_free (loop->regno_allocno_map);
194 loop->regno_allocno_map = NULL;
198 /* Free the loop tree nodes. */
200 finish_loop_tree_nodes (void)
205 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
206 finish_loop_tree_node (&ira_loop_nodes[i]);
207 ira_free (ira_loop_nodes);
/* Basic-block nodes may be only partially initialized, so each member
   is freed only if it was actually allocated.  */
208 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
210 if (ira_bb_nodes[i].local_copies != NULL)
211 ira_free_bitmap (ira_bb_nodes[i].local_copies);
212 if (ira_bb_nodes[i].border_allocnos != NULL)
213 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
214 if (ira_bb_nodes[i].modified_regnos != NULL)
215 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
216 if (ira_bb_nodes[i].all_allocnos != NULL)
217 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
218 if (ira_bb_nodes[i].regno_allocno_map != NULL)
219 ira_free (ira_bb_nodes[i].regno_allocno_map);
221 ira_free (ira_bb_nodes);
226 /* The following recursive function adds LOOP to the loop tree
227 hierarchy. LOOP is added only once. */
229 add_loop_to_tree (struct loop *loop)
232 ira_loop_tree_node_t loop_node, parent_node;
234 /* We can not use loop node access macros here because of potential
235 checking and because the nodes are not initialized enough
/* Ancestors are added first, so a parent is always in the tree before
   its child.  */
237 if (loop_outer (loop) != NULL)
238 add_loop_to_tree (loop_outer (loop));
/* regno_allocno_map != NULL means the loop forms a region; children ==
   NULL means its node has not been linked into the tree yet.  */
239 if (ira_loop_nodes[loop->num].regno_allocno_map != NULL
240 && ira_loop_nodes[loop->num].children == NULL)
242 /* We have not added loop node to the tree yet. */
243 loop_node = &ira_loop_nodes[loop->num];
244 loop_node->loop = loop;
245 loop_node->bb = NULL;
/* Find the nearest ancestor loop that itself forms a region.  */
246 for (parent = loop_outer (loop);
248 parent = loop_outer (parent))
249 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
253 loop_node->next = NULL;
254 loop_node->subloop_next = NULL;
255 loop_node->parent = NULL;
/* Link the node into both the children list and the subloop list of
   the parent node.  */
259 parent_node = &ira_loop_nodes[parent->num];
260 loop_node->next = parent_node->children;
261 parent_node->children = loop_node;
262 loop_node->subloop_next = parent_node->subloops;
263 parent_node->subloops = loop_node;
264 loop_node->parent = parent_node;
269 /* The following recursive function sets up levels of nodes of the
270 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
271 The function returns maximal value of level in the tree + 1. */
273 setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
275 int height, max_height;
276 ira_loop_tree_node_t subloop_node;
278 ira_assert (loop_node->bb == NULL);
279 loop_node->level = level;
280 max_height = level + 1;
/* Recurse over the subloop list and keep the deepest height seen.  */
281 for (subloop_node = loop_node->subloops;
282 subloop_node != NULL;
283 subloop_node = subloop_node->subloop_next)
285 ira_assert (subloop_node->bb == NULL);
286 height = setup_loop_tree_level (subloop_node, level + 1);
287 if (height > max_height)
293 /* Create the loop tree. The algorithm is designed to provide correct
294 order of loops (they are ordered by their last loop BB) and basic
295 blocks in the chain formed by member next. */
297 form_loop_tree (void)
302 ira_loop_tree_node_t bb_node, loop_node;
305 /* We can not use loop/bb node access macros because of potential
306 checking and because the nodes are not initialized enough
/* Reset the children/subloops lists of every region-forming loop
   node before rebuilding the tree.  */
308 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
309 if (ira_loop_nodes[i].regno_allocno_map != NULL)
311 ira_loop_nodes[i].children = NULL;
312 ira_loop_nodes[i].subloops = NULL;
/* For each basic block, initialize its node and hook it under the
   nearest enclosing loop that forms a region.  */
316 bb_node = &ira_bb_nodes[bb->index];
318 bb_node->loop = NULL;
319 bb_node->subloops = NULL;
320 bb_node->children = NULL;
321 bb_node->subloop_next = NULL;
322 bb_node->next = NULL;
323 for (parent = bb->loop_father;
325 parent = loop_outer (parent))
326 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
328 add_loop_to_tree (parent);
329 loop_node = &ira_loop_nodes[parent->num];
330 bb_node->next = loop_node->children;
331 bb_node->parent = loop_node;
332 loop_node->children = bb_node;
334 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
335 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
336 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
341 /* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
344 rebuild_regno_allocno_maps (void)
347 int max_regno, regno;
349 ira_loop_tree_node_t loop_tree_node;
351 ira_allocno_iterator ai;
353 max_regno = max_reg_num ();
/* Reallocate each region's map at the current max_regno and clear it.  */
354 for (l = 0; VEC_iterate (loop_p, ira_loops.larray, l, loop); l++)
355 if (ira_loop_nodes[l].regno_allocno_map != NULL)
357 ira_free (ira_loop_nodes[l].regno_allocno_map);
358 ira_loop_nodes[l].regno_allocno_map
359 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
361 memset (ira_loop_nodes[l].regno_allocno_map, 0,
362 sizeof (ira_allocno_t) * max_regno);
364 ira_free (ira_regno_allocno_map);
365 ira_regno_allocno_map
366 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
367 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
368 FOR_EACH_ALLOCNO (a, ai)
370 if (ALLOCNO_CAP_MEMBER (a) != NULL)
371 /* Caps are not in the regno allocno maps. */
/* Prepend A to the global per-regno chain; the first allocno seen for
   a regno in its region becomes the region map entry.  */
373 regno = ALLOCNO_REGNO (a);
374 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
375 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
376 ira_regno_allocno_map[regno] = a;
377 if (loop_tree_node->regno_allocno_map[regno] == NULL)
378 /* Remember that we can create temporary allocnos to break
379 cycles in register shuffle. */
380 loop_tree_node->regno_allocno_map[regno] = a;
386 /* Pools for allocnos and allocno live ranges. */
387 static alloc_pool allocno_pool, allocno_live_range_pool;
389 /* Vec containing references to all created allocnos. It is a
390 container of array allocnos. */
391 static VEC(ira_allocno_t,heap) *allocno_vec;
393 /* Vec containing references to all created allocnos. It is a
394 container of ira_conflict_id_allocno_map. */
395 static VEC(ira_allocno_t,heap) *ira_conflict_id_allocno_map_vec;
397 /* Initialize data concerning allocnos. */
399 initiate_allocnos (void)
401 allocno_live_range_pool
402 = create_alloc_pool ("allocno live ranges",
403 sizeof (struct ira_allocno_live_range), 100);
405 = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
/* Reserve room for roughly two allocnos per pseudo up front.  */
406 allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
408 ira_allocnos_num = 0;
409 ira_conflict_id_allocno_map_vec
410 = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
411 ira_conflict_id_allocno_map = NULL;
/* The global regno -> allocno map starts out empty.  */
412 ira_regno_allocno_map
413 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
414 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
417 /* Create and return the allocno corresponding to REGNO in
418 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
419 same regno if CAP_P is FALSE. */
421 ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
425 a = (ira_allocno_t) pool_alloc (allocno_pool);
426 ALLOCNO_REGNO (a) = regno;
427 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
/* Chain into the per-regno list and record as the region's entry for
   REGNO (presumably guarded by !cap_p in the elided lines, per the
   head comment).  */
430 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
431 ira_regno_allocno_map[regno] = a;
432 if (loop_tree_node->regno_allocno_map[regno] == NULL)
433 /* Remember that we can create temporary allocnos to break
434 cycles in register shuffle on region borders (see
436 loop_tree_node->regno_allocno_map[regno] = a;
/* Default-initialize every allocno field.  */
438 ALLOCNO_CAP (a) = NULL;
439 ALLOCNO_CAP_MEMBER (a) = NULL;
440 ALLOCNO_NUM (a) = ira_allocnos_num;
441 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
442 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = NULL;
443 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
/* Non-allocatable hard registers conflict with every allocno from the
   start.  */
444 COPY_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
445 COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
446 ALLOCNO_NREFS (a) = 0;
447 ALLOCNO_FREQ (a) = 0;
448 ALLOCNO_HARD_REGNO (a) = -1;
449 ALLOCNO_CALL_FREQ (a) = 0;
450 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
452 ALLOCNO_NO_STACK_REG_P (a) = false;
453 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
455 ALLOCNO_MEM_OPTIMIZED_DEST (a) = NULL;
456 ALLOCNO_MEM_OPTIMIZED_DEST_P (a) = false;
457 ALLOCNO_SOMEWHERE_RENAMED_P (a) = false;
458 ALLOCNO_CHILD_RENAMED_P (a) = false;
459 ALLOCNO_DONT_REASSIGN_P (a) = false;
460 ALLOCNO_BAD_SPILL_P (a) = false;
461 ALLOCNO_IN_GRAPH_P (a) = false;
462 ALLOCNO_ASSIGNED_P (a) = false;
463 ALLOCNO_MAY_BE_SPILLED_P (a) = false;
464 ALLOCNO_SPLAY_REMOVED_P (a) = false;
465 ALLOCNO_CONFLICT_VEC_P (a) = false;
466 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
467 ALLOCNO_COPIES (a) = NULL;
468 ALLOCNO_HARD_REG_COSTS (a) = NULL;
469 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
470 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
471 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
472 ALLOCNO_LEFT_CONFLICTS_SIZE (a) = -1;
473 ALLOCNO_COVER_CLASS (a) = NO_REGS;
474 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = 0;
475 ALLOCNO_COVER_CLASS_COST (a) = 0;
476 ALLOCNO_MEMORY_COST (a) = 0;
477 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
478 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
479 ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
480 ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
/* Each allocno starts as its own singleton coalesce cycle.  */
481 ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
482 ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
483 ALLOCNO_LIVE_RANGES (a) = NULL;
/* MIN > MAX encodes an empty conflict-id range.  */
484 ALLOCNO_MIN (a) = INT_MAX;
485 ALLOCNO_MAX (a) = -1;
486 ALLOCNO_CONFLICT_ID (a) = ira_allocnos_num;
/* Register the new allocno; the vectors back the exported
   ira_allocnos and ira_conflict_id_allocno_map pointers.  */
487 VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
488 ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
489 ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
490 VEC_safe_push (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec, a);
491 ira_conflict_id_allocno_map
492 = VEC_address (ira_allocno_t, ira_conflict_id_allocno_map_vec);
496 /* Set up cover class for A and update its conflict hard registers. */
498 ira_set_allocno_cover_class (ira_allocno_t a, enum reg_class cover_class)
500 ALLOCNO_COVER_CLASS (a) = cover_class;
/* Hard registers outside the cover class can never be assigned to A,
   so mark their complement as conflicting.  */
501 IOR_COMPL_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
502 reg_class_contents[cover_class]);
503 IOR_COMPL_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
504 reg_class_contents[cover_class]);
507 /* Merge hard register conflicts from allocno FROM into allocno TO. If
508 TOTAL_ONLY is true, we ignore ALLOCNO_CONFLICT_HARD_REGS. */
510 merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
514 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (to),
515 ALLOCNO_CONFLICT_HARD_REGS (from));
516 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (to),
517 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (from));
/* Propagate the no-stack-reg flags too (presumably these lines sit
   under #ifdef STACK_REGS in the elided source -- confirm).  */
519 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
520 ALLOCNO_NO_STACK_REG_P (to) = true;
521 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
522 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
526 /* Return TRUE if the conflict vector with NUM elements is more
527 profitable than conflict bit vector for A. */
529 ira_conflict_vector_profitable_p (ira_allocno_t a, int num)
533 if (ALLOCNO_MAX (a) < ALLOCNO_MIN (a))
534 /* We prefer bit vector in such case because it does not result in
/* Compare footprints: pointer vector (NUM entries plus NULL
   terminator) against a bit vector covering conflict ids MIN..MAX,
   with a 2:3 bias toward the bit vector.  */
538 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS) / IRA_INT_BITS;
539 return (2 * sizeof (ira_allocno_t) * (num + 1)
540 < 3 * nw * sizeof (IRA_INT_TYPE));
543 /* Allocates and initialize the conflict vector of A for NUM
544 conflicting allocnos. */
546 ira_allocate_allocno_conflict_vec (ira_allocno_t a, int num)
551 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
552 num++; /* for NULL end marker */
553 size = sizeof (ira_allocno_t) * num;
554 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
555 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
/* The vector starts empty; CONFLICT_VEC_P records the representation
   chosen for A.  */
557 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
558 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
559 ALLOCNO_CONFLICT_VEC_P (a) = true;
562 /* Allocate and initialize the conflict bit vector of A. */
564 allocate_allocno_conflict_bit_vec (ira_allocno_t a)
568 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
/* One bit per conflict id in the window ALLOCNO_MIN..ALLOCNO_MAX,
   rounded up to whole IRA_INT_TYPE words.  */
569 size = ((ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS)
570 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
571 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
572 memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0, size);
573 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
574 ALLOCNO_CONFLICT_VEC_P (a) = false;
577 /* Allocate and initialize the conflict vector or conflict bit vector
578 of A for NUM conflicting allocnos whatever is more profitable. */
580 ira_allocate_allocno_conflicts (ira_allocno_t a, int num)
582 if (ira_conflict_vector_profitable_p (a, num))
583 ira_allocate_allocno_conflict_vec (a, num);
585 allocate_allocno_conflict_bit_vec (a);
588 /* Add A2 to the conflicts of A1. */
590 add_to_allocno_conflicts (ira_allocno_t a1, ira_allocno_t a2)
595 if (ALLOCNO_CONFLICT_VEC_P (a1))
/* Vector representation: make sure there is room for the new entry
   plus the NULL end marker, growing by ~1.5x when needed.  */
599 num = ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1) + 2;
600 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1)
601 >= num * sizeof (ira_allocno_t))
602 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
605 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
606 vec = (ira_allocno_t *) ira_allocate (size);
607 memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
608 sizeof (ira_allocno_t) * ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1));
609 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
610 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
611 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
615 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1)++;
/* Bit-vector representation: the vector spans conflict ids
   ALLOCNO_MIN..ALLOCNO_MAX of A1 and must be widened when A2's id
   falls outside that window.  */
619 int nw, added_head_nw, id;
622 id = ALLOCNO_CONFLICT_ID (a2);
623 vec = (IRA_INT_TYPE *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
624 if (ALLOCNO_MIN (a1) > id)
626 /* Expand head of the bit vector. */
627 added_head_nw = (ALLOCNO_MIN (a1) - id - 1) / IRA_INT_BITS + 1;
628 nw = (ALLOCNO_MAX (a1) - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
629 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
630 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) >= size)
/* Enough capacity: shift the existing words up and zero the new
   head words in place.  */
632 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
633 vec, nw * sizeof (IRA_INT_TYPE));
634 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
/* Otherwise allocate a larger vector, copying the old words at the
   head offset and zeroing the rest.  */
639 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
640 vec = (IRA_INT_TYPE *) ira_allocate (size);
641 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
642 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
643 nw * sizeof (IRA_INT_TYPE));
644 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
646 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
647 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
648 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
649 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
650 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
/* The window now starts added_head_nw words earlier.  */
652 ALLOCNO_MIN (a1) -= added_head_nw * IRA_INT_BITS;
654 else if (ALLOCNO_MAX (a1) < id)
656 nw = (id - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
657 size = nw * sizeof (IRA_INT_TYPE);
658 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) < size)
660 /* Expand tail of the bit vector. */
661 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
662 vec = (IRA_INT_TYPE *) ira_allocate (size);
663 memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
664 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
665 memset ((char *) vec + ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1),
666 0, size - ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
667 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
668 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
669 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
671 ALLOCNO_MAX (a1) = id;
/* Finally record the conflict bit for A2's id.  */
673 SET_ALLOCNO_SET_BIT (vec, id, ALLOCNO_MIN (a1), ALLOCNO_MAX (a1));
677 /* Add A1 to the conflicts of A2 and vise versa. */
679 ira_add_allocno_conflict (ira_allocno_t a1, ira_allocno_t a2)
681 add_to_allocno_conflicts (a1, a2);
682 add_to_allocno_conflicts (a2, a1);
685 /* Clear all conflicts of allocno A. */
687 clear_allocno_conflicts (ira_allocno_t a)
689 if (ALLOCNO_CONFLICT_VEC_P (a))
/* Vector form: reset the count and re-place the NULL end marker.  */
691 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
692 ((ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a))[0] = NULL;
/* Bit-vector form: zero the words covering ids MIN..MAX.  */
694 else if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) != 0)
698 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a)) / IRA_INT_BITS + 1;
699 memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0,
700 nw * sizeof (IRA_INT_TYPE));
704 /* The array used to find duplications in conflict vectors of
706 static int *allocno_conflict_check;
708 /* The value used to mark allocation presence in conflict vector of
709 the current allocno. */
710 static int curr_allocno_conflict_check_tick;
712 /* Remove duplications in conflict vector of A. */
714 compress_allocno_conflict_vec (ira_allocno_t a)
716 ira_allocno_t *vec, conflict_a;
719 ira_assert (ALLOCNO_CONFLICT_VEC_P (a));
720 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
/* Bump the tick so marks left in allocno_conflict_check by previous
   calls become stale without clearing the array.  */
721 curr_allocno_conflict_check_tick++;
/* Compact the vector in place, keeping the first occurrence of each
   conflicting allocno.  */
722 for (i = j = 0; (conflict_a = vec[i]) != NULL; i++)
724 if (allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
725 != curr_allocno_conflict_check_tick)
727 allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
728 = curr_allocno_conflict_check_tick;
729 vec[j++] = conflict_a;
732 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = j;
736 /* Remove duplications in conflict vectors of all allocnos. */
738 compress_conflict_vecs (void)
741 ira_allocno_iterator ai;
/* Scratch array indexed by allocno number, shared by all calls to
   compress_allocno_conflict_vec and freed at the end.  */
743 allocno_conflict_check
744 = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
745 memset (allocno_conflict_check, 0, sizeof (int) * ira_allocnos_num);
746 curr_allocno_conflict_check_tick = 0;
/* Only vector-represented conflicts can contain duplicates.  */
747 FOR_EACH_ALLOCNO (a, ai)
748 if (ALLOCNO_CONFLICT_VEC_P (a))
749 compress_allocno_conflict_vec (a);
750 ira_free (allocno_conflict_check);
753 /* This recursive function outputs allocno A and if it is a cap the
754 function outputs its members. */
756 ira_print_expanded_allocno (ira_allocno_t a)
760 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
/* Print either the basic block or the loop the allocno belongs to.  */
761 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
762 fprintf (ira_dump_file, ",b%d", bb->index);
764 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
/* Recurse into the cap member, separated by ':'.  */
765 if (ALLOCNO_CAP_MEMBER (a) != NULL)
767 fprintf (ira_dump_file, ":");
768 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
770 fprintf (ira_dump_file, ")");
773 /* Create and return the cap representing allocno A in the
776 create_cap_allocno (ira_allocno_t a)
779 ira_loop_tree_node_t parent;
780 enum reg_class cover_class;
/* A must not have been coalesced with anything yet.  */
782 ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
783 && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
/* The cap lives in the parent region of A's region.  */
784 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
785 cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
786 ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
787 cover_class = ALLOCNO_COVER_CLASS (a);
788 ira_set_allocno_cover_class (cap, cover_class);
789 ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
/* Link cap and member both ways.  */
790 ALLOCNO_CAP_MEMBER (cap) = a;
791 ALLOCNO_CAP (a) = cap;
/* Copy the member's costs, conflicts and statistics to the cap.  */
792 ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
793 ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
794 ira_allocate_and_copy_costs
795 (&ALLOCNO_HARD_REG_COSTS (cap), cover_class, ALLOCNO_HARD_REG_COSTS (a));
796 ira_allocate_and_copy_costs
797 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), cover_class,
798 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
799 ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
800 ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
801 ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
802 ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
803 merge_hard_reg_conflicts (a, cap, false);
804 ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
805 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
807 fprintf (ira_dump_file, " Creating cap ");
808 ira_print_expanded_allocno (cap);
809 fprintf (ira_dump_file, "\n");
814 /* Create and return allocno live range with given attributes. */
/* All three helpers below draw range cells from
   allocno_live_range_pool; the field assignments are in elided lines.  */
816 ira_create_allocno_live_range (ira_allocno_t a, int start, int finish,
817 allocno_live_range_t next)
819 allocno_live_range_t p;
821 p = (allocno_live_range_t) pool_alloc (allocno_live_range_pool);
829 /* Copy allocno live range R and return the result. */
830 static allocno_live_range_t
831 copy_allocno_live_range (allocno_live_range_t r)
833 allocno_live_range_t p;
835 p = (allocno_live_range_t) pool_alloc (allocno_live_range_pool);
840 /* Copy allocno live range list given by its head R and return the
843 ira_copy_allocno_live_range_list (allocno_live_range_t r)
845 allocno_live_range_t p, first, last;
/* Copy cell by cell, keeping the original order.  */
849 for (first = last = NULL; r != NULL; r = r->next)
851 p = copy_allocno_live_range (r);
861 /* Merge ranges R1 and R2 and returns the result. The function
862 maintains the order of ranges and tries to minimize number of the
865 ira_merge_allocno_live_ranges (allocno_live_range_t r1,
866 allocno_live_range_t r2)
868 allocno_live_range_t first, last, temp;
/* Ordered-list merge: overlapping or adjacent ranges (r1->start <=
   r2->finish + 1) are coalesced into r1 and the absorbed r2 cell is
   freed.  Several branches are elided here.  */
874 for (first = last = NULL; r1 != NULL && r2 != NULL;)
876 if (r1->start < r2->start)
882 if (r1->start <= r2->finish + 1)
884 /* Intersected ranges: merge r1 and r2 into r1. */
885 r1->start = r2->start;
886 if (r1->finish < r2->finish)
887 r1->finish = r2->finish;
890 ira_finish_allocno_live_range (temp);
893 /* To try to merge with subsequent ranges in r1. */
900 /* Add r1 to the result. */
911 /* To try to merge with subsequent ranges in r2. */
/* After the loop, append whichever tail remains; the asserts confirm
   each remaining tail is properly terminated.  */
923 ira_assert (r1->next == NULL);
931 ira_assert (r2->next == NULL);
935 ira_assert (last->next == NULL);
940 /* Return TRUE if live ranges R1 and R2 intersect. */
942 ira_allocno_live_ranges_intersect_p (allocno_live_range_t r1,
943 allocno_live_range_t r2)
945 /* Remember the live ranges are always kept ordered. */
/* Two-pointer walk: advance whichever list is entirely behind the
   other; the overlap and termination cases are in elided lines.  */
946 while (r1 != NULL && r2 != NULL)
948 if (r1->start > r2->finish)
950 else if (r2->start > r1->finish)
958 /* Free allocno live range R. */
960 ira_finish_allocno_live_range (allocno_live_range_t r)
962 pool_free (allocno_live_range_pool, r);
965 /* Free list of allocno live ranges starting with R. */
967 ira_finish_allocno_live_range_list (allocno_live_range_t r)
969 allocno_live_range_t next_r;
/* next_r is captured (in an elided line) before each cell is freed.  */
971 for (; r != NULL; r = next_r)
974 ira_finish_allocno_live_range (r);
978 /* Free updated register costs of allocno A. */
980 ira_free_allocno_updated_costs (ira_allocno_t a)
982 enum reg_class cover_class;
984 cover_class = ALLOCNO_COVER_CLASS (a);
985 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
986 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
/* Null out the pointers so the vectors are not freed twice.  */
987 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
988 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
989 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
991 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
994 /* Free the memory allocated for allocno A. */
996 finish_allocno (ira_allocno_t a)
998 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
/* Removed allocnos leave NULL holes in the global maps rather than
   compacting them.  */
1000 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
1001 ira_conflict_id_allocno_map[ALLOCNO_CONFLICT_ID (a)] = NULL;
/* Free each optional attachment only when present.  */
1002 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) != NULL)
1003 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a));
1004 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1005 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), cover_class);
1006 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1007 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), cover_class);
1008 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1009 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
1010 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1011 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1013 ira_finish_allocno_live_range_list (ALLOCNO_LIVE_RANGES (a));
1014 pool_free (allocno_pool, a);
1017 /* Free the memory allocated for all allocnos. */
1019 finish_allocnos (void)
1022 ira_allocno_iterator ai;
/* The loop body (elided) presumably calls finish_allocno on each
   allocno before the containers are released.  */
1024 FOR_EACH_ALLOCNO (a, ai)
1026 ira_free (ira_regno_allocno_map);
1027 VEC_free (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec);
1028 VEC_free (ira_allocno_t, heap, allocno_vec);
1029 free_alloc_pool (allocno_pool);
1030 free_alloc_pool (allocno_live_range_pool);
1035 /* Pools for copies. */
1036 static alloc_pool copy_pool;
1038 /* Vec containing references to all created copies. It is a
1039 container of array ira_copies. */
1040 static VEC(ira_copy_t,heap) *copy_vec;
1042 /* The function initializes data concerning allocno copies. */
1044 initiate_copies (void)
1047 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
/* Pre-size the copy vector by insn uid count.  */
1048 copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
1053 /* Return copy connecting A1 and A2 and originated from INSN of
1054 LOOP_TREE_NODE if any. */
1056 find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
1057 ira_loop_tree_node_t loop_tree_node)
1059 ira_copy_t cp, next_cp;
1060 ira_allocno_t another_a;
/* Walk A1's copy list; each copy holds A1 as either its first or
   second end, which determines the next-link to follow.  */
1062 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1064 if (cp->first == a1)
1066 next_cp = cp->next_first_allocno_copy;
1067 another_a = cp->second;
1069 else if (cp->second == a1)
1071 next_cp = cp->next_second_allocno_copy;
1072 another_a = cp->first;
/* Match on the other end, the originating insn, and the region.  */
1076 if (another_a == a2 && cp->insn == insn
1077 && cp->loop_tree_node == loop_tree_node)
1083 /* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
1084 SECOND, FREQ, CONSTRAINT_P, and INSN. */
1086 ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1087 bool constraint_p, rtx insn,
1088 ira_loop_tree_node_t loop_tree_node)
1092 cp = (ira_copy_t) pool_alloc (copy_pool);
1093 cp->num = ira_copies_num;
1095 cp->second = second;
1097 cp->constraint_p = constraint_p;
1099 cp->loop_tree_node = loop_tree_node;
/* Keep the exported ira_copies array and count in sync with the
   backing vector.  */
1100 VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
1101 ira_copies = VEC_address (ira_copy_t, copy_vec);
1102 ira_copies_num = VEC_length (ira_copy_t, copy_vec);
1106 /* Attach a copy CP to allocnos involved into the copy. */
1108 ira_add_allocno_copy_to_list (ira_copy_t cp)
1110 ira_allocno_t first = cp->first, second = cp->second;
1112 cp->prev_first_allocno_copy = NULL;
1113 cp->prev_second_allocno_copy = NULL;
/* Push CP at the head of FIRST's copy list; the old head's back link
   to fix depends on which of its ends references FIRST.  */
1114 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
1115 if (cp->next_first_allocno_copy != NULL)
1117 if (cp->next_first_allocno_copy->first == first)
1118 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
1120 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
/* Likewise for SECOND's copy list.  */
1122 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1123 if (cp->next_second_allocno_copy != NULL)
1125 if (cp->next_second_allocno_copy->second == second)
1126 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1128 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1130 ALLOCNO_COPIES (first) = cp;
1131 ALLOCNO_COPIES (second) = cp;
1134 /* Detach a copy CP from allocnos involved into the copy. */
1136 ira_remove_allocno_copy_from_list (ira_copy_t cp)
1138 ira_allocno_t first = cp->first, second = cp->second;
1139 ira_copy_t prev, next;
/* Unlink CP from FIRST's doubly-linked copy list; which link of a
   neighbor to update depends on which of its ends holds FIRST.  */
1141 next = cp->next_first_allocno_copy;
1142 prev = cp->prev_first_allocno_copy;
1144 ALLOCNO_COPIES (first) = next;
1145 else if (prev->first == first)
1146 prev->next_first_allocno_copy = next;
1148 prev->next_second_allocno_copy = next;
1151 if (next->first == first)
1152 next->prev_first_allocno_copy = prev;
1154 next->prev_second_allocno_copy = prev;
1156 cp->prev_first_allocno_copy = cp->next_first_allocno_copy = NULL;
/* Likewise unlink CP from SECOND's copy list.  */
1158 next = cp->next_second_allocno_copy;
1159 prev = cp->prev_second_allocno_copy;
1161 ALLOCNO_COPIES (second) = next;
1162 else if (prev->second == second)
1163 prev->next_second_allocno_copy = next;
1165 prev->next_first_allocno_copy = next;
1168 if (next->second == second)
1169 next->prev_second_allocno_copy = prev;
1171 next->prev_first_allocno_copy = prev;
1173 cp->prev_second_allocno_copy = cp->next_second_allocno_copy = NULL;
1176 /* Make a copy CP a canonical copy where number of the
1177 first allocno is less than the second one. */
1179 ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
/* Already canonical -- nothing to do.  (The early return itself is on
   a line elided from this listing.)  */
1184 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
/* Swap the two allocno ends together with their prev/next chain links
   so the first/second chains stay consistent after the swap.  */
1188 cp->first = cp->second;
1191 temp_cp = cp->prev_first_allocno_copy;
1192 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1193 cp->prev_second_allocno_copy = temp_cp;
1195 temp_cp = cp->next_first_allocno_copy;
1196 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1197 cp->next_second_allocno_copy = temp_cp;
1200 /* Create (or update frequency if the copy already exists) and return
1201 the copy of allocnos FIRST and SECOND with frequency FREQ
1202 corresponding to move insn INSN (if any) and originated from
1205 ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1206 bool constraint_p, rtx insn,
1207 ira_loop_tree_node_t loop_tree_node)
/* Reuse an existing equivalent copy when one exists; its frequency
   update and return are on lines elided from this listing.  */
1211 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1216 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1218 ira_assert (first != NULL && second != NULL);
/* Attach the new copy to both allocnos and canonicalize its ends.  */
1219 ira_add_allocno_copy_to_list (cp);
1220 ira_swap_allocno_copy_ends_if_necessary (cp);
1224 /* Print info about copy CP into file F. */
1226 print_copy (FILE *f, ira_copy_t cp)
1228 fprintf (f, "  cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
1229 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
1230 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
/* The tag records why the copy exists: a move insn, an insn
   constraint, or a register shuffle.  */
1232 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
1235 /* Print info about copy CP into stderr. */
1237 ira_debug_copy (ira_copy_t cp)
1239 print_copy (stderr, cp);
1242 /* Print info about all copies into file F. */
1244 print_copies (FILE *f)
1247 ira_copy_iterator ci;
1249 FOR_EACH_COPY (cp, ci)
1253 /* Print info about all copies into stderr. */
1255 ira_debug_copies (void)
1257 print_copies (stderr);
1260 /* Print info about copies involving allocno A into file F. */
1262 print_allocno_copies (FILE *f, ira_allocno_t a)
1264 ira_allocno_t another_a;
1265 ira_copy_t cp, next_cp;
1267 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
/* Walk A's copy chain; the next link and the opposite allocno depend
   on whether A is the first or the second end of each copy.  */
1268 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1272 next_cp = cp->next_first_allocno_copy;
1273 another_a = cp->second;
1275 else if (cp->second == a)
1277 next_cp = cp->next_second_allocno_copy;
1278 another_a = cp->first;
1282 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1283 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1288 /* Print info about copies involving allocno A into stderr. */
1290 ira_debug_allocno_copies (ira_allocno_t a)
1292 print_allocno_copies (stderr, a);
1295 /* The function frees memory allocated for copy CP. */
1297 finish_copy (ira_copy_t cp)
1299 pool_free (copy_pool, cp);
1303 /* Free memory allocated for all copies. */
1305 finish_copies (void)
1308 ira_copy_iterator ci;
/* Release every copy back to the pool, then destroy the vector and
   the pool themselves.  */
1310 FOR_EACH_COPY (cp, ci)
1312 VEC_free (ira_copy_t, heap, copy_vec);
1313 free_alloc_pool (copy_pool);
1318 /* Pools for cost vectors. It is defined only for cover classes. */
1319 static alloc_pool cost_vector_pool[N_REG_CLASSES];
1321 /* The function initiates work with hard register cost vectors. It
1322 creates allocation pool for each cover class. */
1324 initiate_cost_vectors (void)
1327 enum reg_class cover_class;
/* One pool per cover class; each element holds one int cost per hard
   register of the class.  */
1329 for (i = 0; i < ira_reg_class_cover_size; i++)
1331 cover_class = ira_reg_class_cover[i];
1332 cost_vector_pool[cover_class]
1333 = create_alloc_pool ("cost vectors",
1335 * ira_class_hard_regs_num[cover_class],
1340 /* Allocate and return a cost vector VEC for COVER_CLASS. */
1342 ira_allocate_cost_vector (enum reg_class cover_class)
1344 return (int *) pool_alloc (cost_vector_pool[cover_class]);
1347 /* Free a cost vector VEC for COVER_CLASS. */
1349 ira_free_cost_vector (int *vec, enum reg_class cover_class)
1351 ira_assert (vec != NULL);
1352 pool_free (cost_vector_pool[cover_class], vec);
1355 /* Finish work with hard register cost vectors. Release allocation
1356 pool for each cover class. */
1358 finish_cost_vectors (void)
1361 enum reg_class cover_class;
1363 for (i = 0; i < ira_reg_class_cover_size; i++)
1365 cover_class = ira_reg_class_cover[i];
1366 free_alloc_pool (cost_vector_pool[cover_class]);
1372 /* The current loop tree node and its regno allocno map. */
1373 ira_loop_tree_node_t ira_curr_loop_tree_node;
1374 ira_allocno_t *ira_curr_regno_allocno_map;
1376 /* This recursive function traverses loop tree with root LOOP_NODE
1377 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1378 correspondingly in preorder and postorder. The function sets up
1379 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1380 basic block nodes of LOOP_NODE is also processed (before its
1383 ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1384 void (*preorder_func) (ira_loop_tree_node_t),
1385 void (*postorder_func) (ira_loop_tree_node_t))
1387 ira_loop_tree_node_t subloop_node;
1389 ira_assert (loop_node->bb == NULL);
/* Publish the current node and its regno->allocno map so the
   callbacks can use them.  */
1390 ira_curr_loop_tree_node = loop_node;
1391 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1393 if (preorder_func != NULL)
1394 (*preorder_func) (loop_node);
/* Visit the basic-block children of this loop node; both callbacks
   run on each bb node before the subloops are entered.  (The bb_p
   guard is on a line elided from this listing.)  */
1397 for (subloop_node = loop_node->children;
1398 subloop_node != NULL;
1399 subloop_node = subloop_node->next)
1400 if (subloop_node->bb != NULL)
1402 if (preorder_func != NULL)
1403 (*preorder_func) (subloop_node);
1405 if (postorder_func != NULL)
1406 (*postorder_func) (subloop_node);
/* Recurse into the subloops.  */
1409 for (subloop_node = loop_node->subloops;
1410 subloop_node != NULL;
1411 subloop_node = subloop_node->subloop_next)
1413 ira_assert (subloop_node->bb == NULL);
1414 ira_traverse_loop_tree (bb_p, subloop_node,
1415 preorder_func, postorder_func);
/* Restore the current node/map clobbered by the recursion before
   running the postorder callback on this node.  */
1418 ira_curr_loop_tree_node = loop_node;
1419 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1421 if (postorder_func != NULL)
1422 (*postorder_func) (loop_node);
1427 /* The basic block currently being processed. */
1428 static basic_block curr_bb;
1430 /* This recursive function creates allocnos corresponding to
1431 pseudo-registers containing in X. True OUTPUT_P means that X is
1434 create_insn_allocnos (rtx x, bool output_p)
1438 enum rtx_code code = GET_CODE (x);
/* A pseudo register: create its allocno in the current region on
   first sight and accumulate reference count and block frequency.  */
1444 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1448 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
1449 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1451 ALLOCNO_NREFS (a)++;
1452 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
/* Presumably guarded by OUTPUT_P on an elided line: record that the
   regno is modified in this region -- TODO confirm against full
   source.  */
1454 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1458 else if (code == SET)
1460 create_insn_allocnos (SET_DEST (x), true);
1461 create_insn_allocnos (SET_SRC (x), false);
1464 else if (code == CLOBBER)
1466 create_insn_allocnos (XEXP (x, 0), true);
1469 else if (code == MEM)
/* A memory address is always a use, even on the output side.  */
1471 create_insn_allocnos (XEXP (x, 0), false);
1474 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
1475 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
/* Auto-inc/dec both writes and reads the address register.  */
1477 create_insn_allocnos (XEXP (x, 0), true);
1478 create_insn_allocnos (XEXP (x, 0), false);
/* Generic rtx: recurse into all operands and operand vectors.  */
1482 fmt = GET_RTX_FORMAT (code);
1483 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1486 create_insn_allocnos (XEXP (x, i), output_p);
1487 else if (fmt[i] == 'E')
1488 for (j = 0; j < XVECLEN (x, i); j++)
1489 create_insn_allocnos (XVECEXP (x, i, j), output_p);
1493 /* Create allocnos corresponding to pseudo-registers living in the
1494 basic block represented by the corresponding loop tree node
1497 create_bb_allocnos (ira_loop_tree_node_t bb_node)
1504 curr_bb = bb = bb_node->bb;
1505 ira_assert (bb != NULL);
/* Scan insns in reverse so the per-regno allocno lists come out in
   the order create_allocnos relies on.  */
1506 FOR_BB_INSNS_REVERSE (bb, insn)
1507 if (NONDEBUG_INSN_P (insn))
1508 create_insn_allocnos (PATTERN (insn), false);
1509 /* It might be a allocno living through from one subloop to
/* Also create allocnos for pseudos live on entry to the block but
   never referenced inside it.  */
1511 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
1512 if (ira_curr_regno_allocno_map[i] == NULL)
1513 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1516 /* Create allocnos corresponding to pseudo-registers living on edge E
1517 (a loop entry or exit). Also mark the allocnos as living on the
1520 create_loop_allocnos (edge e)
1523 bitmap live_in_regs, border_allocnos;
1525 ira_loop_tree_node_t parent;
1527 live_in_regs = DF_LR_IN (e->dest);
1528 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
/* Consider only pseudos that actually cross the edge: live out of
   its source and live into its destination.  */
1529 EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
1530 FIRST_PSEUDO_REGISTER, i, bi)
1531 if (bitmap_bit_p (live_in_regs, i))
1533 if (ira_curr_regno_allocno_map[i] == NULL)
1535 /* The order of creations is important for right
1536 ira_regno_allocno_map. */
1537 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1538 && parent->regno_allocno_map[i] == NULL)
1539 ira_create_allocno (i, false, parent);
1540 ira_create_allocno (i, false, ira_curr_loop_tree_node);
/* Remember the allocno as living on the region border.  */
1542 bitmap_set_bit (border_allocnos,
1543 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1547 /* Create allocnos corresponding to pseudo-registers living in loop
1548 represented by the corresponding loop tree node LOOP_NODE. This
1549 function is called by ira_traverse_loop_tree. */
1551 create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1553 if (loop_node->bb != NULL)
1554 create_bb_allocnos (loop_node);
1555 else if (loop_node != ira_loop_tree_root)
1560 VEC (edge, heap) *edges;
/* Loop entry edges: every header predecessor except the latch.  */
1562 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1563 if (e->src != loop_node->loop->latch)
1564 create_loop_allocnos (e);
/* Loop exit edges.  */
1566 edges = get_loop_exit_edges (loop_node->loop);
1567 for (i = 0; VEC_iterate (edge, edges, i, e); i++)
1568 create_loop_allocnos (e);
1569 VEC_free (edge, heap, edges);
1573 /* Propagate information about allocnos modified inside the loop given
1574 by its LOOP_TREE_NODE to its parent. */
1576 propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
/* The root has no parent to propagate into; bb nodes are not
   expected here.  */
1578 if (loop_tree_node == ira_loop_tree_root)
1580 ira_assert (loop_tree_node->bb == NULL);
1581 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1582 loop_tree_node->modified_regnos);
1585 /* Propagate new info about allocno A (see comments about accumulated
1586 info in allocno definition) to the corresponding allocno on upper
1587 loop tree level. So allocnos on upper levels accumulate
1588 information about the corresponding allocnos in nested regions.
1589 The new info means allocno info finally calculated in this
1592 propagate_allocno_info (void)
1595 ira_allocno_t a, parent_a;
1596 ira_loop_tree_node_t parent;
1597 enum reg_class cover_class;
/* Propagation is only meaningful for regional allocation modes.  */
1599 if (flag_ira_region != IRA_REGION_ALL
1600 && flag_ira_region != IRA_REGION_MIXED)
1602 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1603 for (a = ira_regno_allocno_map[i];
1605 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1606 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1607 && (parent_a = parent->regno_allocno_map[i]) != NULL
1608 /* There are no caps yet at this point. So use
1609 border_allocnos to find allocnos for the propagation. */
1610 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
/* Accumulate the child's info into the parent allocno:
   spillability, reference counts, frequencies, hard register
   conflicts and costs.  */
1613 if (! ALLOCNO_BAD_SPILL_P (a))
1614 ALLOCNO_BAD_SPILL_P (parent_a) = false;
1615 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1616 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1617 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1618 merge_hard_reg_conflicts (a, parent_a, true);
1619 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1620 += ALLOCNO_CALLS_CROSSED_NUM (a);
1621 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1622 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1623 cover_class = ALLOCNO_COVER_CLASS (a);
1624 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1625 ira_allocate_and_accumulate_costs
1626 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1627 ALLOCNO_HARD_REG_COSTS (a));
1628 ira_allocate_and_accumulate_costs
1629 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1631 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1632 ALLOCNO_COVER_CLASS_COST (parent_a)
1633 += ALLOCNO_COVER_CLASS_COST (a);
1634 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1638 /* Create allocnos corresponding to pseudo-registers in the current
1639 function. Traverse the loop tree for this. */
1641 create_allocnos (void)
1643 /* We need to process BB first to correctly link allocnos by member
1644 next_regno_allocno. */
1645 ira_traverse_loop_tree (true, ira_loop_tree_root,
1646 create_loop_tree_node_allocnos, NULL);
/* Then merge each region's modified-regno set into its parent,
   bottom-up (postorder).  */
1648 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
1649 propagate_modified_regnos);
1654 /* The page contains function to remove some regions from a separate
1655 register allocation. We remove regions whose separate allocation
1656 will hardly improve the result. As a result we speed up regional
1657 register allocation. */
1659 /* The function changes allocno in range list given by R onto A. */
1661 change_allocno_in_range_list (allocno_live_range_t r, ira_allocno_t a)
1663 for (; r != NULL; r = r->next)
1667 /* Move all live ranges associated with allocno FROM to allocno TO. */
1669 move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1671 allocno_live_range_t lr = ALLOCNO_LIVE_RANGES (from);
1673 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1675 fprintf (ira_dump_file,
1676 "      Moving ranges of a%dr%d to a%dr%d: ",
1677 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1678 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1679 ira_print_live_range_list (ira_dump_file, lr);
/* Retarget FROM's ranges at TO, merge them into TO's list, and leave
   FROM with no ranges.  */
1681 change_allocno_in_range_list (lr, to);
1682 ALLOCNO_LIVE_RANGES (to)
1683 = ira_merge_allocno_live_ranges (lr, ALLOCNO_LIVE_RANGES (to));
1684 ALLOCNO_LIVE_RANGES (from) = NULL;
1687 /* Copy all live ranges associated with allocno FROM to allocno TO. */
1689 copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1691 allocno_live_range_t lr = ALLOCNO_LIVE_RANGES (from);
1693 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1695 fprintf (ira_dump_file,
1696 "      Copying ranges of a%dr%d to a%dr%d: ",
1697 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1698 ALLOCNO_NUM (to), ALLOCNO_REGNO (to))
1699 ira_print_live_range_list (ira_dump_file, lr);
/* Unlike the move variant, duplicate the range list first so FROM
   keeps its own ranges.  */
1701 lr = ira_copy_allocno_live_range_list (lr);
1702 change_allocno_in_range_list (lr, to);
1703 ALLOCNO_LIVE_RANGES (to)
1704 = ira_merge_allocno_live_ranges (lr, ALLOCNO_LIVE_RANGES (to));
1707 /* Return TRUE if NODE represents a loop with low register
1710 low_pressure_loop_node_p (ira_loop_tree_node_t node)
1713 enum reg_class cover_class;
/* Basic-block nodes are not loops.  */
1715 if (node->bb != NULL)
/* High pressure in any single cover class disqualifies the node.  */
1718 for (i = 0; i < ira_reg_class_cover_size; i++)
1720 cover_class = ira_reg_class_cover[i];
1721 if (node->reg_pressure[cover_class]
1722 > ira_available_class_regs[cover_class])
1728 /* Sort loops for marking them for removal. We put already marked
1729 loops first, then less frequent loops next, and then outer loops
1732 loop_compare_func (const void *v1p, const void *v2p)
1735 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
1736 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
/* Both nodes must be non-root loops (the root is never a removal
   candidate).  */
1738 ira_assert (l1->parent != NULL && l2->parent != NULL);
1739 if (l1->to_remove_p && ! l2->to_remove_p)
1741 if (! l1->to_remove_p && l2->to_remove_p)
1743 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
1745 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
1747 /* Make sorting stable. */
1748 return l1->loop->num - l2->loop->num;
1752 /* Mark loops which should be removed from regional allocation. We
1753 remove a loop with low register pressure inside another loop with
1754 register pressure. In this case a separate allocation of the loop
1755 hardly helps (for irregular register file architecture it could
1756 help by choosing a better hard register in the loop but we prefer
1757 faster allocation even in this case). We also remove cheap loops
1758 if there are more than IRA_MAX_LOOPS_NUM of them. */
1760 mark_loops_for_removal (void)
1763 ira_loop_tree_node_t *sorted_loops;
1767 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
1768 * VEC_length (loop_p,
/* Collect all live loop nodes, pre-marking for removal those whose
   pressure -- and whose parent's pressure -- is low.  */
1770 for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1771 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1773 if (ira_loop_nodes[i].parent == NULL)
1775 /* Don't remove the root. */
1776 ira_loop_nodes[i].to_remove_p = false;
1779 sorted_loops[n++] = &ira_loop_nodes[i];
1780 ira_loop_nodes[i].to_remove_p
1781 = (low_pressure_loop_node_p (ira_loop_nodes[i].parent)
1782 && low_pressure_loop_node_p (&ira_loop_nodes[i]));
/* Sort by removal priority and mark enough loops that at most
   IRA_MAX_LOOPS_NUM regions remain.  */
1784 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
1785 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
1787 sorted_loops[i]->to_remove_p = true;
1788 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1791 "  Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
1792 sorted_loops[i]->loop->num, sorted_loops[i]->loop->header->index,
1793 sorted_loops[i]->loop->header->frequency,
1794 loop_depth (sorted_loops[i]->loop),
1795 low_pressure_loop_node_p (sorted_loops[i]->parent)
1796 && low_pressure_loop_node_p (sorted_loops[i])
1797 ? "low pressure" : "cheap loop");
1799 ira_free (sorted_loops);
1802 /* Mark all loops but root for removing. */
1804 mark_all_loops_for_removal (void)
/* Unconditional variant of mark_loops_for_removal: every live loop
   node except the root is marked.  */
1809 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1810 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1812 if (ira_loop_nodes[i].parent == NULL)
1814 /* Don't remove the root. */
1815 ira_loop_nodes[i].to_remove_p = false;
1818 ira_loop_nodes[i].to_remove_p = true;
1819 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1822 "  Mark loop %d (header %d, freq %d, depth %d) for removal\n",
1823 ira_loop_nodes[i].loop->num,
1824 ira_loop_nodes[i].loop->header->index,
1825 ira_loop_nodes[i].loop->header->frequency,
1826 loop_depth (ira_loop_nodes[i].loop));
1830 /* Definition of vector of loop tree nodes. */
1831 DEF_VEC_P(ira_loop_tree_node_t);
1832 DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1834 /* Vec containing references to all removed loop tree nodes. */
1835 static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1837 /* Vec containing references to all children of loop tree nodes. */
1838 static VEC(ira_loop_tree_node_t,heap) *children_vec;
1840 /* Remove subregions of NODE if their separate allocation will not
1841 improve the result. */
/* NOTE(review): "uneccesary" is a historical misspelling of
   "unnecessary"; the name is kept because callers use it.  */
1843 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
1847 ira_loop_tree_node_t subnode;
1849 remove_p = node->to_remove_p;
1851 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
1852 start = VEC_length (ira_loop_tree_node_t, children_vec);
/* Recurse into subloops; children that survive are gathered on
   children_vec to be re-linked below.  */
1853 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
1854 if (subnode->bb == NULL)
1855 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
1857 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
1858 node->children = node->subloops = NULL;
/* A removed node is queued for freeing by the caller.  */
1861 VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
/* Rebuild this node's children and subloop lists from the collected
   survivors; popping restores the original order.  */
1864 while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
1866 subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
1867 subnode->parent = node;
1868 subnode->next = node->children;
1869 node->children = subnode;
1870 if (subnode->bb == NULL)
1872 subnode->subloop_next = node->subloops;
1873 node->subloops = subnode;
1878 /* Return TRUE if NODE is inside PARENT. */
1880 loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
1882 for (node = node->parent; node != NULL; node = node->parent)
1888 /* Sort allocnos according to their order in regno allocno list. */
1890 regno_allocno_order_compare_func (const void *v1p, const void *v2p)
1892 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
1893 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
1894 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
1895 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
/* Allocnos of outer regions come before allocnos of regions nested
   inside them.  */
1897 if (loop_is_inside_p (n1, n2))
1899 else if (loop_is_inside_p (n2, n1))
1901 /* If allocnos are equally good, sort by allocno numbers, so that
1902 the results of qsort leave nothing to chance. We put allocnos
1903 with higher number first in the list because it is the original
1904 order for allocnos from loops on the same levels. */
1905 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
1908 /* This array is used to sort allocnos to restore allocno order in
1909 the regno allocno list. */
1910 static ira_allocno_t *regno_allocnos;
1912 /* Restore allocno order for REGNO in the regno allocno list. */
1914 ira_rebuild_regno_allocno_list (int regno)
/* Collect all of REGNO's allocnos into the scratch array...  */
1919 for (n = 0, a = ira_regno_allocno_map[regno];
1921 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1922 regno_allocnos[n++] = a;
/* ...sort them into canonical region order and relink the list.  */
1924 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
1925 regno_allocno_order_compare_func);
1926 for (i = 1; i < n; i++)
1927 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
1928 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
1929 ira_regno_allocno_map[regno] = regno_allocnos[0];
1930 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1931 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
1934 /* Propagate info from allocno FROM_A to allocno A. */
1936 propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
1938 enum reg_class cover_class;
/* Same accumulation as in propagate_allocno_info, but for a single
   pair -- used when a region allocno is merged away.  */
1940 merge_hard_reg_conflicts (from_a, a, false);
1941 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
1942 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
1943 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
1944 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
1945 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
1946 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
/* An allocno is a bad spill candidate only if every merged source
   was.  */
1947 if (! ALLOCNO_BAD_SPILL_P (from_a))
1948 ALLOCNO_BAD_SPILL_P (a) = false;
1949 cover_class = ALLOCNO_COVER_CLASS (from_a);
1950 ira_assert (cover_class == ALLOCNO_COVER_CLASS (a));
1951 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1952 ALLOCNO_HARD_REG_COSTS (from_a));
1953 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1955 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
1956 ALLOCNO_COVER_CLASS_COST (a) += ALLOCNO_COVER_CLASS_COST (from_a);
1957 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
1960 /* Remove allocnos from loops removed from the allocation
1963 remove_unnecessary_allocnos (void)
1966 bool merged_p, rebuild_p;
1967 ira_allocno_t a, prev_a, next_a, parent_a;
1968 ira_loop_tree_node_t a_node, parent;
1971 regno_allocnos = NULL;
1972 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
1975 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
1979 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
1980 a_node = ALLOCNO_LOOP_TREE_NODE (a);
1981 if (! a_node->to_remove_p)
/* Find the nearest surviving ancestor region, stopping early at one
   that already has an allocno for this regno.  */
1985 for (parent = a_node->parent;
1986 (parent_a = parent->regno_allocno_map[regno]) == NULL
1987 && parent->to_remove_p;
1988 parent = parent->parent)
1990 if (parent_a == NULL)
1992 /* There are no allocnos with the same regno in
1993 upper region -- just move the allocno to the
1996 ALLOCNO_LOOP_TREE_NODE (a) = parent;
1997 parent->regno_allocno_map[regno] = a;
1998 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
2003 /* Remove the allocno and update info of allocno in
2004 the upper region. */
2006 ira_regno_allocno_map[regno] = next_a;
2008 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
2009 move_allocno_live_ranges (a, parent_a);
2011 propagate_some_info_from_allocno (parent_a, a);
2012 /* Remove it from the corresponding regno allocno
2013 map to avoid info propagation of subsequent
2014 allocno into this already removed allocno. */
2015 a_node->regno_allocno_map[regno] = NULL;
2021 /* We need to restore the order in regno allocno list. */
/* Allocate the scratch sort array lazily, only if some regno's list
   actually needs rebuilding.  */
2023 if (regno_allocnos == NULL)
2025 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
2026 * ira_allocnos_num);
2027 ira_rebuild_regno_allocno_list (regno);
/* Live ranges were merged above, so the start/finish chains are
   stale; rebuild them.  (Presumably guarded by merged_p on an elided
   line -- TODO confirm against full source.)  */
2031 ira_rebuild_start_finish_chains ();
2032 if (regno_allocnos != NULL)
2033 ira_free (regno_allocnos);
2036 /* Remove allocnos from all loops but the root. */
2038 remove_low_level_allocnos (void)
2041 bool merged_p, propagate_p;
2042 ira_allocno_t a, top_a;
2043 ira_loop_tree_node_t a_node, parent;
2044 ira_allocno_iterator ai;
2047 FOR_EACH_ALLOCNO (a, ai)
2049 a_node = ALLOCNO_LOOP_TREE_NODE (a);
/* Allocnos already at the root and cap members are left alone.  */
2050 if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
2052 regno = ALLOCNO_REGNO (a);
2053 if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
/* No root allocno exists for this regno yet -- promote A itself.  */
2055 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2056 ira_loop_tree_root->regno_allocno_map[regno] = a;
2059 propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
2060 /* Remove the allocno and update info of allocno in the upper
2062 move_allocno_live_ranges (a, top_a);
2065 propagate_some_info_from_allocno (top_a, a);
/* Consistency checks on the remaining allocnos.  */
2067 FOR_EACH_ALLOCNO (a, ai)
2069 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2070 if (a_node == ira_loop_tree_root)
2072 parent = a_node->parent;
2073 regno = ALLOCNO_REGNO (a);
2074 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2075 ira_assert (ALLOCNO_CAP (a) != NULL);
2076 else if (ALLOCNO_CAP (a) == NULL)
2077 ira_assert (parent->regno_allocno_map[regno] != NULL);
/* Reset per-allocno links and fold the accumulated `total' conflict
   info into the base fields now that only the root region remains.  */
2079 FOR_EACH_ALLOCNO (a, ai)
2081 regno = ALLOCNO_REGNO (a);
2082 if (ira_loop_tree_root->regno_allocno_map[regno] == a)
2084 ira_regno_allocno_map[regno] = a;
2085 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
2086 ALLOCNO_CAP_MEMBER (a) = NULL;
2087 COPY_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
2088 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
2090 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2091 ALLOCNO_NO_STACK_REG_P (a) = true;
2098 ira_rebuild_start_finish_chains ();
2101 /* Remove loops from consideration. We remove all loops except for
2102 root if ALL_P or loops for which a separate allocation will not
2103 improve the result. We have to do this after allocno creation and
2104 their costs and cover class evaluation because only after that the
2105 register pressure can be known and is calculated. */
2107 remove_unnecessary_regions (bool all_p)
2110 mark_all_loops_for_removal ();
2112 mark_loops_for_removal ();
/* Worst case: every basic-block node plus every loop node ends up on
   one of the work vectors.  */
2114 = VEC_alloc (ira_loop_tree_node_t, heap,
2115 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2117 = VEC_alloc (ira_loop_tree_node_t, heap,
2118 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2119 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
2120 VEC_free (ira_loop_tree_node_t, heap, children_vec);
2122 remove_low_level_allocnos ();
2124 remove_unnecessary_allocnos ();
/* Free the loop tree nodes that were spliced out above.  */
2125 while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
2126 finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
2127 VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
2132 /* At this point true value of allocno attribute bad_spill_p means
2133 that there is an insn where allocno occurs and where the allocno
2134 can not be used as memory. The function updates the attribute, now
2135 it can be true only for allocnos which can not be used as memory in
2136 an insn and in whose live ranges there is other allocno deaths.
2137 Spilling allocnos with true value will not improve the code because
2138 it will not make other allocnos colorable and additional reloads
2139 for the corresponding pseudo will be generated in reload pass for
2140 each insn it occurs.
2142 This is a trick mentioned in one classic article of Chaitin etc
2143 which is frequently omitted in other implementations of RA based on
2146 update_bad_spill_attribute (void)
2150 ira_allocno_iterator ai;
2151 allocno_live_range_t r;
2152 enum reg_class cover_class;
2153 bitmap_head dead_points[N_REG_CLASSES];
2155 for (i = 0; i < ira_reg_class_cover_size; i++)
2157 cover_class = ira_reg_class_cover[i];
2158 bitmap_initialize (&dead_points[cover_class], ®_obstack);
2160 FOR_EACH_ALLOCNO (a, ai)
2162 cover_class = ALLOCNO_COVER_CLASS (a);
2163 if (cover_class == NO_REGS)
2165 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2166 bitmap_set_bit (&dead_points[cover_class], r->finish);
2168 FOR_EACH_ALLOCNO (a, ai)
2170 cover_class = ALLOCNO_COVER_CLASS (a);
2171 if (cover_class == NO_REGS)
2173 if (! ALLOCNO_BAD_SPILL_P (a))
2175 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2177 for (i = r->start + 1; i < r->finish; i++)
2178 if (bitmap_bit_p (&dead_points[cover_class], i))
2184 ALLOCNO_BAD_SPILL_P (a) = false;
2186 for (i = 0; i < ira_reg_class_cover_size; i++)
2188 cover_class = ira_reg_class_cover[i];
2189 bitmap_clear (&dead_points[cover_class]);
2195 /* Set up minimal and maximal live range points for allocnos. */
2197 setup_min_max_allocno_live_range_point (void)
2200 ira_allocno_t a, parent_a, cap;
2201 ira_allocno_iterator ai;
2202 allocno_live_range_t r;
2203 ira_loop_tree_node_t parent;
/* Initialize MIN/MAX from each allocno's own range list: the head
   range supplies the finish, the last range the start.  */
2205 FOR_EACH_ALLOCNO (a, ai)
2207 r = ALLOCNO_LIVE_RANGES (a);
2210 ALLOCNO_MAX (a) = r->finish;
2211 for (; r->next != NULL; r = r->next)
2213 ALLOCNO_MIN (a) = r->start;
/* Accumulate the ranges into caps and parent allocnos, walking each
   regno's allocno list so inner regions are seen before outer
   ones.  */
2215 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2216 for (a = ira_regno_allocno_map[i];
2218 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2220 if (ALLOCNO_MAX (a) < 0)
2222 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2223 /* Accumulation of range info. */
2224 if (ALLOCNO_CAP (a) != NULL)
/* Widen every cap up the cap chain.  */
2226 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
2228 if (ALLOCNO_MAX (cap) < ALLOCNO_MAX (a))
2229 ALLOCNO_MAX (cap) = ALLOCNO_MAX (a);
2230 if (ALLOCNO_MIN (cap) > ALLOCNO_MIN (a))
2231 ALLOCNO_MIN (cap) = ALLOCNO_MIN (a);
2235 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
/* Widen the parent region's allocno for the same regno.  */
2237 parent_a = parent->regno_allocno_map[i];
2238 if (ALLOCNO_MAX (parent_a) < ALLOCNO_MAX (a))
2239 ALLOCNO_MAX (parent_a) = ALLOCNO_MAX (a);
2240 if (ALLOCNO_MIN (parent_a) > ALLOCNO_MIN (a))
2241 ALLOCNO_MIN (parent_a) = ALLOCNO_MIN (a);
2243 #ifdef ENABLE_IRA_CHECKING
/* Sanity check: every MIN/MAX must lie in [0, ira_max_point].  */
2244 FOR_EACH_ALLOCNO (a, ai)
2246 if ((0 <= ALLOCNO_MIN (a) && ALLOCNO_MIN (a) <= ira_max_point)
2247 && (0 <= ALLOCNO_MAX (a) && ALLOCNO_MAX (a) <= ira_max_point))
2254 /* Sort allocnos according to their live ranges. Allocnos with
2255 smaller cover class are put first unless we use priority coloring.
2256 Allocnos with the same cove class are ordered according their start
2257 (min). Allocnos with the same start are ordered according their
2260 allocno_range_compare_func (const void *v1p, const void *v2p)
2263 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2264 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
/* Cover class is the primary key except under priority coloring;
   then range start (MIN), then range finish (MAX).  */
2266 if (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2267 && (diff = ALLOCNO_COVER_CLASS (a1) - ALLOCNO_COVER_CLASS (a2)) != 0)
2269 if ((diff = ALLOCNO_MIN (a1) - ALLOCNO_MIN (a2)) != 0)
2271 if ((diff = ALLOCNO_MAX (a1) - ALLOCNO_MAX (a2)) != 0)
/* Tie-break on allocno number so the qsort result is stable.  */
2273 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2276 /* Sort ira_conflict_id_allocno_map and set up conflict id of
2279 sort_conflict_id_allocno_map (void)
2283 ira_allocno_iterator ai;
2286 FOR_EACH_ALLOCNO (a, ai)
2287 ira_conflict_id_allocno_map[num++] = a;
2288 qsort (ira_conflict_id_allocno_map, num, sizeof (ira_allocno_t),
2289 allocno_range_compare_func);
/* An allocno's conflict id is its index in the sorted map; the
   unused tail of the map is cleared.  */
2290 for (i = 0; i < num; i++)
2291 if ((a = ira_conflict_id_allocno_map[i]) != NULL)
2292 ALLOCNO_CONFLICT_ID (a) = i;
2293 for (i = num; i < ira_allocnos_num; i++)
2294 ira_conflict_id_allocno_map[i] = NULL;
2297 /* Set up minimal and maximal conflict ids of allocnos with which
2298 given allocno can conflict. */
/* Two passes over ira_conflict_id_allocno_map (sorted by live range):
   a forward pass narrows ALLOCNO_MIN to the smallest conflict id whose
   range can still overlap, and a backward pass narrows ALLOCNO_MAX
   symmetrically using a per-point last_lived table.  Conflict checks
   elsewhere can then restrict themselves to the [MIN, MAX] id window.  */
2300 setup_min_max_conflict_allocno_ids (void)
2303 int i, j, min, max, start, finish, first_not_finished, filled_area_start;
2304 int *live_range_min, *last_lived;
/* Scratch copy of the original ALLOCNO_MIN values; ALLOCNO_MIN itself
   is overwritten with a conflict id below, so the program points are
   saved here for the second pass.  */
2307 live_range_min = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
2309 first_not_finished = -1;
/* Forward pass: compute the minimal conflicting id for each allocno.  */
2310 for (i = 0; i < ira_allocnos_num; i++)
2312 a = ira_conflict_id_allocno_map[i];
/* Restart the window when the cover class changes (classes are sorted
   contiguously unless priority coloring is in use).  */
2316 || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2317 && cover_class != (int) ALLOCNO_COVER_CLASS (a)))
2319 cover_class = ALLOCNO_COVER_CLASS (a);
2321 first_not_finished = i;
2325 start = ALLOCNO_MIN (a);
2326 /* If we skip an allocno, the allocno with smaller ids will
2327 be also skipped because of the secondary sorting the
2328 range finishes (see function
2329 allocno_range_compare_func). */
2330 while (first_not_finished < i
2331 && start > ALLOCNO_MAX (ira_conflict_id_allocno_map
2332 [first_not_finished]))
2333 first_not_finished++;
2334 min = first_not_finished;
2337 /* We could increase min further in this case but it is good
/* Save the real range start before clobbering ALLOCNO_MIN with the id.  */
2340 live_range_min[i] = ALLOCNO_MIN (a);
2341 ALLOCNO_MIN (a) = min;
/* Backward pass: last_lived[point] tracks, per program point, data used
   to derive the maximal conflicting id (table indexed by range finish).  */
2343 last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
2345 filled_area_start = -1;
2346 for (i = ira_allocnos_num - 1; i >= 0; i--)
2348 a = ira_conflict_id_allocno_map[i];
/* Reset the fill table at each cover class boundary, mirroring the
   forward pass.  */
2352 || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2353 && cover_class != (int) ALLOCNO_COVER_CLASS (a)))
2355 cover_class = ALLOCNO_COVER_CLASS (a);
2356 for (j = 0; j < ira_max_point; j++)
2358 filled_area_start = ira_max_point;
2360 min = live_range_min[i];
2361 finish = ALLOCNO_MAX (a);
2362 max = last_lived[finish];
2364 /* We could decrease max further in this case but it is good
2366 max = ALLOCNO_CONFLICT_ID (a) - 1;
2367 ALLOCNO_MAX (a) = max;
2368 /* In filling, we can go further A range finish to recognize
2369 intersection quickly because if the finish of subsequently
2370 processed allocno (it has smaller conflict id) range is
2371 further A range finish than they are definitely intersected
2372 (the reason for this is the allocnos with bigger conflict id
2373 have their range starts not smaller than allocnos with
2375 for (j = min; j < filled_area_start; j++)
2377 filled_area_start = min;
2379 ira_free (last_lived);
2380 ira_free (live_range_min);
/* NOTE(review): the function header for this body is not visible in
   this chunk; from the walk below it appears to be the cap-creation
   pass (create_caps) -- confirm against the complete file.  */
2389 ira_allocno_iterator ai;
2390 ira_loop_tree_node_t loop_tree_node;
2392 FOR_EACH_ALLOCNO (a, ai)
/* Allocnos already in the root loop-tree node have no parent node to
   propagate into, so they need no cap.  */
2394 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2396 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2397 create_cap_allocno (a);
2398 else if (ALLOCNO_CAP (a) == NULL)
2400 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
/* A cap is only created when the allocno is not a border allocno of
   its loop-tree node.  */
2401 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2402 create_cap_allocno (a);
2409 /* This page contains code transforming a multi-region internal
2410 representation (IR) into a one-region IR, which is necessary for reload.
2411 This transformation is called IR flattening. We might just rebuild
2412 the IR for one region but we don't do it because it takes a lot of
2415 /* Map: regno -> allocnos which will finally represent the regno for
2416 IR with one region. */
2417 static ira_allocno_t *regno_top_level_allocno_map;
2419 /* Find the allocno that corresponds to A at a level one higher up in the
2420 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2422 ira_parent_allocno (ira_allocno_t a)
2424 ira_loop_tree_node_t parent;
/* Caps have no regno-based parent; bail out (return value on this path
   is on a line not visible here -- NULL per the header comment).  */
2426 if (ALLOCNO_CAP (a) != NULL)
2429 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
/* Look the pseudo up in the parent node's regno -> allocno map.  */
2433 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
2436 /* Find the allocno that corresponds to A at a level one higher up in the
2437 loop tree. If ALLOCNO_CAP is set for A, return that. */
2439 ira_parent_or_cap_allocno (ira_allocno_t a)
/* Prefer the cap when one exists; otherwise fall back to the parent
   node's allocno for the same pseudo.  */
2441 if (ALLOCNO_CAP (a) != NULL)
2442 return ALLOCNO_CAP (a);
2444 return ira_parent_allocno (a);
2447 /* Process all allocnos originated from pseudo REGNO and copy live
2448 ranges, hard reg conflicts, and allocno stack reg attributes from
2449 low level allocnos to final allocnos which are destinations of
2450 removed stores at a loop exit. Return true if we copied live
2453 copy_info_to_removed_store_destinations (int regno)
2456 ira_allocno_t parent_a = NULL;
2457 ira_loop_tree_node_t parent;
/* Walk every allocno created for REGNO across the loop tree.  */
2461 for (a = ira_regno_allocno_map[regno];
2463 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2465 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))])
2466 /* This allocno will be removed. */
2468 /* Caps will be removed. */
2469 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
/* Climb the loop tree looking for the nearest surviving ancestor
   allocno that is a mem-optimized store destination.  */
2470 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2472 parent = parent->parent)
2473 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
2474 || (parent_a == regno_top_level_allocno_map[REGNO (ALLOCNO_REG
2476 && ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)))
2478 if (parent == NULL || parent_a == NULL)
/* Propagate live ranges, conflicts, and call-related counters up to
   the destination allocno so the information is not lost when A is
   removed.  */
2480 copy_allocno_live_ranges (a, parent_a);
2481 merge_hard_reg_conflicts (a, parent_a, true);
2482 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
2483 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2484 += ALLOCNO_CALLS_CROSSED_NUM (a);
2485 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2486 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2492 /* Flatten the IR. In other words, this function transforms IR as if
2493 it were built with one region (without loops). We could make it
2494 much simpler by rebuilding IR with one region, but unfortunately it
2495 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
2496 IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
2497 IRA_MAX_POINT before emitting insns on the loop borders. */
2499 ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
2504 bool new_pseudos_p, merged_p, mem_dest_p;
2506 enum reg_class cover_class;
2507 ira_allocno_t a, parent_a, first, second, node_first, node_second;
2509 ira_loop_tree_node_t node;
2510 allocno_live_range_t r;
2511 ira_allocno_iterator ai;
2512 ira_copy_iterator ci;
2513 sparseset allocnos_live;
/* Map each (possibly renamed) pseudo to the single allocno that will
   represent it in the flattened, one-region IR.  */
2515 regno_top_level_allocno_map
2516 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
2517 memset (regno_top_level_allocno_map, 0,
2518 max_reg_num () * sizeof (ira_allocno_t));
2519 new_pseudos_p = merged_p = false;
/* Phase 1: seed the total conflict sets from the per-node ones.  */
2520 FOR_EACH_ALLOCNO (a, ai)
2522 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2523 /* Caps are not in the regno allocno maps and they are never
2524 will be transformed into allocnos existing after IR
2527 COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
2528 ALLOCNO_CONFLICT_HARD_REGS (a));
2530 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
2533 /* Fix final allocno attributes. */
/* Phase 2: walk every pseudo's allocno chain, merging subtree data
   into the surviving top-level allocno and subtracting duplicated
   costs/frequencies from ancestors.  */
2534 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2537 for (a = ira_regno_allocno_map[i];
2539 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2541 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2542 if (ALLOCNO_SOMEWHERE_RENAMED_P (a))
2543 new_pseudos_p = true;
2544 parent_a = ira_parent_allocno (a);
2545 if (parent_a == NULL)
/* No parent: this allocno itself becomes the top-level representative.  */
2547 ALLOCNO_COPIES (a) = NULL;
2548 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2551 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
2553 if (ALLOCNO_MEM_OPTIMIZED_DEST (a) != NULL)
/* Same pseudo in parent: merge the child into the parent and remember
   that a merge happened (live range chains must be rebuilt).  */
2555 if (REGNO (ALLOCNO_REG (a)) == REGNO (ALLOCNO_REG (parent_a)))
2557 merge_hard_reg_conflicts (a, parent_a, true);
2558 move_allocno_live_ranges (a, parent_a);
2560 ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2561 = (ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2562 || ALLOCNO_MEM_OPTIMIZED_DEST_P (a));
2565 new_pseudos_p = true;
/* Different pseudo (renamed on a loop border): subtract this subtree's
   contribution from every ancestor so totals are not double-counted.  */
2568 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
2569 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
2570 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
2571 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2572 -= ALLOCNO_CALLS_CROSSED_NUM (a);
2573 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2574 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2575 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
2576 && ALLOCNO_NREFS (parent_a) >= 0
2577 && ALLOCNO_FREQ (parent_a) >= 0);
2578 cover_class = ALLOCNO_COVER_CLASS (parent_a);
2579 hard_regs_num = ira_class_hard_regs_num[cover_class];
2580 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
2581 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
2582 for (j = 0; j < hard_regs_num; j++)
2583 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
2584 -= ALLOCNO_HARD_REG_COSTS (a)[j];
2585 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
2586 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
2587 for (j = 0; j < hard_regs_num; j++)
2588 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
2589 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
2590 ALLOCNO_COVER_CLASS_COST (parent_a)
2591 -= ALLOCNO_COVER_CLASS_COST (a);
2592 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
2593 parent_a = ira_parent_allocno (parent_a);
2594 if (parent_a == NULL)
2597 ALLOCNO_COPIES (a) = NULL;
2598 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
/* Salvage info from allocnos removed as store destinations, if any.  */
2600 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
2603 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
2604 if (merged_p || ira_max_point_before_emit != ira_max_point)
2605 ira_rebuild_start_finish_chains ();
2608 /* Rebuild conflicts. */
/* Phase 3: discard stale conflicts on surviving allocnos, then sweep
   program points rebuilding conflicts from the (rebuilt) live ranges.  */
2609 FOR_EACH_ALLOCNO (a, ai)
2611 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2612 || ALLOCNO_CAP_MEMBER (a) != NULL)
2614 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2615 ira_assert (r->allocno == a);
2616 clear_allocno_conflicts (a);
2618 allocnos_live = sparseset_alloc (ira_allocnos_num);
2619 for (i = 0; i < ira_max_point; i++)
2621 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
2624 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2625 || ALLOCNO_CAP_MEMBER (a) != NULL)
2627 num = ALLOCNO_NUM (a);
2628 cover_class = ALLOCNO_COVER_CLASS (a);
2629 sparseset_set_bit (allocnos_live, num);
2630 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, n)
2632 ira_allocno_t live_a = ira_allocnos[n];
/* Only intersecting cover classes can actually conflict.  */
2634 if (ira_reg_classes_intersect_p
2635 [cover_class][ALLOCNO_COVER_CLASS (live_a)]
2636 /* Don't set up conflict for the allocno with itself. */
2638 ira_add_allocno_conflict (a, live_a);
2642 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
2643 sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (r->allocno));
2645 sparseset_free (allocnos_live);
2646 compress_conflict_vecs ();
2648 /* Mark some copies for removing and change allocnos in the rest
/* Phase 4: drop copies involving caps or mismatched pseudos; retarget
   surviving copies at the top-level allocnos.  */
2650 FOR_EACH_COPY (cp, ci)
2652 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
2653 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
2655 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2657 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
2658 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
2659 ALLOCNO_NUM (cp->first), REGNO (ALLOCNO_REG (cp->first)),
2660 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
2661 ALLOCNO_NUM (cp->second), REGNO (ALLOCNO_REG (cp->second)));
2662 cp->loop_tree_node = NULL;
2665 first = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->first))];
2666 second = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->second))];
2667 node = cp->loop_tree_node;
2669 keep_p = true; /* It copy generated in ira-emit.c. */
2672 /* Check that the copy was not propagated from level on
2673 which we will have different pseudos. */
2674 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
2675 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
2676 keep_p = ((REGNO (ALLOCNO_REG (first))
2677 == REGNO (ALLOCNO_REG (node_first)))
2678 && (REGNO (ALLOCNO_REG (second))
2679 == REGNO (ALLOCNO_REG (node_second))));
/* Keep: retarget the copy at the root node and the surviving allocnos.  */
2683 cp->loop_tree_node = ira_loop_tree_root;
2685 cp->second = second;
/* Drop: mark for removal (swept in phase 6 below).  */
2689 cp->loop_tree_node = NULL;
2690 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2691 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
2692 cp->num, ALLOCNO_NUM (cp->first),
2693 REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
2694 REGNO (ALLOCNO_REG (cp->second)));
2697 /* Remove unnecessary allocnos on lower levels of the loop tree. */
/* Phase 5: delete non-surviving allocnos and re-home the survivors in
   the root node, restoring reload-visible cost fields.  */
2698 FOR_EACH_ALLOCNO (a, ai)
2700 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2701 || ALLOCNO_CAP_MEMBER (a) != NULL)
2703 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2704 fprintf (ira_dump_file, " Remove a%dr%d\n",
2705 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
2709 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2710 ALLOCNO_REGNO (a) = REGNO (ALLOCNO_REG (a));
2711 ALLOCNO_CAP (a) = NULL;
2712 /* Restore updated costs for assignments from reload. */
2713 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
2714 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = ALLOCNO_COVER_CLASS_COST (a);
2715 if (! ALLOCNO_ASSIGNED_P (a))
2716 ira_free_allocno_updated_costs (a);
2717 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
2718 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
2720 /* Remove unnecessary copies. */
/* Phase 6: sweep copies marked for removal; re-link the rest.  */
2721 FOR_EACH_COPY (cp, ci)
2723 if (cp->loop_tree_node == NULL)
2725 ira_copies[cp->num] = NULL;
2730 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
2731 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
2732 ira_add_allocno_copy_to_list (cp);
2733 ira_swap_allocno_copy_ends_if_necessary (cp);
2735 rebuild_regno_allocno_maps ();
2736 if (ira_max_point != ira_max_point_before_emit)
2737 ira_compress_allocno_live_ranges ();
2738 ira_free (regno_top_level_allocno_map);
2743 #ifdef ENABLE_IRA_CHECKING
2744 /* Check creation of all allocnos. Allocnos on lower levels should
2745 have allocnos or caps on all upper levels. */
2747 check_allocno_creation (void)
2750 ira_allocno_iterator ai;
2751 ira_loop_tree_node_t loop_tree_node;
2753 FOR_EACH_ALLOCNO (a, ai)
2755 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
/* Every allocno must be registered in its own node's allocno set.  */
2756 ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
/* Root-level allocnos need no upper-level counterpart.  */
2758 if (loop_tree_node == ira_loop_tree_root)
2760 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2761 ira_assert (ALLOCNO_CAP (a) != NULL);
2762 else if (ALLOCNO_CAP (a) == NULL)
/* Without a cap, the parent node must carry an allocno for the same
   pseudo and this allocno must be a border allocno.  */
2763 ira_assert (loop_tree_node->parent
2764 ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
2765 && bitmap_bit_p (loop_tree_node->border_allocnos,
2771 /* Identify allocnos which prefer a register class with a single hard register.
2772 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
2773 less likely to use the preferred singleton register. */
2775 update_conflict_hard_reg_costs (void)
2778 ira_allocno_iterator ai;
2781 FOR_EACH_ALLOCNO (a, ai)
2783 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
2784 enum reg_class pref = reg_preferred_class (ALLOCNO_REGNO (a));
/* Only singleton preferred classes are interesting here.  */
2786 if (reg_class_size[pref] != 1)
/* Index of the single preferred hard register within the cover class.  */
2788 index = (ira_class_hard_reg_index[cover_class]
2789 [ira_class_hard_regs[pref][0]]);
2792 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
2793 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
/* Find the smallest per-register cost that still exceeds the cover
   class cost -- the saving available from getting the preferred reg.  */
2796 for (i = ira_class_hard_regs_num[cover_class] - 1; i >= 0; i--)
2797 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_COVER_CLASS_COST (a)
2798 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
2799 min = ALLOCNO_HARD_REG_COSTS (a)[i];
2802 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
/* Discourage conflicting allocnos from taking the singleton register.  */
2804 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
2805 -= min - ALLOCNO_COVER_CLASS_COST (a);
2809 /* Create the internal representation (IR) for IRA (allocnos, copies,
2810 loop tree nodes). If LOOPS_P is FALSE the nodes corresponding to
2811 the loops (except the root which corresponds to the whole function) and
2812 correspondingly allocnos for the loops will not be created. Such
2813 parameter value is used for Chaitin-Briggs coloring. The function
2814 returns TRUE if we generate loop structure (besides nodes
2815 representing the whole function and the basic blocks) for regional
2816 allocation. A true return means that we really need to flatten IR
2817 before the reload. */
2819 ira_build (bool loops_p)
/* Build order: data structures, loop tree, live ranges, then conflict
   information and cost tuning.  */
2823 initiate_cost_vectors ();
2824 initiate_allocnos ();
2826 create_loop_tree_nodes (loops_p);
2830 ira_create_allocno_live_ranges ();
2831 remove_unnecessary_regions (false);
2832 ira_compress_allocno_live_ranges ();
2833 update_bad_spill_attribute ();
/* Recompute whether more than one region actually survived removal.  */
2834 loops_p = more_one_region_p ();
2837 propagate_allocno_info ();
2840 ira_tune_allocno_costs_and_cover_classes ();
2841 #ifdef ENABLE_IRA_CHECKING
2842 check_allocno_creation ();
/* Conflict-id machinery: live range points, sorted conflict map, and
   min/max conflict id windows, then the conflict graph itself.  */
2844 setup_min_max_allocno_live_range_point ();
2845 sort_conflict_id_allocno_map ();
2846 setup_min_max_conflict_allocno_ids ();
2847 ira_build_conflicts ();
2848 update_conflict_hard_reg_costs ();
/* Fast (non-conflict-driven) allocation: collapse to one region and
   conservatively conflict call-crossing allocnos with call-clobbered
   registers.  */
2849 if (! ira_conflicts_p)
2852 ira_allocno_iterator ai;
2854 /* Remove all regions but root one. */
2857 remove_unnecessary_regions (true);
2860 /* We don't save hard registers around calls for fast allocation
2861 -- add caller clobbered registers as conflicting ones to
2862 allocno crossing calls. */
2863 FOR_EACH_ALLOCNO (a, ai)
2864 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
2866 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
2868 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
/* Optional dump of copies and summary statistics.  */
2872 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
2873 print_copies (ira_dump_file);
2874 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
2878 allocno_live_range_t r;
2879 ira_allocno_iterator ai;
2882 FOR_EACH_ALLOCNO (a, ai)
2883 n += ALLOCNO_CONFLICT_ALLOCNOS_NUM (a);
2885 FOR_EACH_ALLOCNO (a, ai)
2886 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2888 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
2889 VEC_length (loop_p, ira_loops.larray), n_basic_blocks,
2891 fprintf (ira_dump_file,
2892 " allocnos=%d, copies=%d, conflicts=%d, ranges=%d\n",
2893 ira_allocnos_num, ira_copies_num, n, nr);
2898 /* Release the data created by function ira_build. */
2902 finish_loop_tree_nodes ();
2905 finish_cost_vectors ();
2906 ira_finish_allocno_live_ranges ();