1 /* Building internal representation for IRA.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "insn-config.h"
35 #include "diagnostic-core.h"
41 #include "sparseset.h"
43 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
45 static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
46 ira_loop_tree_node_t);
/* Forward declaration: the lookup itself is defined later in this file.  */
48 /* The root of the loop tree corresponding to the all function. */
49 ira_loop_tree_node_t ira_loop_tree_root;
51 /* Height of the loop tree. */
52 int ira_loop_tree_height;
54 /* All nodes representing basic blocks are referred through the
55 following array. We can not use basic block member `aux' for this
56 because it is used for insertion of insns on edges. */
57 ira_loop_tree_node_t ira_bb_nodes;
59 /* All nodes representing loops are referred through the following
   array.  */
61 ira_loop_tree_node_t ira_loop_nodes;
63 /* Map regno -> allocnos with given regno (see comments for
64 allocno member `next_regno_allocno'). */
65 ira_allocno_t *ira_regno_allocno_map;
67 /* Array of references to all allocnos. The order number of the
68 allocno corresponds to the index in the array. Removed allocnos
69 have NULL element value. */
70 ira_allocno_t *ira_allocnos;
72 /* Sizes of the previous array. */
/* NOTE(review): the size variable declaration itself (ira_allocnos_num)
   is not visible in this listing -- confirm against the full source.  */
75 /* Map conflict id -> allocno with given conflict id (see comments for
76 allocno member `conflict_id'). */
77 ira_allocno_t *ira_conflict_id_allocno_map;
79 /* Array of references to all copies. The order number of the copy
80 corresponds to the index in the array. Removed copies have NULL
   element value.  */
82 ira_copy_t *ira_copies;
84 /* Size of the previous array. */
89 /* LAST_BASIC_BLOCK before generating additional insns because of live
90 range splitting. Emitting insns on a critical edge creates a new
   basic block.  */
92 static int last_basic_block_before_change;
94 /* The following function allocates the loop tree nodes. If LOOPS_P
95 is FALSE, the nodes corresponding to the loops (except the root
96 which corresponds the all function) will be not allocated but nodes
97 will still be allocated for basic blocks. */
99 create_loop_tree_nodes (bool loops_p)
106 VEC (edge, heap) *edges;
/* Allocate one loop-tree node per basic block; the array is indexed by
   basic-block index (see ira_bb_nodes above).  */
110 = ((struct ira_loop_tree_node *)
111 ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
112 last_basic_block_before_change = last_basic_block;
113 for (i = 0; i < (unsigned int) last_basic_block; i++)
115 ira_bb_nodes[i].regno_allocno_map = NULL;
116 memset (ira_bb_nodes[i].reg_pressure, 0,
117 sizeof (ira_bb_nodes[i].reg_pressure));
118 ira_bb_nodes[i].all_allocnos = NULL;
119 ira_bb_nodes[i].modified_regnos = NULL;
120 ira_bb_nodes[i].border_allocnos = NULL;
121 ira_bb_nodes[i].local_copies = NULL;
123 ira_loop_nodes = ((struct ira_loop_tree_node *)
124 ira_allocate (sizeof (struct ira_loop_tree_node)
125 * VEC_length (loop_p, ira_loops.larray)));
126 max_regno = max_reg_num ();
127 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
129 if (loop != ira_loops.tree_root)
131 ira_loop_nodes[i].regno_allocno_map = NULL;
/* Loops entered or exited through an abnormal critical edge cannot
   be used as allocation regions; lines omitted from this listing
   presumably skip such loops -- TODO confirm against full source.  */
135 FOR_EACH_EDGE (e, ei, loop->header->preds)
136 if (e->src != loop->latch
137 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
144 edges = get_loop_exit_edges (loop);
145 for (j = 0; VEC_iterate (edge, edges, j, e); j++)
146 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
151 VEC_free (edge, heap, edges);
/* For the loops kept as regions, allocate and zero the per-region
   regno -> allocno map and the per-region bitmaps.  */
155 ira_loop_nodes[i].regno_allocno_map
156 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
157 memset (ira_loop_nodes[i].regno_allocno_map, 0,
158 sizeof (ira_allocno_t) * max_regno);
159 memset (ira_loop_nodes[i].reg_pressure, 0,
160 sizeof (ira_loop_nodes[i].reg_pressure));
161 ira_loop_nodes[i].all_allocnos = ira_allocate_bitmap ();
162 ira_loop_nodes[i].modified_regnos = ira_allocate_bitmap ();
163 ira_loop_nodes[i].border_allocnos = ira_allocate_bitmap ();
164 ira_loop_nodes[i].local_copies = ira_allocate_bitmap ();
168 /* The function returns TRUE if there are more one allocation
   region, i.e. an allocated loop node other than the root exists.  */
171 more_one_region_p (void)
/* A loop node with a non-NULL regno_allocno_map is an allocated region
   (see create_loop_tree_nodes); the root always has one.  */
176 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
177 if (ira_loop_nodes[i].regno_allocno_map != NULL
178 && ira_loop_tree_root != &ira_loop_nodes[i])
/* Omitted lines presumably return true here and false after the
   loop -- TODO confirm against the full source.  */
183 /* Free the loop tree node of a loop. */
185 finish_loop_tree_node (ira_loop_tree_node_t loop)
187 if (loop->regno_allocno_map != NULL)
/* Only loop nodes (bb == NULL) own these resources; the bitmaps are
   freed in reverse order of their allocation in
   create_loop_tree_nodes.  */
189 ira_assert (loop->bb == NULL);
190 ira_free_bitmap (loop->local_copies);
191 ira_free_bitmap (loop->border_allocnos);
192 ira_free_bitmap (loop->modified_regnos);
193 ira_free_bitmap (loop->all_allocnos);
194 ira_free (loop->regno_allocno_map);
195 loop->regno_allocno_map = NULL;
199 /* Free the loop tree nodes. */
201 finish_loop_tree_nodes (void)
206 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
207 finish_loop_tree_node (&ira_loop_nodes[i]);
208 ira_free (ira_loop_nodes);
/* Basic-block nodes may or may not have each field allocated, hence
   the NULL checks; iterate up to the block count recorded before any
   edge splitting added new blocks.  */
209 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
211 if (ira_bb_nodes[i].local_copies != NULL)
212 ira_free_bitmap (ira_bb_nodes[i].local_copies);
213 if (ira_bb_nodes[i].border_allocnos != NULL)
214 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
215 if (ira_bb_nodes[i].modified_regnos != NULL)
216 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
217 if (ira_bb_nodes[i].all_allocnos != NULL)
218 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
219 if (ira_bb_nodes[i].regno_allocno_map != NULL)
220 ira_free (ira_bb_nodes[i].regno_allocno_map);
222 ira_free (ira_bb_nodes);
227 /* The following recursive function adds LOOP to the loop tree
228 hierarchy. LOOP is added only once. */
230 add_loop_to_tree (struct loop *loop)
233 ira_loop_tree_node_t loop_node, parent_node;
235 /* We can not use loop node access macros here because of potential
236 checking and because the nodes are not initialized enough
   yet.  */
/* Recurse on the outer loop first so ancestors are linked into the
   tree before their descendants.  */
238 if (loop_outer (loop) != NULL)
239 add_loop_to_tree (loop_outer (loop));
/* children == NULL is used as the "not yet added" marker.  */
240 if (ira_loop_nodes[loop->num].regno_allocno_map != NULL
241 && ira_loop_nodes[loop->num].children == NULL)
243 /* We have not added loop node to the tree yet. */
244 loop_node = &ira_loop_nodes[loop->num];
245 loop_node->loop = loop;
246 loop_node->bb = NULL;
/* Find the nearest enclosing loop that is itself an allocated
   region (regions may be skipped, see create_loop_tree_nodes).  */
247 for (parent = loop_outer (loop);
249 parent = loop_outer (parent))
250 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
254 loop_node->next = NULL;
255 loop_node->subloop_next = NULL;
256 loop_node->parent = NULL;
/* Link the node at the head of the parent's children and subloops
   lists.  */
260 parent_node = &ira_loop_nodes[parent->num];
261 loop_node->next = parent_node->children;
262 parent_node->children = loop_node;
263 loop_node->subloop_next = parent_node->subloops;
264 parent_node->subloops = loop_node;
265 loop_node->parent = parent_node;
270 /* The following recursive function sets up levels of nodes of the
271 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
272 The function returns maximal value of level in the tree + 1. */
274 setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
276 int height, max_height;
277 ira_loop_tree_node_t subloop_node;
279 ira_assert (loop_node->bb == NULL);
280 loop_node->level = level;
281 max_height = level + 1;
/* Recurse only into subloops (not bb children); track the deepest
   subtree height.  */
282 for (subloop_node = loop_node->subloops;
283 subloop_node != NULL;
284 subloop_node = subloop_node->subloop_next)
286 ira_assert (subloop_node->bb == NULL);
287 height = setup_loop_tree_level (subloop_node, level + 1);
288 if (height > max_height)
/* Omitted lines presumably assign max_height = height and return
   max_height -- TODO confirm against the full source.  */
294 /* Create the loop tree. The algorithm is designed to provide correct
295 order of loops (they are ordered by their last loop BB) and basic
296 blocks in the chain formed by member next. */
298 form_loop_tree (void)
303 ira_loop_tree_node_t bb_node, loop_node;
306 /* We can not use loop/bb node access macros because of potential
307 checking and because the nodes are not initialized enough
   yet.  */
/* Reset the "added" markers on all allocated loop nodes before
   rebuilding (add_loop_to_tree tests children == NULL).  */
309 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
310 if (ira_loop_nodes[i].regno_allocno_map != NULL)
312 ira_loop_nodes[i].children = NULL;
313 ira_loop_nodes[i].subloops = NULL;
/* For each basic block: initialize its node and attach it to the
   nearest enclosing loop that is an allocated region.  */
317 bb_node = &ira_bb_nodes[bb->index];
319 bb_node->loop = NULL;
320 bb_node->subloops = NULL;
321 bb_node->children = NULL;
322 bb_node->subloop_next = NULL;
323 bb_node->next = NULL;
324 for (parent = bb->loop_father;
326 parent = loop_outer (parent))
327 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
329 add_loop_to_tree (parent);
330 loop_node = &ira_loop_nodes[parent->num];
331 bb_node->next = loop_node->children;
332 bb_node->parent = loop_node;
333 loop_node->children = bb_node;
335 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
336 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
337 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
342 /* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes.  */
345 rebuild_regno_allocno_maps (void)
348 int max_regno, regno;
350 ira_loop_tree_node_t loop_tree_node;
352 ira_allocno_iterator ai;
/* max_reg_num may have grown since the maps were first allocated, so
   reallocate every per-region map at the new size and zero it.  */
354 max_regno = max_reg_num ();
355 for (l = 0; VEC_iterate (loop_p, ira_loops.larray, l, loop); l++)
356 if (ira_loop_nodes[l].regno_allocno_map != NULL)
358 ira_free (ira_loop_nodes[l].regno_allocno_map);
359 ira_loop_nodes[l].regno_allocno_map
360 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
362 memset (ira_loop_nodes[l].regno_allocno_map, 0,
363 sizeof (ira_allocno_t) * max_regno);
365 ira_free (ira_regno_allocno_map);
366 ira_regno_allocno_map
367 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
368 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
/* Re-thread every non-cap allocno onto the global per-regno list and
   record the first allocno seen per (region, regno).  */
369 FOR_EACH_ALLOCNO (a, ai)
371 if (ALLOCNO_CAP_MEMBER (a) != NULL)
372 /* Caps are not in the regno allocno maps. */
374 regno = ALLOCNO_REGNO (a);
375 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
376 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
377 ira_regno_allocno_map[regno] = a;
378 if (loop_tree_node->regno_allocno_map[regno] == NULL)
379 /* Remember that we can create temporary allocnos to break
380 cycles in register shuffle. */
381 loop_tree_node->regno_allocno_map[regno] = a;
387 /* Pools for allocnos and live ranges. */
388 static alloc_pool allocno_pool, live_range_pool;
390 /* Vec containing references to all created allocnos. It is a
391 container of array allocnos. */
392 static VEC(ira_allocno_t,heap) *allocno_vec;
394 /* Vec containing references to all created allocnos. It is a
395 container of ira_conflict_id_allocno_map. */
396 static VEC(ira_allocno_t,heap) *ira_conflict_id_allocno_map_vec;
398 /* Initialize data concerning allocnos. */
400 initiate_allocnos (void)
/* Create the two allocation pools and pre-size the vectors to twice
   the current pseudo count as a growth heuristic.  */
403 = create_alloc_pool ("live ranges",
404 sizeof (struct live_range), 100);
406 = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
407 allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
409 ira_allocnos_num = 0;
410 ira_conflict_id_allocno_map_vec
411 = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
412 ira_conflict_id_allocno_map = NULL;
413 ira_regno_allocno_map
414 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
415 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
418 /* Create and return the allocno corresponding to REGNO in
419 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
420 same regno if CAP_P is FALSE. */
422 ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
426 a = (ira_allocno_t) pool_alloc (allocno_pool);
427 ALLOCNO_REGNO (a) = regno;
428 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
/* Non-cap allocnos are threaded onto the global per-regno chain and
   recorded in the region's regno map (first allocno wins).  */
431 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
432 ira_regno_allocno_map[regno] = a;
433 if (loop_tree_node->regno_allocno_map[regno] == NULL)
434 /* Remember that we can create temporary allocnos to break
435 cycles in register shuffle on region borders (see
   ira-emit.c) -- TODO confirm the reference; line omitted here.  */
437 loop_tree_node->regno_allocno_map[regno] = a;
/* Initialize every allocno field to its neutral default.  */
439 ALLOCNO_CAP (a) = NULL;
440 ALLOCNO_CAP_MEMBER (a) = NULL;
441 ALLOCNO_NUM (a) = ira_allocnos_num;
442 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
443 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = NULL;
444 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
/* Start with the unallocatable registers already marked as
   conflicting.  */
445 COPY_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
446 COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), ira_no_alloc_regs);
447 ALLOCNO_NREFS (a) = 0;
448 ALLOCNO_FREQ (a) = 0;
449 ALLOCNO_HARD_REGNO (a) = -1;
450 ALLOCNO_CALL_FREQ (a) = 0;
451 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
453 ALLOCNO_NO_STACK_REG_P (a) = false;
454 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
456 ALLOCNO_MEM_OPTIMIZED_DEST (a) = NULL;
457 ALLOCNO_MEM_OPTIMIZED_DEST_P (a) = false;
458 ALLOCNO_SOMEWHERE_RENAMED_P (a) = false;
459 ALLOCNO_CHILD_RENAMED_P (a) = false;
460 ALLOCNO_DONT_REASSIGN_P (a) = false;
461 ALLOCNO_BAD_SPILL_P (a) = false;
462 ALLOCNO_IN_GRAPH_P (a) = false;
463 ALLOCNO_ASSIGNED_P (a) = false;
464 ALLOCNO_MAY_BE_SPILLED_P (a) = false;
465 ALLOCNO_SPLAY_REMOVED_P (a) = false;
466 ALLOCNO_CONFLICT_VEC_P (a) = false;
/* Negative REGNO has no pseudo mode; use VOIDmode.  */
467 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
468 ALLOCNO_COPIES (a) = NULL;
469 ALLOCNO_HARD_REG_COSTS (a) = NULL;
470 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
471 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
472 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
473 ALLOCNO_LEFT_CONFLICTS_SIZE (a) = -1;
474 ALLOCNO_COVER_CLASS (a) = NO_REGS;
475 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = 0;
476 ALLOCNO_COVER_CLASS_COST (a) = 0;
477 ALLOCNO_MEMORY_COST (a) = 0;
478 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
479 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
480 ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
481 ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
/* Each allocno is initially its own (singleton) coalesce group.  */
482 ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
483 ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
484 ALLOCNO_LIVE_RANGES (a) = NULL;
/* MIN > MAX marks an empty conflict-id range.  */
485 ALLOCNO_MIN (a) = INT_MAX;
486 ALLOCNO_MAX (a) = -1;
487 ALLOCNO_CONFLICT_ID (a) = ira_allocnos_num;
/* Register the allocno in both growable vectors and refresh the raw
   array views exposed through ira_allocnos and
   ira_conflict_id_allocno_map (pushing can reallocate the storage).  */
488 VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
489 ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
490 ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
491 VEC_safe_push (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec, a);
492 ira_conflict_id_allocno_map
493 = VEC_address (ira_allocno_t, ira_conflict_id_allocno_map_vec);
497 /* Set up cover class for A and update its conflict hard registers. */
499 ira_set_allocno_cover_class (ira_allocno_t a, enum reg_class cover_class)
501 ALLOCNO_COVER_CLASS (a) = cover_class;
/* Every hard register outside the cover class is unusable for A, so
   fold the complement of the class into both conflict sets.  */
502 IOR_COMPL_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
503 reg_class_contents[cover_class]);
504 IOR_COMPL_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
505 reg_class_contents[cover_class]);
508 /* Merge hard register conflicts from allocno FROM into allocno TO. If
509 TOTAL_ONLY is true, we ignore ALLOCNO_CONFLICT_HARD_REGS. */
511 merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
/* The non-total set is merged only when !TOTAL_ONLY; the omitted
   lines presumably guard the first IOR with that test -- TODO confirm
   against the full source.  */
515 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (to),
516 ALLOCNO_CONFLICT_HARD_REGS (from));
517 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (to),
518 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (from));
520 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
521 ALLOCNO_NO_STACK_REG_P (to) = true;
522 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
523 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
527 /* Return TRUE if the conflict vector with NUM elements is more
528 profitable than conflict bit vector for A. */
530 ira_conflict_vector_profitable_p (ira_allocno_t a, int num)
534 if (ALLOCNO_MAX (a) < ALLOCNO_MIN (a))
535 /* We prefer bit vector in such case because it does not result in
   allocation (MAX < MIN means the conflict-id range is empty).  */
/* nw = number of IRA_INT_TYPE words the bit vector would need to span
   the [MIN, MAX] conflict-id range.  */
539 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS) / IRA_INT_BITS;
/* Heuristic: choose the pointer vector only if its size (with slack
   for growth and the NULL terminator) beats 1.5x the bit vector.  */
540 return (2 * sizeof (ira_allocno_t) * (num + 1)
541 < 3 * nw * sizeof (IRA_INT_TYPE));
544 /* Allocates and initialize the conflict vector of A for NUM
545 conflicting allocnos. */
547 ira_allocate_allocno_conflict_vec (ira_allocno_t a, int num)
552 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
553 num++; /* for NULL end marker */
554 size = sizeof (ira_allocno_t) * num;
555 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
556 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
/* The vector starts empty: count 0, and (in an omitted line) vec[0]
   presumably set to the NULL end marker -- TODO confirm.  */
558 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
559 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
560 ALLOCNO_CONFLICT_VEC_P (a) = true;
563 /* Allocate and initialize the conflict bit vector of A. */
565 allocate_allocno_conflict_bit_vec (ira_allocno_t a)
569 ira_assert (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) == NULL);
/* One bit per conflict id in [ALLOCNO_MIN, ALLOCNO_MAX], rounded up
   to whole IRA_INT_TYPE words.  */
570 size = ((ALLOCNO_MAX (a) - ALLOCNO_MIN (a) + IRA_INT_BITS)
571 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
572 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) = ira_allocate (size);
573 memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0, size);
574 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) = size;
575 ALLOCNO_CONFLICT_VEC_P (a) = false;
578 /* Allocate and initialize the conflict vector or conflict bit vector
579 of A for NUM conflicting allocnos whatever is more profitable. */
581 ira_allocate_allocno_conflicts (ira_allocno_t a, int num)
583 if (ira_conflict_vector_profitable_p (a, num))
584 ira_allocate_allocno_conflict_vec (a, num)
586 allocate_allocno_conflict_bit_vec (a);
589 /* Add A2 to the conflicts of A1. */
591 add_to_allocno_conflicts (ira_allocno_t a1, ira_allocno_t a2)
596 if (ALLOCNO_CONFLICT_VEC_P (a1))
/* Vector representation: need room for A2 plus the NULL terminator
   (hence +2); grow by ~1.5x when full.  */
600 num = ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1) + 2;
601 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1)
602 >= num * sizeof (ira_allocno_t))
603 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
606 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
607 vec = (ira_allocno_t *) ira_allocate (size);
608 memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
609 sizeof (ira_allocno_t) * ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1));
610 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
611 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
612 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
/* Omitted lines presumably store A2 and the NULL terminator at the
   tail -- TODO confirm against the full source.  */
616 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a1)++;
/* Bit-vector representation: the vector covers conflict ids in
   [ALLOCNO_MIN (a1), ALLOCNO_MAX (a1)]; grow it at whichever end the
   new id falls outside.  */
620 int nw, added_head_nw, id;
623 id = ALLOCNO_CONFLICT_ID (a2);
624 vec = (IRA_INT_TYPE *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1);
625 if (ALLOCNO_MIN (a1) > id)
627 /* Expand head of the bit vector. */
628 added_head_nw = (ALLOCNO_MIN (a1) - id - 1) / IRA_INT_BITS + 1;
629 nw = (ALLOCNO_MAX (a1) - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
630 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
631 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) >= size)
/* Enough spare capacity: shift the existing words up and zero the
   new head words in place.  */
633 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
634 vec, nw * sizeof (IRA_INT_TYPE));
635 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
/* Otherwise reallocate with ~1.5x slack, copy the old words after the
   new head, and zero both the head and the trailing slack.  */
640 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
641 vec = (IRA_INT_TYPE *) ira_allocate (size);
642 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
643 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
644 nw * sizeof (IRA_INT_TYPE));
645 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
647 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
648 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
649 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
650 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
651 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
/* MIN moves down by whole words so existing bit positions stay
   word-aligned.  */
653 ALLOCNO_MIN (a1) -= added_head_nw * IRA_INT_BITS;
655 else if (ALLOCNO_MAX (a1) < id)
657 nw = (id - ALLOCNO_MIN (a1)) / IRA_INT_BITS + 1;
658 size = nw * sizeof (IRA_INT_TYPE);
659 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) < size)
661 /* Expand tail of the bit vector. */
662 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
663 vec = (IRA_INT_TYPE *) ira_allocate (size);
664 memcpy (vec, ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1),
665 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
666 memset ((char *) vec + ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1),
667 0, size - ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1));
668 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1));
669 ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a1) = vec;
670 ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a1) = size;
672 ALLOCNO_MAX (a1) = id;
/* Finally set the bit for ID relative to the (possibly updated)
   MIN/MAX window.  */
674 SET_MINMAX_SET_BIT (vec, id, ALLOCNO_MIN (a1), ALLOCNO_MAX (a1));
678 /* Add A1 to the conflicts of A2 and vise versa. */
680 ira_add_allocno_conflict (ira_allocno_t a1, ira_allocno_t a2)
682 add_to_allocno_conflicts (a1, a2);
683 add_to_allocno_conflicts (a2, a1);
686 /* Clear all conflicts of allocno A. */
688 clear_allocno_conflicts (ira_allocno_t a)
690 if (ALLOCNO_CONFLICT_VEC_P (a))
/* Vector form: resetting the count and re-writing the NULL end
   marker empties it without freeing storage.  */
692 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = 0;
693 ((ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a))[0] = NULL;
695 else if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY_SIZE (a) != 0)
/* Bit-vector form: zero only the words covering [MIN, MAX].  */
699 nw = (ALLOCNO_MAX (a) - ALLOCNO_MIN (a)) / IRA_INT_BITS + 1;
700 memset (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a), 0,
701 nw * sizeof (IRA_INT_TYPE));
705 /* The array used to find duplications in conflict vectors of
   allocnos.  */
707 static int *allocno_conflict_check;
709 /* The value used to mark allocation presence in conflict vector of
710 the current allocno. */
711 static int curr_allocno_conflict_check_tick;
713 /* Remove duplications in conflict vector of A. */
715 compress_allocno_conflict_vec (ira_allocno_t a)
717 ira_allocno_t *vec, conflict_a;
720 ira_assert (ALLOCNO_CONFLICT_VEC_P (a));
721 vec = (ira_allocno_t *) ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a);
/* Bumping the tick invalidates all marks from previous allocnos, so
   the check array never needs re-clearing between calls.  */
722 curr_allocno_conflict_check_tick++;
/* Compact in place: keep the first occurrence of each conflicting
   allocno, dropping later duplicates.  */
723 for (i = j = 0; (conflict_a = vec[i]) != NULL; i++)
725 if (allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
726 != curr_allocno_conflict_check_tick)
728 allocno_conflict_check[ALLOCNO_NUM (conflict_a)]
729 = curr_allocno_conflict_check_tick;
730 vec[j++] = conflict_a;
733 ALLOCNO_CONFLICT_ALLOCNOS_NUM (a) = j;
737 /* Remove duplications in conflict vectors of all allocnos. */
739 compress_conflict_vecs (void)
742 ira_allocno_iterator ai;
/* The scratch check array is allocated for this pass only and freed
   before returning; only vector-form conflicts need compression.  */
744 allocno_conflict_check
745 = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
746 memset (allocno_conflict_check, 0, sizeof (int) * ira_allocnos_num);
747 curr_allocno_conflict_check_tick = 0;
748 FOR_EACH_ALLOCNO (a, ai)
749 if (ALLOCNO_CONFLICT_VEC_P (a))
750 compress_allocno_conflict_vec (a);
751 ira_free (allocno_conflict_check);
754 /* This recursive function outputs allocno A and if it is a cap the
755 function outputs its members. */
757 ira_print_expanded_allocno (ira_allocno_t a)
761 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
/* Print the allocno's region: a bb index for a block node, otherwise
   the loop number.  */
762 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
763 fprintf (ira_dump_file, ",b%d", bb->index);
765 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
/* Recurse into the cap member so the whole cap chain is shown.  */
766 if (ALLOCNO_CAP_MEMBER (a) != NULL)
768 fprintf (ira_dump_file, ":");
769 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
771 fprintf (ira_dump_file, ")");
774 /* Create and return the cap representing allocno A in the
   parent loop tree node.  */
777 create_cap_allocno (ira_allocno_t a)
780 ira_loop_tree_node_t parent;
781 enum reg_class cover_class;
/* Caps are only created for uncoalesced allocnos.  */
783 ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
784 && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
785 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
/* cap_p = true: a cap does not enter the per-regno allocno chains.  */
786 cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
/* Copy A's mode, class, costs and statistics up into the cap.  */
787 ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
788 cover_class = ALLOCNO_COVER_CLASS (a);
789 ira_set_allocno_cover_class (cap, cover_class);
790 ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
791 ALLOCNO_CAP_MEMBER (cap) = a;
792 ALLOCNO_CAP (a) = cap;
793 ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
794 ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
795 ira_allocate_and_copy_costs
796 (&ALLOCNO_HARD_REG_COSTS (cap), cover_class, ALLOCNO_HARD_REG_COSTS (a));
797 ira_allocate_and_copy_costs
798 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), cover_class,
799 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
800 ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
801 ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
802 ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
803 ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
804 merge_hard_reg_conflicts (a, cap, false);
805 ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
806 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
808 fprintf (ira_dump_file, " Creating cap ");
809 ira_print_expanded_allocno (cap);
810 fprintf (ira_dump_file, "\n");
815 /* Create and return allocno live range with given attributes. */
817 ira_create_allocno_live_range (ira_allocno_t a, int start, int finish,
822 p = (live_range_t) pool_alloc (live_range_pool);
/* Omitted lines presumably fill in p's fields from the arguments and
   return p -- TODO confirm against the full source.  */
830 /* Copy allocno live range R and return the result. */
832 copy_allocno_live_range (live_range_t r)
836 p = (live_range_t) pool_alloc (live_range_pool);
/* Omitted lines presumably copy *r into *p and return p -- TODO
   confirm against the full source.  */
841 /* Copy allocno live range list given by its head R and return the
   result.  */
844 ira_copy_allocno_live_range_list (live_range_t r)
846 live_range_t p, first, last;
/* Walk the source list, copying each range and appending it to the
   first/last result list.  */
850 for (first = last = NULL; r != NULL; r = r->next)
852 p = copy_allocno_live_range (r);
862 /* Merge ranges R1 and R2 and returns the result. The function
863 maintains the order of ranges and tries to minimize number of the
   result ranges.  */
866 ira_merge_allocno_live_ranges (live_range_t r1, live_range_t r2)
868 live_range_t first, last, temp;
/* Both inputs are ordered lists; merge while both are non-empty,
   coalescing overlapping or adjacent ranges.  */
874 for (first = last = NULL; r1 != NULL && r2 != NULL;)
876 if (r1->start < r2->start)
/* Omitted lines presumably swap r1/r2 so r1 has the later start --
   TODO confirm against the full source.  */
882 if (r1->start <= r2->finish + 1)
884 /* Intersected ranges: merge r1 and r2 into r1. */
885 r1->start = r2->start;
886 if (r1->finish < r2->finish)
887 r1->finish = r2->finish;
/* The absorbed r2 element is freed after being unlinked.  */
890 ira_finish_allocno_live_range (temp);
893 /* To try to merge with subsequent ranges in r1. */
900 /* Add r1 to the result. */
911 /* To try to merge with subsequent ranges in r2. */
/* After the loop, the leftover tail of whichever list remains is
   appended to the result.  */
923 ira_assert (r1->next == NULL);
931 ira_assert (r2->next == NULL);
935 ira_assert (last->next == NULL);
940 /* Return TRUE if live ranges R1 and R2 intersect. */
942 ira_allocno_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
944 /* Remember the live ranges are always kept ordered. */
/* Advance whichever list is entirely before the other's head; any
   other case is an overlap.  */
945 while (r1 != NULL && r2 != NULL)
947 if (r1->start > r2->finish)
949 else if (r2->start > r1->finish)
/* Omitted lines presumably advance r1/r2 respectively, return true
   on overlap and false when a list is exhausted -- TODO confirm.  */
957 /* Free allocno live range R. */
959 ira_finish_allocno_live_range (live_range_t r)
961 pool_free (live_range_pool, r);
964 /* Free list of allocno live ranges starting with R. */
966 ira_finish_allocno_live_range_list (live_range_t r)
/* next_r caches r->next before the node is freed.  */
970 for (; r != NULL; r = next_r)
973 ira_finish_allocno_live_range (r);
977 /* Free updated register costs of allocno A. */
979 ira_free_allocno_updated_costs (ira_allocno_t a)
981 enum reg_class cover_class;
983 cover_class = ALLOCNO_COVER_CLASS (a);
/* Both updated cost vectors are freed (if present) and the fields
   reset to NULL so they can be lazily reallocated later.  */
984 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
985 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
986 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
987 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
988 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
990 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
993 /* Free the memory allocated for allocno A. */
995 finish_allocno (ira_allocno_t a)
997 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
/* Clear the global lookup slots first (removed allocnos are NULL in
   both arrays, per the comments on ira_allocnos).  */
999 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
1000 ira_conflict_id_allocno_map[ALLOCNO_CONFLICT_ID (a)] = NULL;
1001 if (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a) != NULL)
1002 ira_free (ALLOCNO_CONFLICT_ALLOCNO_ARRAY (a));
1003 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1004 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), cover_class);
1005 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1006 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), cover_class);
1007 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1008 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
1009 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1010 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1012 ira_finish_allocno_live_range_list (ALLOCNO_LIVE_RANGES (a));
/* Return the allocno object itself to its pool.  */
1013 pool_free (allocno_pool, a);
1016 /* Free the memory allocated for all allocnos. */
1018 finish_allocnos (void)
1021 ira_allocno_iterator ai;
/* Free each allocno, then the maps/vectors, then the pools that
   backed them.  */
1023 FOR_EACH_ALLOCNO (a, ai)
1025 ira_free (ira_regno_allocno_map);
1026 VEC_free (ira_allocno_t, heap, ira_conflict_id_allocno_map_vec);
1027 VEC_free (ira_allocno_t, heap, allocno_vec);
1028 free_alloc_pool (allocno_pool);
1029 free_alloc_pool (live_range_pool);
1034 /* Pools for copies. */
1035 static alloc_pool copy_pool;
1037 /* Vec containing references to all created copies. It is a
1038 container of array ira_copies. */
1039 static VEC(ira_copy_t,heap) *copy_vec;
1041 /* The function initializes data concerning allocno copies. */
1043 initiate_copies (void)
/* One copy at most per insn is a reasonable initial reservation,
   hence sizing by get_max_uid.  */
1046 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
1047 copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
1052 /* Return copy connecting A1 and A2 and originated from INSN of
1053 LOOP_TREE_NODE if any. */
1055 find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
1056 ira_loop_tree_node_t loop_tree_node)
1058 ira_copy_t cp, next_cp;
1059 ira_allocno_t another_a;
/* A1 may appear as either end of each copy on its list; pick the
   matching next pointer and the opposite allocno accordingly.  */
1061 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1063 if (cp->first == a1)
1065 next_cp = cp->next_first_allocno_copy;
1066 another_a = cp->second;
1068 else if (cp->second == a1)
1070 next_cp = cp->next_second_allocno_copy;
1071 another_a = cp->first;
/* A copy matches only if the other end, the insn, AND the region all
   agree.  */
1075 if (another_a == a2 && cp->insn == insn
1076 && cp->loop_tree_node == loop_tree_node)
1082 /* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
1083 SECOND, FREQ, CONSTRAINT_P, and INSN. */
1085 ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1086 bool constraint_p, rtx insn,
1087 ira_loop_tree_node_t loop_tree_node)
1091 cp = (ira_copy_t) pool_alloc (copy_pool);
1092 cp->num = ira_copies_num;
1094 cp->second = second;
1096 cp->constraint_p = constraint_p;
1098 cp->loop_tree_node = loop_tree_node;
/* Register the new copy and refresh the raw array view (pushing can
   reallocate the vector's storage).  */
1099 VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
1100 ira_copies = VEC_address (ira_copy_t, copy_vec);
1101 ira_copies_num = VEC_length (ira_copy_t, copy_vec);
1105 /* Attach a copy CP to allocnos involved into the copy. */
1107 ira_add_allocno_copy_to_list (ira_copy_t cp)
1109 ira_allocno_t first = cp->first, second = cp->second;
/* CP is pushed at the head of both allocnos' copy lists.  Each list
   node's prev/next fields depend on which end of that copy the list
   owner is, hence the first/second tests below.  */
1111 cp->prev_first_allocno_copy = NULL;
1112 cp->prev_second_allocno_copy = NULL;
1113 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
1114 if (cp->next_first_allocno_copy != NULL)
1116 if (cp->next_first_allocno_copy->first == first)
1117 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
1119 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
1121 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1122 if (cp->next_second_allocno_copy != NULL)
1124 if (cp->next_second_allocno_copy->second == second)
1125 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1127 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1129 ALLOCNO_COPIES (first) = cp;
1130 ALLOCNO_COPIES (second) = cp;
1133 /* Detach a copy CP from allocnos involved into the copy. */
1135 ira_remove_allocno_copy_from_list (ira_copy_t cp)
1137 ira_allocno_t first = cp->first, second = cp->second;
1138 ira_copy_t prev, next;
/* Unlink CP from FIRST's copy list.  The neighbor's matching
   prev/next field depends on which end of the neighboring copy FIRST
   occupies.  */
1140 next = cp->next_first_allocno_copy;
1141 prev = cp->prev_first_allocno_copy;
1143 ALLOCNO_COPIES (first) = next;
1144 else if (prev->first == first)
1145 prev->next_first_allocno_copy = next;
1147 prev->next_second_allocno_copy = next;
1150 if (next->first == first)
1151 next->prev_first_allocno_copy = prev;
1153 next->prev_second_allocno_copy = prev;
1155 cp->prev_first_allocno_copy = cp->next_first_allocno_copy = NULL;
/* Unlink CP from SECOND's copy list symmetrically.  */
1157 next = cp->next_second_allocno_copy;
1158 prev = cp->prev_second_allocno_copy;
1160 ALLOCNO_COPIES (second) = next;
1161 else if (prev->second == second)
1162 prev->next_second_allocno_copy = next;
1164 prev->next_first_allocno_copy = next;
1167 if (next->second == second)
1168 next->prev_second_allocno_copy = prev;
1170 next->prev_first_allocno_copy = prev;
1172 cp->prev_second_allocno_copy = cp->next_second_allocno_copy = NULL;
1175 /* Make a copy CP a canonical copy where number of the
1176 first allocno is less than the second one. */
/* If CP is already canonical the function returns immediately (the
   `return;' line is missing from this extraction).  Otherwise the
   two endpoint allocnos are exchanged (the `temp = cp->first; ...
   cp->second = temp;' lines are missing here) and the first/second
   halves of the list linkage are swapped so list membership stays
   consistent with the new endpoint roles.  */
1178 ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
1183 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1187 cp->first = cp->second;
/* Swap the prev pointers of the two list embeddings.  */
1190 temp_cp = cp->prev_first_allocno_copy;
1191 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1192 cp->prev_second_allocno_copy = temp_cp;
/* And the next pointers.  */
1194 temp_cp = cp->next_first_allocno_copy;
1195 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1196 cp->next_second_allocno_copy = temp_cp;
1199 /* Create (or update frequency if the copy already exists) and return
1200 the copy of allocnos FIRST and SECOND with frequency FREQ
1201 corresponding to move insn INSN (if any) and originated from
1204 ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1205 bool constraint_p, rtx insn,
1206 ira_loop_tree_node_t loop_tree_node)
/* If an equivalent copy already exists, only its frequency is bumped
   and it is returned (the `cp->freq += freq; return cp;' lines are
   missing from this extraction).  */
1210 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
/* Otherwise build a fresh copy, link it onto both allocnos' copy
   lists, and canonicalize the endpoint order.  */
1215 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1217 ira_assert (first != NULL && second != NULL);
1218 ira_add_allocno_copy_to_list (cp);
1219 ira_swap_allocno_copy_ends_if_necessary (cp);
1223 /* Print info about copy CP into file F. */
1225 print_copy (FILE *f, ira_copy_t cp)
1227 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
1228 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
1229 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1231 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
1234 /* Print info about copy CP into stderr. */
/* Debugger convenience wrapper around print_copy.  */
1236 ira_debug_copy (ira_copy_t cp)
1238 print_copy (stderr, cp);
1241 /* Print info about all copies into file F. */
1243 print_copies (FILE *f)
1246 ira_copy_iterator ci;
1248 FOR_EACH_COPY (cp, ci)
1252 /* Print info about all copies into stderr. */
/* Debugger convenience wrapper around print_copies.  */
1254 ira_debug_copies (void)
1256 print_copies (stderr);
1259 /* Print info about copies involving allocno A into file F. */
/* Walks A's copy list; because each copy is threaded on two lists,
   the correct `next' pointer and the opposite endpoint depend on
   whether A is the copy's first or second allocno.
   NOTE(review): the `if (cp->first == a)' line, the unreachable-else
   arm, and the closing newline fprintf are missing from this
   extraction; code lines are kept byte-identical.  */
1261 print_allocno_copies (FILE *f, ira_allocno_t a)
1263 ira_allocno_t another_a;
1264 ira_copy_t cp, next_cp;
1266 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1267 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
/* A is the first endpoint of CP (guard line missing here).  */
1271 next_cp = cp->next_first_allocno_copy;
1272 another_a = cp->second;
1274 else if (cp->second == a)
1276 next_cp = cp->next_second_allocno_copy;
1277 another_a = cp->first;
1281 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1282 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1287 /* Print info about copies involving allocno A into stderr. */
/* Debugger convenience wrapper around print_allocno_copies.  */
1289 ira_debug_allocno_copies (ira_allocno_t a)
1291 print_allocno_copies (stderr, a);
1294 /* The function frees memory allocated for copy CP. */
/* Returns CP's storage to the copy allocation pool.  */
1296 finish_copy (ira_copy_t cp)
1298 pool_free (copy_pool, cp);
1302 /* Free memory allocated for all copies. */
/* Frees each copy via finish_copy (the loop-body call line is
   missing from this extraction), then releases the vector that
   backed ira_copies and the pool itself.  */
1304 finish_copies (void)
1307 ira_copy_iterator ci;
1309 FOR_EACH_COPY (cp, ci)
1311 VEC_free (ira_copy_t, heap, copy_vec);
1312 free_alloc_pool (copy_pool);
1317 /* Pools for cost vectors. It is defined only for cover classes. */
1318 static alloc_pool cost_vector_pool[N_REG_CLASSES];
1320 /* The function initiates work with hard register cost vectors. It
1321 creates allocation pool for each cover class. */
/* Each pool's element size is one int per hard register in the cover
   class (the `sizeof (int)' multiplicand line is missing from this
   extraction).  */
1323 initiate_cost_vectors (void)
1326 enum reg_class cover_class;
1328 for (i = 0; i < ira_reg_class_cover_size; i++)
1330 cover_class = ira_reg_class_cover[i];
1331 cost_vector_pool[cover_class]
1332 = create_alloc_pool ("cost vectors",
1334 * ira_class_hard_regs_num[cover_class],
1339 /* Allocate and return a cost vector VEC for COVER_CLASS. */
/* The returned vector has ira_class_hard_regs_num[COVER_CLASS]
   entries; caller releases it with ira_free_cost_vector.  */
1341 ira_allocate_cost_vector (enum reg_class cover_class)
1343 return (int *) pool_alloc (cost_vector_pool[cover_class]);
1346 /* Free a cost vector VEC for COVER_CLASS. */
1348 ira_free_cost_vector (int *vec, enum reg_class cover_class)
1350 ira_assert (vec != NULL);
1351 pool_free (cost_vector_pool[cover_class], vec);
1354 /* Finish work with hard register cost vectors. Release allocation
1355 pool for each cover class. */
1357 finish_cost_vectors (void)
1360 enum reg_class cover_class;
1362 for (i = 0; i < ira_reg_class_cover_size; i++)
1364 cover_class = ira_reg_class_cover[i];
1365 free_alloc_pool (cost_vector_pool[cover_class]);
1371 /* The current loop tree node and its regno allocno map. */
1372 ira_loop_tree_node_t ira_curr_loop_tree_node;
1373 ira_allocno_t *ira_curr_regno_allocno_map;
1375 /* This recursive function traverses loop tree with root LOOP_NODE
1376 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1377 correspondingly in preorder and postorder. The function sets up
1378 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1379 basic block nodes of LOOP_NODE is also processed (before its
1382 ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1383 void (*preorder_func) (ira_loop_tree_node_t),
1384 void (*postorder_func) (ira_loop_tree_node_t))
1386 ira_loop_tree_node_t subloop_node;
/* Only loop nodes (not BB nodes) may be roots of a traversal.  */
1388 ira_assert (loop_node->bb == NULL);
1389 ira_curr_loop_tree_node = loop_node;
1390 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1392 if (preorder_func != NULL)
1393 (*preorder_func) (loop_node);
/* Visit this node's basic-block children; presumably guarded by
   `if (bb_p)' in the original -- that guard line is missing from
   this extraction.  BB nodes get both callbacks but no recursion.  */
1396 for (subloop_node = loop_node->children;
1397 subloop_node != NULL;
1398 subloop_node = subloop_node->next)
1399 if (subloop_node->bb != NULL)
1401 if (preorder_func != NULL)
1402 (*preorder_func) (subloop_node);
1404 if (postorder_func != NULL)
1405 (*postorder_func) (subloop_node);
/* Recurse into subloops.  */
1408 for (subloop_node = loop_node->subloops;
1409 subloop_node != NULL;
1410 subloop_node = subloop_node->subloop_next)
1412 ira_assert (subloop_node->bb == NULL);
1413 ira_traverse_loop_tree (bb_p, subloop_node,
1414 preorder_func, postorder_func);
/* Restore the "current node" globals clobbered by the recursion
   before running the postorder callback on this node.  */
1417 ira_curr_loop_tree_node = loop_node;
1418 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1420 if (postorder_func != NULL)
1421 (*postorder_func) (loop_node);
1426 /* The basic block currently being processed. */
1427 static basic_block curr_bb;
1429 /* This recursive function creates allocnos corresponding to
1430 pseudo-registers containing in X. True OUTPUT_P means that X is
/* (rest of the original comment is missing: OUTPUT_P means X is an
   lvalue / written by the insn.)  Dispatches on the rtx code of X,
   recursing into sub-rtxes with the appropriate OUTPUT_P.
   NOTE(review): the `if (code == REG)' guard, local declarations, and
   `return;' lines are missing from this extraction.  */
1433 create_insn_allocnos (rtx x, bool output_p)
1437 enum rtx_code code = GET_CODE (x);
/* A pseudo register reference: create its allocno in the current
   loop-tree node on first sight, and account the reference.  */
1443 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1447 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
1448 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1450 ALLOCNO_NREFS (a)++;
1451 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
/* Writes mark the regno as modified in this region (presumably under
   `if (output_p)' -- that guard line is missing here).  */
1453 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1457 else if (code == SET)
1459 create_insn_allocnos (SET_DEST (x), true);
1460 create_insn_allocnos (SET_SRC (x), false);
1463 else if (code == CLOBBER)
1465 create_insn_allocnos (XEXP (x, 0), true);
/* A MEM's address is always a use, even on the output side.  */
1468 else if (code == MEM)
1470 create_insn_allocnos (XEXP (x, 0), false);
/* Auto-inc/dec addresses both read and write their register.  */
1473 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
1474 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1476 create_insn_allocnos (XEXP (x, 0), true);
1477 create_insn_allocnos (XEXP (x, 0), false);
/* Generic fallback: walk all 'e' and 'E' operands.  */
1481 fmt = GET_RTX_FORMAT (code);
1482 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1485 create_insn_allocnos (XEXP (x, i), output_p);
1486 else if (fmt[i] == 'E')
1487 for (j = 0; j < XVECLEN (x, i); j++)
1488 create_insn_allocnos (XVECEXP (x, i, j), output_p);
1492 /* Create allocnos corresponding to pseudo-registers living in the
1493 basic block represented by the corresponding loop tree node
/* (BB_NODE.)  Scans the block's real insns backwards so that
   same-regno allocnos end up correctly ordered on the
   next_regno_allocno chains, then creates allocnos for pseudos that
   are live into the block but never referenced in it.  */
1496 create_bb_allocnos (ira_loop_tree_node_t bb_node)
1503 curr_bb = bb = bb_node->bb;
1504 ira_assert (bb != NULL);
1505 FOR_BB_INSNS_REVERSE (bb, insn)
1506 if (NONDEBUG_INSN_P (insn))
1507 create_insn_allocnos (PATTERN (insn), false);
1508 /* It might be an allocno living through from one subloop to another.  */
1510 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
1511 if (ira_curr_regno_allocno_map[i] == NULL)
1512 ira_create_allocno (i, false, ira_curr_loop_tree_node)
1515 /* Create allocnos corresponding to pseudo-registers living on edge E
1516 (a loop entry or exit). Also mark the allocnos as living on the
/* (... the loop border.)  A pseudo is live across E when it is live
   out of E->src and live into E->dest.  */
1519 create_loop_allocnos (edge e)
1522 bitmap live_in_regs, border_allocnos;
1524 ira_loop_tree_node_t parent;
1526 live_in_regs = DF_LR_IN (e->dest);
1527 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
1528 EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
1529 FIRST_PSEUDO_REGISTER, i, bi)
1530 if (bitmap_bit_p (live_in_regs, i))
1532 if (ira_curr_regno_allocno_map[i] == NULL)
1534 /* The order of creations is important for right
1535 ira_regno_allocno_map. */
/* Create the parent's allocno first so the regno-allocno chain lists
   outer-region allocnos before inner-region ones.  */
1536 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1537 && parent->regno_allocno_map[i] == NULL)
1538 ira_create_allocno (i, false, parent);
1539 ira_create_allocno (i, false, ira_curr_loop_tree_node);
/* Record the allocno as living on the region border.  */
1541 bitmap_set_bit (border_allocnos,
1542 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1546 /* Create allocnos corresponding to pseudo-registers living in loop
1547 represented by the corresponding loop tree node LOOP_NODE. This
1548 function is called by ira_traverse_loop_tree. */
/* BB nodes delegate to create_bb_allocnos; non-root loop nodes create
   border allocnos on every loop entry edge (all header preds except
   the latch back edge) and on every loop exit edge.  */
1550 create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1552 if (loop_node->bb != NULL)
1553 create_bb_allocnos (loop_node);
1554 else if (loop_node != ira_loop_tree_root)
1559 VEC (edge, heap) *edges;
/* Entry edges: header predecessors other than the latch.  */
1561 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1562 if (e->src != loop_node->loop->latch)
1563 create_loop_allocnos (e);
/* Exit edges.  */
1565 edges = get_loop_exit_edges (loop_node->loop);
1566 for (i = 0; VEC_iterate (edge, edges, i, e); i++)
1567 create_loop_allocnos (e);
1568 VEC_free (edge, heap, edges);
1572 /* Propagate information about allocnos modified inside the loop given
1573 by its LOOP_TREE_NODE to its parent. */
/* The root has no parent and is skipped (the `return;' after the root
   check is missing from this extraction).  Used as a postorder
   callback, so modified-regno sets accumulate bottom-up.  */
1575 propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1577 if (loop_tree_node == ira_loop_tree_root)
1579 ira_assert (loop_tree_node->bb == NULL);
1580 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1581 loop_tree_node->modified_regnos);
1584 /* Propagate new info about allocno A (see comments about accumulated
1585 info in allocno definition) to the corresponding allocno on upper
1586 loop tree level. So allocnos on upper levels accumulate
1587 information about the corresponding allocnos in nested regions.
1588 The new info means allocno info finally calculated in this
/* (... file.)  Only meaningful for regional allocation; for other
   -fira-region modes the function returns early (the `return;' line
   is missing from this extraction).  */
1591 propagate_allocno_info (void)
1594 ira_allocno_t a, parent_a;
1595 ira_loop_tree_node_t parent;
1596 enum reg_class cover_class;
1598 if (flag_ira_region != IRA_REGION_ALL
1599 && flag_ira_region != IRA_REGION_MIXED)
/* Walk regnos high-to-low; each regno's allocno chain lists inner
   allocnos after outer ones, so child info flows to the parent.  */
1601 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1602 for (a = ira_regno_allocno_map[i];
1604 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1605 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1606 && (parent_a = parent->regno_allocno_map[i]) != NULL
1607 /* There are no caps yet at this point. So use
1608 border_allocnos to find allocnos for the propagation. */
1609 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
/* Accumulate everything: spill badness, reference counts,
   frequencies, conflicts, call info, pressure info and costs.  */
1612 if (! ALLOCNO_BAD_SPILL_P (a))
1613 ALLOCNO_BAD_SPILL_P (parent_a) = false;
1614 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1615 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1616 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1617 merge_hard_reg_conflicts (a, parent_a, true);
1618 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1619 += ALLOCNO_CALLS_CROSSED_NUM (a);
1620 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1621 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1622 cover_class = ALLOCNO_COVER_CLASS (a);
1623 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1624 ira_allocate_and_accumulate_costs
1625 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1626 ALLOCNO_HARD_REG_COSTS (a));
1627 ira_allocate_and_accumulate_costs
1628 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1630 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1631 ALLOCNO_COVER_CLASS_COST (parent_a)
1632 += ALLOCNO_COVER_CLASS_COST (a);
1633 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1637 /* Create allocnos corresponding to pseudo-registers in the current
1638 function. Traverse the loop tree for this. */
1640 create_allocnos (void)
1642 /* We need to process BB first to correctly link allocnos by member
1643 next_regno_allocno. */
1644 ira_traverse_loop_tree (true, ira_loop_tree_root,
1645 create_loop_tree_node_allocnos, NULL);
/* Second pass: accumulate modified-regno sets bottom-up.  */
1647 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
1648 propagate_modified_regnos);
1653 /* This page contains functions to remove some regions from separate
1654 register allocation. We remove regions whose separate allocation
1655 will hardly improve the result. As a result we speed up regional
1656 register allocation. */
1658 /* The function changes allocno in range list given by R onto A. */
/* NOTE(review): the loop body line is missing from this extraction;
   presumably it assigns A to each range's allocno field
   (`r->allocno = a;') -- confirm against the upstream source.  */
1660 change_allocno_in_range_list (live_range_t r, ira_allocno_t a)
1662 for (; r != NULL; r = r->next)
1666 /* Move all live ranges associated with allocno FROM to allocno TO. */
/* Ownership of the range list transfers: FROM's list is retargeted
   to TO, merged into TO's existing ranges, and FROM is left with
   none.  */
1668 move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1670 live_range_t lr = ALLOCNO_LIVE_RANGES (from);
1672 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1674 fprintf (ira_dump_file,
1675 " Moving ranges of a%dr%d to a%dr%d: ",
1676 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1677 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1678 ira_print_live_range_list (ira_dump_file, lr);
1680 change_allocno_in_range_list (lr, to);
1681 ALLOCNO_LIVE_RANGES (to)
1682 = ira_merge_allocno_live_ranges (lr, ALLOCNO_LIVE_RANGES (to));
1683 ALLOCNO_LIVE_RANGES (from) = NULL;
1686 /* Copy all live ranges associated with allocno FROM to allocno TO. */
/* Like the move variant, but FROM keeps its list: a deep copy of the
   ranges is made first and only the copy is retargeted and merged.  */
1688 copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1690 live_range_t lr = ALLOCNO_LIVE_RANGES (from);
1692 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1694 fprintf (ira_dump_file,
1695 " Copying ranges of a%dr%d to a%dr%d: ",
1696 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1697 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1698 ira_print_live_range_list (ira_dump_file, lr);
1700 lr = ira_copy_allocno_live_range_list (lr);
1701 change_allocno_in_range_list (lr, to);
1702 ALLOCNO_LIVE_RANGES (to)
1703 = ira_merge_allocno_live_ranges (lr, ALLOCNO_LIVE_RANGES (to));
1706 /* Return TRUE if NODE represents a loop with low register
/* (... pressure.)  BB nodes never qualify, and a loop qualifies only
   when, for every cover class, its recorded pressure does not exceed
   the available registers of that class.  NOTE(review): the
   `return false;'/`return true;' lines are missing from this
   extraction.  */
1709 low_pressure_loop_node_p (ira_loop_tree_node_t node)
1712 enum reg_class cover_class;
1714 if (node->bb != NULL)
1717 for (i = 0; i < ira_reg_class_cover_size; i++)
1719 cover_class = ira_reg_class_cover[i];
1720 if (node->reg_pressure[cover_class]
1721 > ira_available_class_regs[cover_class])
1727 /* Sort loops for marking them for removal. We put already marked
1728 loops first, then less frequent loops next, and then outer loops
/* (... next.)  qsort comparator over ira_loop_tree_node_t*.
   NOTE(review): the `int diff;' declaration and the `return -1;' /
   `return 1;' lines after the to_remove_p tests are missing from
   this extraction.  */
1731 loop_compare_func (const void *v1p, const void *v2p)
1734 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
1735 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
/* The root is never sorted here, so parents must exist.  */
1737 ira_assert (l1->parent != NULL && l2->parent != NULL);
1738 if (l1->to_remove_p && ! l2->to_remove_p)
1740 if (! l1->to_remove_p && l2->to_remove_p)
1742 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
1744 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
1746 /* Make sorting stable. */
1747 return l1->loop->num - l2->loop->num;
1751 /* Mark loops which should be removed from regional allocation. We
1752 remove a loop with low register pressure inside another loop with
1753 register pressure. In this case a separate allocation of the loop
1754 hardly helps (for irregular register file architecture it could
1755 help by choosing a better hard register in the loop but we prefer
1756 faster allocation even in this case). We also remove cheap loops
1757 if there are more than IRA_MAX_LOOPS_NUM of them. */
1759 mark_loops_for_removal (void)
1762 ira_loop_tree_node_t *sorted_loops;
/* Collect every live loop node into a scratch array.  */
1766 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
1767 * VEC_length (loop_p,
1769 for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1770 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1772 if (ira_loop_nodes[i].parent == NULL)
1774 /* Don't remove the root. */
1775 ira_loop_nodes[i].to_remove_p = false;
1778 sorted_loops[n++] = &ira_loop_nodes[i];
/* A loop is removable when both it and its parent have low
   register pressure.  */
1779 ira_loop_nodes[i].to_remove_p
1780 = (low_pressure_loop_node_p (ira_loop_nodes[i].parent)
1781 && low_pressure_loop_node_p (&ira_loop_nodes[i]));
/* Sort so the cheapest/most-removable loops come first, then mark
   enough of them to get under the IRA_MAX_LOOPS_NUM budget.  */
1783 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
1784 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
1786 sorted_loops[i]->to_remove_p = true;
1787 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1790 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
1791 sorted_loops[i]->loop->num, sorted_loops[i]->loop->header->index,
1792 sorted_loops[i]->loop->header->frequency,
1793 loop_depth (sorted_loops[i]->loop),
1794 low_pressure_loop_node_p (sorted_loops[i]->parent)
1795 && low_pressure_loop_node_p (sorted_loops[i])
1796 ? "low pressure" : "cheap loop");
1798 ira_free (sorted_loops);
1801 /* Mark all loops but root for removing. */
/* Used when regional allocation is disabled: only the function-wide
   root region survives.  */
1803 mark_all_loops_for_removal (void)
1808 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1809 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1811 if (ira_loop_nodes[i].parent == NULL)
1813 /* Don't remove the root. */
1814 ira_loop_nodes[i].to_remove_p = false;
1817 ira_loop_nodes[i].to_remove_p = true;
1818 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1821 " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
1822 ira_loop_nodes[i].loop->num,
1823 ira_loop_nodes[i].loop->header->index,
1824 ira_loop_nodes[i].loop->header->frequency,
1825 loop_depth (ira_loop_nodes[i].loop));
1829 /* Definition of vector of loop tree nodes. */
1830 DEF_VEC_P(ira_loop_tree_node_t);
1831 DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1833 /* Vec containing references to all removed loop tree nodes. */
1834 static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1836 /* Vec containing references to all children of loop tree nodes. */
1837 static VEC(ira_loop_tree_node_t,heap) *children_vec;
1839 /* Remove subregions of NODE if their separate allocation will not
1840 improve the result. */
/* Recursively splices out loop nodes marked to_remove_p: surviving
   children of removed subtrees are re-attached to the nearest kept
   ancestor via children_vec; removed nodes are parked on
   removed_loop_vec for later finalization.  (NOTE: the historical
   misspelling "uneccesary" in the name must stay -- callers use it.)  */
1842 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
1846 ira_loop_tree_node_t subnode;
1848 remove_p = node->to_remove_p;
/* Keep NODE itself on the children list unless it is being removed
   (presumably guarded by `if (! remove_p)' -- line missing here).  */
1850 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
1851 start = VEC_length (ira_loop_tree_node_t, children_vec);
1852 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
1853 if (subnode->bb == NULL)
1854 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
1856 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
1857 node->children = node->subloops = NULL;
1860 VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
/* Rebuild the children/subloops lists of the surviving NODE from the
   entries pushed above (order is reversed by the pop loop).  */
1863 while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
1865 subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
1866 subnode->parent = node;
1867 subnode->next = node->children;
1868 node->children = subnode;
1869 if (subnode->bb == NULL)
1871 subnode->subloop_next = node->subloops;
1872 node->subloops = subnode;
1877 /* Return TRUE if NODE is inside PARENT. */
/* Walks up the loop tree from NODE; the `if (node == parent) return
   true;' body and final `return false;' lines are missing from this
   extraction.  */
1879 loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
1881 for (node = node->parent; node != NULL; node = node->parent)
1887 /* Sort allocnos according to their order in regno allocno list. */
/* qsort comparator: an allocno from an inner region sorts after the
   one from the enclosing region.  The `return -1;'/`return 1;' lines
   after the loop_is_inside_p tests are missing from this extraction.  */
1889 regno_allocno_order_compare_func (const void *v1p, const void *v2p)
1891 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
1892 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
1893 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
1894 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
1896 if (loop_is_inside_p (n1, n2))
1898 else if (loop_is_inside_p (n2, n1))
1900 /* If allocnos are equally good, sort by allocno numbers, so that
1901 the results of qsort leave nothing to chance. We put allocnos
1902 with higher number first in the list because it is the original
1903 order for allocnos from loops on the same levels. */
1904 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
1907 /* This array is used to sort allocnos to restore allocno order in
1908 the regno allocno list. */
1909 static ira_allocno_t *regno_allocnos;
1911 /* Restore allocno order for REGNO in the regno allocno list. */
/* Gathers REGNO's allocno chain into the scratch array, sorts it by
   region nesting (outer first), then relinks the chain and the map
   head.  Caller guarantees regno_allocnos is allocated and the chain
   is non-empty.  */
1913 ira_rebuild_regno_allocno_list (int regno)
1918 for (n = 0, a = ira_regno_allocno_map[regno];
1920 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1921 regno_allocnos[n++] = a;
1923 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
1924 regno_allocno_order_compare_func);
/* Re-thread the next_regno_allocno chain in sorted order.  */
1925 for (i = 1; i < n; i++)
1926 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
1927 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
1928 ira_regno_allocno_map[regno] = regno_allocnos[0];
1929 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1930 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
1933 /* Propagate info about allocno FROM_A to allocno A. */
/* Accumulates conflicts, reference/frequency/call statistics and all
   cost vectors of FROM_A into A.  Used when FROM_A's region is being
   removed and A is its representative in a surviving region.  */
1935 propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
1937 enum reg_class cover_class;
1939 merge_hard_reg_conflicts (from_a, a, false);
1940 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
1941 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
1942 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
1943 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
1944 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
1945 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
/* bad_spill_p may only weaken (true -> false), never strengthen.  */
1946 if (! ALLOCNO_BAD_SPILL_P (from_a))
1947 ALLOCNO_BAD_SPILL_P (a) = false;
1948 cover_class = ALLOCNO_COVER_CLASS (from_a);
1949 ira_assert (cover_class == ALLOCNO_COVER_CLASS (a));
1950 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1951 ALLOCNO_HARD_REG_COSTS (from_a));
1952 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1954 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
1955 ALLOCNO_COVER_CLASS_COST (a) += ALLOCNO_COVER_CLASS_COST (from_a);
1956 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
1959 /* Remove allocnos from loops removed from the allocation
/* (... consideration.)  For each regno chain: an allocno living in a
   removed region is either migrated up to the nearest kept ancestor
   (when that ancestor has no allocno for the regno yet) or merged
   into the ancestor's existing allocno and destroyed.  Chains that
   got reordered by migration are rebuilt afterwards.  */
1962 remove_unnecessary_allocnos (void)
1965 bool merged_p, rebuild_p;
1966 ira_allocno_t a, prev_a, next_a, parent_a;
1967 ira_loop_tree_node_t a_node, parent;
1970 regno_allocnos = NULL;
1971 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
1974 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
1978 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
1979 a_node = ALLOCNO_LOOP_TREE_NODE (a);
1980 if (! a_node->to_remove_p)
/* Find the closest surviving ancestor with (or without) an allocno
   for this regno.  */
1984 for (parent = a_node->parent;
1985 (parent_a = parent->regno_allocno_map[regno]) == NULL
1986 && parent->to_remove_p;
1987 parent = parent->parent)
1989 if (parent_a == NULL)
1991 /* There are no allocnos with the same regno in
1992 upper region -- just move the allocno to the
/* (... upper region.)  */
1995 ALLOCNO_LOOP_TREE_NODE (a) = parent;
1996 parent->regno_allocno_map[regno] = a;
1997 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
2002 /* Remove the allocno and update info of allocno in
2003 the upper region. */
/* Unlink A from the regno chain (head or interior position).  */
2005 ira_regno_allocno_map[regno] = next_a;
2007 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
2008 move_allocno_live_ranges (a, parent_a);
2010 propagate_some_info_from_allocno (parent_a, a);
2011 /* Remove it from the corresponding regno allocno
2012 map to avoid info propagation of subsequent
2013 allocno into this already removed allocno. */
2014 a_node->regno_allocno_map[regno] = NULL;
2020 /* We need to restore the order in regno allocno list. */
/* Lazily allocate the scratch array used by the rebuild.  */
2022 if (regno_allocnos == NULL)
2024 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
2025 * ira_allocnos_num);
2026 ira_rebuild_regno_allocno_list (regno);
/* Live ranges changed if anything was merged (presumably guarded by
   `if (merged_p)' -- line missing here).  */
2030 ira_rebuild_start_finish_chains ();
2031 if (regno_allocnos != NULL)
2032 ira_free (regno_allocnos);
2035 /* Remove allocnos from all loops but the root. */
/* Flat variant of region removal used when ALL loops are dropped:
   every non-cap, non-root allocno is either promoted to the root
   region or merged into the root's existing allocno for its regno.
   Afterwards the per-regno chains are collapsed to a single root
   allocno each, and "total" conflict/stack-reg info becomes the
   plain info.  */
2037 remove_low_level_allocnos (void)
2040 bool merged_p, propagate_p;
2041 ira_allocno_t a, top_a;
2042 ira_loop_tree_node_t a_node, parent;
2043 ira_allocno_iterator ai;
2046 FOR_EACH_ALLOCNO (a, ai)
2048 a_node = ALLOCNO_LOOP_TREE_NODE (a);
/* Root allocnos and caps stay untouched (a `continue;' follows in
   the original; that line is missing here).  */
2049 if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
2051 regno = ALLOCNO_REGNO (a);
2052 if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
2054 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2055 ira_loop_tree_root->regno_allocno_map[regno] = a;
2058 propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
2059 /* Remove the allocno and update info of allocno in the upper
/* (... region.)  */
2061 move_allocno_live_ranges (a, top_a);
2064 propagate_some_info_from_allocno (top_a, a);
/* Consistency checks on what survived.  */
2066 FOR_EACH_ALLOCNO (a, ai)
2068 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2069 if (a_node == ira_loop_tree_root)
2071 parent = a_node->parent;
2072 regno = ALLOCNO_REGNO (a);
2073 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2074 ira_assert (ALLOCNO_CAP (a) != NULL);
2075 else if (ALLOCNO_CAP (a) == NULL)
2076 ira_assert (parent->regno_allocno_map[regno] != NULL);
2078 FOR_EACH_ALLOCNO (a, ai)
2080 regno = ALLOCNO_REGNO (a);
2081 if (ira_loop_tree_root->regno_allocno_map[regno] == a)
2083 ira_regno_allocno_map[regno] = a;
2084 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
2085 ALLOCNO_CAP_MEMBER (a) = NULL;
/* With one region, accumulated ("total") conflict info is the only
   conflict info.  */
2086 COPY_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
2087 ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
2089 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2090 ALLOCNO_NO_STACK_REG_P (a) = true;
2097 ira_rebuild_start_finish_chains ();
2100 /* Remove loops from consideration. We remove all loops except for
2101 root if ALL_P or loops for which a separate allocation will not
2102 improve the result. We have to do this after allocno creation and
2103 their costs and cover class evaluation because only after that the
2104 register pressure can be known and is calculated. */
2106 remove_unnecessary_regions (bool all_p)
/* Phase 1: decide which loop nodes go.  */
2109 mark_all_loops_for_removal ();
2111 mark_loops_for_removal ();
/* Phase 2: splice marked nodes out of the loop tree; both scratch
   vectors are sized for the worst case of all BBs plus all loops.  */
2113 = VEC_alloc (ira_loop_tree_node_t, heap,
2114 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2116 = VEC_alloc (ira_loop_tree_node_t, heap,
2117 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2118 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
2119 VEC_free (ira_loop_tree_node_t, heap, children_vec);
/* Phase 3: migrate/merge allocnos out of the removed regions (the
   fast path when everything but the root goes, the general path
   otherwise), then finalize the removed nodes.  */
2121 remove_low_level_allocnos ();
2123 remove_unnecessary_allocnos ();
2124 while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
2125 finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
2126 VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
2131 /* At this point true value of allocno attribute bad_spill_p means
2132 that there is an insn where allocno occurs and where the allocno
2133 can not be used as memory. The function updates the attribute, now
2134 it can be true only for allocnos which can not be used as memory in
2135 an insn and in whose live ranges there is other allocno deaths.
2136 Spilling allocnos with true value will not improve the code because
2137 it will not make other allocnos colorable and additional reloads
2138 for the corresponding pseudo will be generated in reload pass for
2139 each insn it occurs.
2141 This is a trick mentioned in one classic article of Chaitin etc
2142 which is frequently omitted in other implementations of RA based on
2145 update_bad_spill_attribute (void)
2149 ira_allocno_iterator ai;
2151 enum reg_class cover_class;
2152 bitmap_head dead_points[N_REG_CLASSES];
2154 for (i = 0; i < ira_reg_class_cover_size; i++)
2156 cover_class = ira_reg_class_cover[i];
2157 bitmap_initialize (&dead_points[cover_class], ®_obstack);
2159 FOR_EACH_ALLOCNO (a, ai)
2161 cover_class = ALLOCNO_COVER_CLASS (a);
2162 if (cover_class == NO_REGS)
2164 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2165 bitmap_set_bit (&dead_points[cover_class], r->finish);
2167 FOR_EACH_ALLOCNO (a, ai)
2169 cover_class = ALLOCNO_COVER_CLASS (a);
2170 if (cover_class == NO_REGS)
2172 if (! ALLOCNO_BAD_SPILL_P (a))
2174 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2176 for (i = r->start + 1; i < r->finish; i++)
2177 if (bitmap_bit_p (&dead_points[cover_class], i))
2183 ALLOCNO_BAD_SPILL_P (a) = false;
2185 for (i = 0; i < ira_reg_class_cover_size; i++)
2187 cover_class = ira_reg_class_cover[i];
2188 bitmap_clear (&dead_points[cover_class]);
2194 /* Set up minimal and maximal live range points for allocnos. */
/* First derives each allocno's [MIN, MAX] from its own (sorted) range
   list, then widens caps and parent allocnos so they cover their
   members/children.  Range lists store the latest range first, so
   the list head gives MAX and the list tail gives MIN.  */
2196 setup_min_max_allocno_live_range_point (void)
2199 ira_allocno_t a, parent_a, cap;
2200 ira_allocno_iterator ai;
2202 ira_loop_tree_node_t parent;
2204 FOR_EACH_ALLOCNO (a, ai)
2206 r = ALLOCNO_LIVE_RANGES (a);
/* Empty range list is skipped (the `if (r == NULL) continue;' lines
   are missing from this extraction).  */
2209 ALLOCNO_MAX (a) = r->finish;
2210 for (; r->next != NULL; r = r->next)
2212 ALLOCNO_MIN (a) = r->start;
/* Accumulate ranges bottom-up along the regno allocno chains.  */
2214 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2215 for (a = ira_regno_allocno_map[i];
2217 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
/* Allocno without ranges -- nothing to propagate.  */
2219 if (ALLOCNO_MAX (a) < 0)
2221 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2222 /* Accumulation of range info. */
2223 if (ALLOCNO_CAP (a) != NULL)
2225 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
2227 if (ALLOCNO_MAX (cap) < ALLOCNO_MAX (a))
2228 ALLOCNO_MAX (cap) = ALLOCNO_MAX (a);
2229 if (ALLOCNO_MIN (cap) > ALLOCNO_MIN (a))
2230 ALLOCNO_MIN (cap) = ALLOCNO_MIN (a);
2234 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
2236 parent_a = parent->regno_allocno_map[i];
2237 if (ALLOCNO_MAX (parent_a) < ALLOCNO_MAX (a))
2238 ALLOCNO_MAX (parent_a) = ALLOCNO_MAX (a);
2239 if (ALLOCNO_MIN (parent_a) > ALLOCNO_MIN (a))
2240 ALLOCNO_MIN (parent_a) = ALLOCNO_MIN (a);
2242 #ifdef ENABLE_IRA_CHECKING
2243 FOR_EACH_ALLOCNO (a, ai)
/* Sanity: both bounds inside [0, ira_max_point].  */
2245 if ((0 <= ALLOCNO_MIN (a) && ALLOCNO_MIN (a) <= ira_max_point)
2246 && (0 <= ALLOCNO_MAX (a) && ALLOCNO_MAX (a) <= ira_max_point))
2253 /* Sort allocnos according to their live ranges. Allocnos with
2254 smaller cover class are put first unless we use priority coloring.
2255 Allocnos with the same cove class are ordered according their start
2256 (min). Allocnos with the same start are ordered according their
/* (... finish (max), then by allocno number for determinism.)  */
2259 allocno_range_compare_func (const void *v1p, const void *v2p)
2262 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
2263 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
2265 if (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2266 && (diff = ALLOCNO_COVER_CLASS (a1) - ALLOCNO_COVER_CLASS (a2)) != 0)
2268 if ((diff = ALLOCNO_MIN (a1) - ALLOCNO_MIN (a2)) != 0)
2270 if ((diff = ALLOCNO_MAX (a1) - ALLOCNO_MAX (a2)) != 0)
2272 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2275 /* Sort ira_conflict_id_allocno_map and set up conflict id of
/* (... allocnos.)  After sorting, an allocno's CONFLICT_ID equals its
   index in the map; trailing slots (from removed allocnos) are
   cleared.  */
2278 sort_conflict_id_allocno_map (void)
2282 ira_allocno_iterator ai;
2285 FOR_EACH_ALLOCNO (a, ai)
2286 ira_conflict_id_allocno_map[num++] = a;
2287 qsort (ira_conflict_id_allocno_map, num, sizeof (ira_allocno_t),
2288 allocno_range_compare_func);
2289 for (i = 0; i < num; i++)
2290 if ((a = ira_conflict_id_allocno_map[i]) != NULL)
2291 ALLOCNO_CONFLICT_ID (a) = i;
2292 for (i = num; i < ira_allocnos_num; i++)
2293 ira_conflict_id_allocno_map[i] = NULL;
2296 /* Set up minimal and maximal conflict ids of allocnos with which
2297 given allocno can conflict. */
/* Reuses ALLOCNO_MIN/ALLOCNO_MAX as conflict-id bounds: after this
   function, allocnos outside [MIN, MAX] in conflict-id order cannot
   intersect the given allocno, which lets the conflict builder prune
   candidate pairs cheaply.  Relies on the order established by
   sort_conflict_id_allocno_map (by cover class, then range start,
   then range finish).  */
2299 setup_min_max_conflict_allocno_ids (void)
2302 int i, j, min, max, start, finish, first_not_finished, filled_area_start;
2303 int *live_range_min, *last_lived;
/* Forward pass: compute the MIN bound.  first_not_finished tracks
   the earliest previously-seen allocno whose range is still live at
   this allocno's start -- anything before it is already dead.  */
2306 live_range_min = (int *) ira_allocate (sizeof (int) * ira_allocnos_num);
2308 first_not_finished = -1;
2309 for (i = 0; i < ira_allocnos_num; i++)
2311 a = ira_conflict_id_allocno_map[i];
/* New cover class group starts: reset the scan state.  */
2315 || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2316 && cover_class != (int) ALLOCNO_COVER_CLASS (a))
2318 cover_class = ALLOCNO_COVER_CLASS (a);
2320 first_not_finished = i;
2324 start = ALLOCNO_MIN (a);
2325 /* If we skip an allocno, the allocno with smaller ids will
2326 be also skipped because of the secondary sorting the
2327 range finishes (see function
2328 allocno_range_compare_func). */
2329 while (first_not_finished < i
2330 && start > ALLOCNO_MAX (ira_conflict_id_allocno_map
2331 [first_not_finished]))
2332 first_not_finished++;
2333 min = first_not_finished;
2336 /* We could increase min further in this case but it is good
/* (... enough.)  Save the real range start before MIN is repurposed
   as a conflict-id bound.  */
2339 live_range_min[i] = ALLOCNO_MIN (a);
2340 ALLOCNO_MIN (a) = min;
/* Backward pass: compute the MAX bound using last_lived, which maps
   a program point to the conflict id of the last allocno (in the
   backward scan) living at that point.  */
2342 last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
2344 filled_area_start = -1;
2345 for (i = ira_allocnos_num - 1; i >= 0; i--)
2347 a = ira_conflict_id_allocno_map[i];
2351 || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2352 && cover_class != (int) ALLOCNO_COVER_CLASS (a))
2354 cover_class = ALLOCNO_COVER_CLASS (a);
2355 for (j = 0; j < ira_max_point; j++)
2357 filled_area_start = ira_max_point;
2359 min = live_range_min[i];
2360 finish = ALLOCNO_MAX (a);
2361 max = last_lived[finish];
2363 /* We could decrease max further in this case but it is good
/* (... enough.)  */
2365 max = ALLOCNO_CONFLICT_ID (a) - 1;
2366 ALLOCNO_MAX (a) = max;
2367 /* In filling, we can go further A range finish to recognize
2368 intersection quickly because if the finish of subsequently
2369 processed allocno (it has smaller conflict id) range is
2370 further A range finish than they are definitely intersected
2371 (the reason for this is the allocnos with bigger conflict id
2372 have their range starts not smaller than allocnos with
/* (... smaller ids.)  */
2374 for (j = min; j < filled_area_start; j++)
2376 filled_area_start = min;
2378 ira_free (last_lived);
2379 ira_free (live_range_min);
2388 ira_allocno_iterator ai;
2389 ira_loop_tree_node_t loop_tree_node;
2391 FOR_EACH_ALLOCNO (a, ai)
2393 if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
2395 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2396 create_cap_allocno (a);
2397 else if (ALLOCNO_CAP (a) == NULL)
2399 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
2400 if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
2401 create_cap_allocno (a);
2408 /* The page contains code transforming more one region internal
2409 representation (IR) to one region IR which is necessary for reload.
2410 This transformation is called IR flattening. We might just rebuild
2411 the IR for one region but we don't do it because it takes a lot of
2414 /* Map: regno -> allocnos which will finally represent the regno for
2415 IR with one region. */
2416 static ira_allocno_t *regno_top_level_allocno_map;
2418 /* Find the allocno that corresponds to A at a level one higher up in the
2419 loop tree. Returns NULL if A is a cap, or if it has no parent. */
2421 ira_parent_allocno (ira_allocno_t a)
2423 ira_loop_tree_node_t parent;
2425 if (ALLOCNO_CAP (a) != NULL)
2428 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2432 return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
2435 /* Find the allocno that corresponds to A at a level one higher up in the
2436 loop tree. If ALLOCNO_CAP is set for A, return that. */
2438 ira_parent_or_cap_allocno (ira_allocno_t a)
2440 if (ALLOCNO_CAP (a) != NULL)
2441 return ALLOCNO_CAP (a);
2443 return ira_parent_allocno (a);
2446 /* Process all allocnos originated from pseudo REGNO and copy live
2447 ranges, hard reg conflicts, and allocno stack reg attributes from
2448 low level allocnos to final allocnos which are destinations of
2449 removed stores at a loop exit. Return true if we copied live
2452 copy_info_to_removed_store_destinations (int regno)
2455 ira_allocno_t parent_a = NULL;
2456 ira_loop_tree_node_t parent;
2460 for (a = ira_regno_allocno_map[regno];
2462 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2464 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))])
2465 /* This allocno will be removed. */
2467 /* Caps will be removed. */
2468 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2469 for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
2471 parent = parent->parent)
2472 if ((parent_a = parent->regno_allocno_map[regno]) == NULL
2473 || (parent_a == regno_top_level_allocno_map[REGNO (ALLOCNO_REG
2475 && ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)))
2477 if (parent == NULL || parent_a == NULL)
2479 copy_allocno_live_ranges (a, parent_a);
2480 merge_hard_reg_conflicts (a, parent_a, true);
2481 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
2482 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2483 += ALLOCNO_CALLS_CROSSED_NUM (a);
2484 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2485 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2491 /* Flatten the IR. In other words, this function transforms IR as if
2492 it were built with one region (without loops). We could make it
2493 much simpler by rebuilding IR with one region, but unfortunately it
2494 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
2495 IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
2496 IRA_MAX_POINT before emitting insns on the loop borders. */
2498 ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
2503 bool new_pseudos_p, merged_p, mem_dest_p;
2505 enum reg_class cover_class;
2506 ira_allocno_t a, parent_a, first, second, node_first, node_second;
2508 ira_loop_tree_node_t node;
2510 ira_allocno_iterator ai;
2511 ira_copy_iterator ci;
2512 sparseset allocnos_live;
2514 regno_top_level_allocno_map
2515 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
2516 memset (regno_top_level_allocno_map, 0,
2517 max_reg_num () * sizeof (ira_allocno_t));
2518 new_pseudos_p = merged_p = false;
2519 FOR_EACH_ALLOCNO (a, ai)
2521 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2522 /* Caps are not in the regno allocno maps and they are never
2523 will be transformed into allocnos existing after IR
2526 COPY_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
2527 ALLOCNO_CONFLICT_HARD_REGS (a));
2529 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
2532 /* Fix final allocno attributes. */
2533 for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2536 for (a = ira_regno_allocno_map[i];
2538 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2540 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2541 if (ALLOCNO_SOMEWHERE_RENAMED_P (a))
2542 new_pseudos_p = true;
2543 parent_a = ira_parent_allocno (a);
2544 if (parent_a == NULL)
2546 ALLOCNO_COPIES (a) = NULL;
2547 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2550 ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
2552 if (ALLOCNO_MEM_OPTIMIZED_DEST (a) != NULL)
2554 if (REGNO (ALLOCNO_REG (a)) == REGNO (ALLOCNO_REG (parent_a)))
2556 merge_hard_reg_conflicts (a, parent_a, true);
2557 move_allocno_live_ranges (a, parent_a);
2559 ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2560 = (ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
2561 || ALLOCNO_MEM_OPTIMIZED_DEST_P (a));
2564 new_pseudos_p = true;
2567 ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
2568 ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
2569 ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
2570 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
2571 -= ALLOCNO_CALLS_CROSSED_NUM (a);
2572 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
2573 -= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2574 ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
2575 && ALLOCNO_NREFS (parent_a) >= 0
2576 && ALLOCNO_FREQ (parent_a) >= 0);
2577 cover_class = ALLOCNO_COVER_CLASS (parent_a);
2578 hard_regs_num = ira_class_hard_regs_num[cover_class];
2579 if (ALLOCNO_HARD_REG_COSTS (a) != NULL
2580 && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
2581 for (j = 0; j < hard_regs_num; j++)
2582 ALLOCNO_HARD_REG_COSTS (parent_a)[j]
2583 -= ALLOCNO_HARD_REG_COSTS (a)[j];
2584 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
2585 && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
2586 for (j = 0; j < hard_regs_num; j++)
2587 ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
2588 -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
2589 ALLOCNO_COVER_CLASS_COST (parent_a)
2590 -= ALLOCNO_COVER_CLASS_COST (a);
2591 ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
2592 parent_a = ira_parent_allocno (parent_a);
2593 if (parent_a == NULL)
2596 ALLOCNO_COPIES (a) = NULL;
2597 regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
2599 if (mem_dest_p && copy_info_to_removed_store_destinations (i))
2602 ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
2603 if (merged_p || ira_max_point_before_emit != ira_max_point)
2604 ira_rebuild_start_finish_chains ();
2607 /* Rebuild conflicts. */
2608 FOR_EACH_ALLOCNO (a, ai)
2610 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2611 || ALLOCNO_CAP_MEMBER (a) != NULL)
2613 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2614 ira_assert (r->allocno == a);
2615 clear_allocno_conflicts (a);
2617 allocnos_live = sparseset_alloc (ira_allocnos_num);
2618 for (i = 0; i < ira_max_point; i++)
2620 for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
2623 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2624 || ALLOCNO_CAP_MEMBER (a) != NULL)
2626 num = ALLOCNO_NUM (a);
2627 cover_class = ALLOCNO_COVER_CLASS (a);
2628 sparseset_set_bit (allocnos_live, num);
2629 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, n)
2631 ira_allocno_t live_a = ira_allocnos[n];
2633 if (ira_reg_classes_intersect_p
2634 [cover_class][ALLOCNO_COVER_CLASS (live_a)]
2635 /* Don't set up conflict for the allocno with itself. */
2637 ira_add_allocno_conflict (a, live_a);
2641 for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
2642 sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (r->allocno));
2644 sparseset_free (allocnos_live);
2645 compress_conflict_vecs ();
2647 /* Mark some copies for removing and change allocnos in the rest
2649 FOR_EACH_COPY (cp, ci)
2651 if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
2652 || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
2654 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2656 (ira_dump_file, " Remove cp%d:%c%dr%d-%c%dr%d\n",
2657 cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
2658 ALLOCNO_NUM (cp->first), REGNO (ALLOCNO_REG (cp->first)),
2659 ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
2660 ALLOCNO_NUM (cp->second), REGNO (ALLOCNO_REG (cp->second)));
2661 cp->loop_tree_node = NULL;
2664 first = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->first))];
2665 second = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->second))];
2666 node = cp->loop_tree_node;
2668 keep_p = true; /* It copy generated in ira-emit.c. */
2671 /* Check that the copy was not propagated from level on
2672 which we will have different pseudos. */
2673 node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
2674 node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
2675 keep_p = ((REGNO (ALLOCNO_REG (first))
2676 == REGNO (ALLOCNO_REG (node_first)))
2677 && (REGNO (ALLOCNO_REG (second))
2678 == REGNO (ALLOCNO_REG (node_second))));
2682 cp->loop_tree_node = ira_loop_tree_root;
2684 cp->second = second;
2688 cp->loop_tree_node = NULL;
2689 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2690 fprintf (ira_dump_file, " Remove cp%d:a%dr%d-a%dr%d\n",
2691 cp->num, ALLOCNO_NUM (cp->first),
2692 REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
2693 REGNO (ALLOCNO_REG (cp->second)));
2696 /* Remove unnecessary allocnos on lower levels of the loop tree. */
2697 FOR_EACH_ALLOCNO (a, ai)
2699 if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
2700 || ALLOCNO_CAP_MEMBER (a) != NULL)
2702 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
2703 fprintf (ira_dump_file, " Remove a%dr%d\n",
2704 ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
2708 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2709 ALLOCNO_REGNO (a) = REGNO (ALLOCNO_REG (a));
2710 ALLOCNO_CAP (a) = NULL;
2711 /* Restore updated costs for assignments from reload. */
2712 ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
2713 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = ALLOCNO_COVER_CLASS_COST (a);
2714 if (! ALLOCNO_ASSIGNED_P (a))
2715 ira_free_allocno_updated_costs (a);
2716 ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
2717 ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
2719 /* Remove unnecessary copies. */
2720 FOR_EACH_COPY (cp, ci)
2722 if (cp->loop_tree_node == NULL)
2724 ira_copies[cp->num] = NULL;
2729 (ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
2730 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
2731 ira_add_allocno_copy_to_list (cp);
2732 ira_swap_allocno_copy_ends_if_necessary (cp);
2734 rebuild_regno_allocno_maps ();
2735 if (ira_max_point != ira_max_point_before_emit)
2736 ira_compress_allocno_live_ranges ();
2737 ira_free (regno_top_level_allocno_map);
#ifdef ENABLE_IRA_CHECKING
/* Check creation of all allocnos.  Allocnos on lower levels should
   have allocnos or caps on all upper levels.  */
static void
check_allocno_creation (void)
{
  ira_allocno_t a;
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;

  FOR_EACH_ALLOCNO (a, ai)
    {
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
				ALLOCNO_NUM (a)));
      if (loop_tree_node == ira_loop_tree_root)
	continue;
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	/* No cap: the parent region must carry an allocno for this
	   regno and this allocno must be on the region border.  */
	ira_assert (loop_tree_node->parent
		    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (loop_tree_node->border_allocnos,
				     ALLOCNO_NUM (a)));
    }
}
#endif
2770 /* Identify allocnos which prefer a register class with a single hard register.
2771 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
2772 less likely to use the preferred singleton register. */
2774 update_conflict_hard_reg_costs (void)
2777 ira_allocno_iterator ai;
2780 FOR_EACH_ALLOCNO (a, ai)
2782 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
2783 enum reg_class pref = reg_preferred_class (ALLOCNO_REGNO (a));
2785 if (reg_class_size[pref] != 1)
2787 index = (ira_class_hard_reg_index[cover_class]
2788 [ira_class_hard_regs[pref][0]]);
2791 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
2792 || ALLOCNO_HARD_REG_COSTS (a) == NULL)
2795 for (i = ira_class_hard_regs_num[cover_class] - 1; i >= 0; i--)
2796 if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_COVER_CLASS_COST (a)
2797 && min > ALLOCNO_HARD_REG_COSTS (a)[i])
2798 min = ALLOCNO_HARD_REG_COSTS (a)[i];
2801 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
2803 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
2804 -= min - ALLOCNO_COVER_CLASS_COST (a);
2808 /* Create a internal representation (IR) for IRA (allocnos, copies,
2809 loop tree nodes). If LOOPS_P is FALSE the nodes corresponding to
2810 the loops (except the root which corresponds the all function) and
2811 correspondingly allocnos for the loops will be not created. Such
2812 parameter value is used for Chaitin-Briggs coloring. The function
2813 returns TRUE if we generate loop structure (besides nodes
2814 representing all function and the basic blocks) for regional
2815 allocation. A true return means that we really need to flatten IR
2816 before the reload. */
2818 ira_build (bool loops_p)
2822 initiate_cost_vectors ();
2823 initiate_allocnos ();
2825 create_loop_tree_nodes (loops_p);
2829 ira_create_allocno_live_ranges ();
2830 remove_unnecessary_regions (false);
2831 ira_compress_allocno_live_ranges ();
2832 update_bad_spill_attribute ();
2833 loops_p = more_one_region_p ();
2836 propagate_allocno_info ();
2839 ira_tune_allocno_costs_and_cover_classes ();
2840 #ifdef ENABLE_IRA_CHECKING
2841 check_allocno_creation ();
2843 setup_min_max_allocno_live_range_point ();
2844 sort_conflict_id_allocno_map ();
2845 setup_min_max_conflict_allocno_ids ();
2846 ira_build_conflicts ();
2847 update_conflict_hard_reg_costs ();
2848 if (! ira_conflicts_p)
2851 ira_allocno_iterator ai;
2853 /* Remove all regions but root one. */
2856 remove_unnecessary_regions (true);
2859 /* We don't save hard registers around calls for fast allocation
2860 -- add caller clobbered registers as conflicting ones to
2861 allocno crossing calls. */
2862 FOR_EACH_ALLOCNO (a, ai)
2863 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
2865 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
2867 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
2871 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
2872 print_copies (ira_dump_file);
2873 if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
2878 ira_allocno_iterator ai;
2881 FOR_EACH_ALLOCNO (a, ai)
2882 n += ALLOCNO_CONFLICT_ALLOCNOS_NUM (a);
2884 FOR_EACH_ALLOCNO (a, ai)
2885 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
2887 fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
2888 VEC_length (loop_p, ira_loops.larray), n_basic_blocks,
2890 fprintf (ira_dump_file,
2891 " allocnos=%d, copies=%d, conflicts=%d, ranges=%d\n",
2892 ira_allocnos_num, ira_copies_num, n, nr);
2897 /* Release the data created by function ira_build. */
2901 finish_loop_tree_nodes ();
2904 finish_cost_vectors ();
2905 ira_finish_allocno_live_ranges ();