1 /* Building internal representation for IRA.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "insn-config.h"
35 #include "diagnostic-core.h"
41 #include "sparseset.h"
43 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
/* Forward declaration: look up an existing copy connecting two
   allocnos, originated from a given insn in a given loop tree node
   (defined later in this file).  */
45 static ira_copy_t find_allocno_copy (ira_allocno_t, ira_allocno_t, rtx,
46 ira_loop_tree_node_t);
48 /* The root of the loop tree corresponding to the all function. */
49 ira_loop_tree_node_t ira_loop_tree_root;
51 /* Height of the loop tree. */
52 int ira_loop_tree_height;
54 /* All nodes representing basic blocks are referred through the
55 following array. We can not use basic block member `aux' for this
56 because it is used for insertion of insns on edges. */
57 ira_loop_tree_node_t ira_bb_nodes;
59 /* All nodes representing loops are referred through the following
   array (indexed by loop number).  */
61 ira_loop_tree_node_t ira_loop_nodes;
63 /* Map regno -> allocnos with given regno (see comments for
64 allocno member `next_regno_allocno'). */
65 ira_allocno_t *ira_regno_allocno_map;
67 /* Array of references to all allocnos. The order number of the
68 allocno corresponds to the index in the array. Removed allocnos
69 have NULL element value. */
70 ira_allocno_t *ira_allocnos;
72 /* Sizes of the previous array. */
75 /* Count of conflict record structures we've created, used when creating
79 /* Map a conflict id to its conflict record. */
80 ira_object_t *ira_object_id_map;
82 /* Array of references to all copies. The order number of the copy
83 corresponds to the index in the array. Removed copies have NULL
85 ira_copy_t *ira_copies;
87 /* Size of the previous array. */
92 /* LAST_BASIC_BLOCK before generating additional insns because of live
93 range splitting. Emitting insns on a critical edge creates a new
95 static int last_basic_block_before_change;
97 /* The following function allocates the loop tree nodes. If LOOPS_P
98 is FALSE, the nodes corresponding to the loops (except the root
99 which corresponds the all function) will be not allocated but nodes
100 will still be allocated for basic blocks. */
102 create_loop_tree_nodes (bool loops_p)
109 VEC (edge, heap) *edges;
/* Allocate one tree node per basic block and clear its fields;
   remember the BB count so finish_loop_tree_nodes frees the same
   number even if new blocks are created later.  */
113 = ((struct ira_loop_tree_node *)
114 ira_allocate (sizeof (struct ira_loop_tree_node) * last_basic_block));
115 last_basic_block_before_change = last_basic_block;
116 for (i = 0; i < (unsigned int) last_basic_block; i++)
118 ira_bb_nodes[i].regno_allocno_map = NULL;
119 memset (ira_bb_nodes[i].reg_pressure, 0,
120 sizeof (ira_bb_nodes[i].reg_pressure));
121 ira_bb_nodes[i].all_allocnos = NULL;
122 ira_bb_nodes[i].modified_regnos = NULL;
123 ira_bb_nodes[i].border_allocnos = NULL;
124 ira_bb_nodes[i].local_copies = NULL;
/* Allocate one tree node per loop known to the loop optimizer.  */
126 ira_loop_nodes = ((struct ira_loop_tree_node *)
127 ira_allocate (sizeof (struct ira_loop_tree_node)
128 * VEC_length (loop_p, ira_loops.larray)));
129 max_regno = max_reg_num ();
130 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
132 if (loop != ira_loops.tree_root)
134 ira_loop_nodes[i].regno_allocno_map = NULL;
/* NOTE(review): a loop entered or exited through an abnormal
   critical edge appears to be rejected as an allocation region
   (its regno_allocno_map stays NULL); the handling lines are
   elided in this listing -- confirm against the full source.  */
138 FOR_EACH_EDGE (e, ei, loop->header->preds)
139 if (e->src != loop->latch
140 && (e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
147 edges = get_loop_exit_edges (loop);
148 for (j = 0; VEC_iterate (edge, edges, j, e); j++)
149 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
154 VEC_free (edge, heap, edges);
/* Initialize this loop's region data: zeroed regno -> allocno map,
   cleared register pressure, and empty bitmaps.  */
158 ira_loop_nodes[i].regno_allocno_map
159 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t) * max_regno);
160 memset (ira_loop_nodes[i].regno_allocno_map, 0,
161 sizeof (ira_allocno_t) * max_regno);
162 memset (ira_loop_nodes[i].reg_pressure, 0,
163 sizeof (ira_loop_nodes[i].reg_pressure));
164 ira_loop_nodes[i].all_allocnos = ira_allocate_bitmap ();
165 ira_loop_nodes[i].modified_regnos = ira_allocate_bitmap ();
166 ira_loop_nodes[i].border_allocnos = ira_allocate_bitmap ();
167 ira_loop_nodes[i].local_copies = ira_allocate_bitmap ();
171 /* The function returns TRUE if there are more than one allocation
   region, i.e. some loop node other than the root owns a
   regno_allocno_map.  */
174 more_one_region_p (void)
179 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
180 if (ira_loop_nodes[i].regno_allocno_map != NULL
181 && ira_loop_tree_root != &ira_loop_nodes[i])
186 /* Free the loop tree node of a loop. */
188 finish_loop_tree_node (ira_loop_tree_node_t loop)
/* Only loop nodes that were set up as regions own resources;
   a NULL regno_allocno_map means there is nothing to free.  */
190 if (loop->regno_allocno_map != NULL)
/* A node owning a regno map must be a loop node, not a BB node.  */
192 ira_assert (loop->bb == NULL);
193 ira_free_bitmap (loop->local_copies);
194 ira_free_bitmap (loop->border_allocnos);
195 ira_free_bitmap (loop->modified_regnos);
196 ira_free_bitmap (loop->all_allocnos);
197 ira_free (loop->regno_allocno_map);
198 loop->regno_allocno_map = NULL;
202 /* Free the loop tree nodes. */
204 finish_loop_tree_nodes (void)
209 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
210 finish_loop_tree_node (&ira_loop_nodes[i]);
211 ira_free (ira_loop_nodes);
/* Free BB nodes up to the count recorded at creation time --
   blocks created afterwards have no IRA node.  Each field is
   NULL-checked because BB nodes are lazily populated.  */
212 for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
214 if (ira_bb_nodes[i].local_copies != NULL)
215 ira_free_bitmap (ira_bb_nodes[i].local_copies);
216 if (ira_bb_nodes[i].border_allocnos != NULL)
217 ira_free_bitmap (ira_bb_nodes[i].border_allocnos);
218 if (ira_bb_nodes[i].modified_regnos != NULL)
219 ira_free_bitmap (ira_bb_nodes[i].modified_regnos);
220 if (ira_bb_nodes[i].all_allocnos != NULL)
221 ira_free_bitmap (ira_bb_nodes[i].all_allocnos);
222 if (ira_bb_nodes[i].regno_allocno_map != NULL)
223 ira_free (ira_bb_nodes[i].regno_allocno_map);
225 ira_free (ira_bb_nodes);
230 /* The following recursive function adds LOOP to the loop tree
231 hierarchy. LOOP is added only once. */
233 add_loop_to_tree (struct loop *loop)
236 ira_loop_tree_node_t loop_node, parent_node;
238 /* We can not use loop node access macros here because of potential
239 checking and because the nodes are not initialized enough
/* Ensure ancestors are linked in first, outermost to innermost.  */
241 if (loop_outer (loop) != NULL)
242 add_loop_to_tree (loop_outer (loop));
/* A node with a regno map but no children yet has not been linked
   into the tree; a NULL regno map means this loop is not a region.  */
243 if (ira_loop_nodes[loop->num].regno_allocno_map != NULL
244 && ira_loop_nodes[loop->num].children == NULL)
246 /* We have not added loop node to the tree yet. */
247 loop_node = &ira_loop_nodes[loop->num];
248 loop_node->loop = loop;
249 loop_node->bb = NULL;
/* Find the nearest enclosing loop that is itself a region
   (has a regno map); non-region loops are skipped over.  */
250 for (parent = loop_outer (loop);
252 parent = loop_outer (parent))
253 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
/* No region ancestor: this node becomes a root-level node.  */
257 loop_node->next = NULL;
258 loop_node->subloop_next = NULL;
259 loop_node->parent = NULL;
/* Link into the parent's children and subloops lists.  */
263 parent_node = &ira_loop_nodes[parent->num];
264 loop_node->next = parent_node->children;
265 parent_node->children = loop_node;
266 loop_node->subloop_next = parent_node->subloops;
267 parent_node->subloops = loop_node;
268 loop_node->parent = parent_node;
273 /* The following recursive function sets up levels of nodes of the
274 tree given its root LOOP_NODE. The enumeration starts with LEVEL.
275 The function returns maximal value of level in the tree + 1. */
277 setup_loop_tree_level (ira_loop_tree_node_t loop_node, int level)
279 int height, max_height;
280 ira_loop_tree_node_t subloop_node;
282 ira_assert (loop_node->bb == NULL);
283 loop_node->level = level;
284 max_height = level + 1;
/* Recurse into subloops only (BB children carry no level) and
   keep the maximum height seen.  */
285 for (subloop_node = loop_node->subloops;
286 subloop_node != NULL;
287 subloop_node = subloop_node->subloop_next)
289 ira_assert (subloop_node->bb == NULL);
290 height = setup_loop_tree_level (subloop_node, level + 1);
291 if (height > max_height)
297 /* Create the loop tree. The algorithm is designed to provide correct
298 order of loops (they are ordered by their last loop BB) and basic
299 blocks in the chain formed by member next. */
301 form_loop_tree (void)
306 ira_loop_tree_node_t bb_node, loop_node;
309 /* We can not use loop/bb node access macros because of potential
310 checking and because the nodes are not initialized enough
/* Reset child/subloop links of every region loop node.  */
312 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
313 if (ira_loop_nodes[i].regno_allocno_map != NULL)
315 ira_loop_nodes[i].children = NULL;
316 ira_loop_nodes[i].subloops = NULL;
/* Initialize this basic block's node and attach it to the nearest
   enclosing loop that forms a region.  */
320 bb_node = &ira_bb_nodes[bb->index];
322 bb_node->loop = NULL;
323 bb_node->subloops = NULL;
324 bb_node->children = NULL;
325 bb_node->subloop_next = NULL;
326 bb_node->next = NULL;
327 for (parent = bb->loop_father;
329 parent = loop_outer (parent))
330 if (ira_loop_nodes[parent->num].regno_allocno_map != NULL)
332 add_loop_to_tree (parent);
333 loop_node = &ira_loop_nodes[parent->num];
334 bb_node->next = loop_node->children;
335 bb_node->parent = loop_node;
336 loop_node->children = bb_node;
/* Finish up: record the root and compute the tree height.  */
338 ira_loop_tree_root = IRA_LOOP_NODE_BY_INDEX (ira_loops.tree_root->num);
339 ira_loop_tree_height = setup_loop_tree_level (ira_loop_tree_root, 0);
340 ira_assert (ira_loop_tree_root->regno_allocno_map != NULL);
345 /* Rebuild IRA_REGNO_ALLOCNO_MAP and REGNO_ALLOCNO_MAPs of the loop
   tree nodes, e.g. after the number of pseudo-registers changed.  */
348 rebuild_regno_allocno_maps (void)
351 int max_regno, regno;
353 ira_loop_tree_node_t loop_tree_node;
355 ira_allocno_iterator ai;
357 max_regno = max_reg_num ();
/* Reallocate each region's map at the new max_regno size.  */
358 for (l = 0; VEC_iterate (loop_p, ira_loops.larray, l, loop); l++)
359 if (ira_loop_nodes[l].regno_allocno_map != NULL)
361 ira_free (ira_loop_nodes[l].regno_allocno_map);
362 ira_loop_nodes[l].regno_allocno_map
363 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
365 memset (ira_loop_nodes[l].regno_allocno_map, 0,
366 sizeof (ira_allocno_t) * max_regno);
368 ira_free (ira_regno_allocno_map);
369 ira_regno_allocno_map
370 = (ira_allocno_t *) ira_allocate (max_regno * sizeof (ira_allocno_t));
371 memset (ira_regno_allocno_map, 0, max_regno * sizeof (ira_allocno_t));
/* Re-thread every (non-cap) allocno onto the per-regno lists and
   into its loop tree node's map.  */
372 FOR_EACH_ALLOCNO (a, ai)
374 if (ALLOCNO_CAP_MEMBER (a) != NULL)
375 /* Caps are not in the regno allocno maps. */
377 regno = ALLOCNO_REGNO (a);
378 loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
379 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
380 ira_regno_allocno_map[regno] = a;
381 if (loop_tree_node->regno_allocno_map[regno] == NULL)
382 /* Remember that we can create temporary allocnos to break
383 cycles in register shuffle. */
384 loop_tree_node->regno_allocno_map[regno] = a;
389 /* Pools for allocnos, allocno live ranges and objects. */
390 static alloc_pool allocno_pool, live_range_pool, object_pool;
392 /* Vec containing references to all created allocnos. It is a
393 container of array allocnos. */
394 static VEC(ira_allocno_t,heap) *allocno_vec;
396 /* Vec containing references to all created ira_objects. It is a
397 container of ira_object_id_map. */
398 static VEC(ira_object_t,heap) *ira_object_id_map_vec;
400 /* Initialize data concerning allocnos. */
402 initiate_allocnos (void)
/* Create the three allocation pools and the growable vectors that
   back the ira_allocnos / ira_object_id_map arrays.  */
405 = create_alloc_pool ("live ranges",
406 sizeof (struct live_range), 100);
408 = create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
410 = create_alloc_pool ("objects", sizeof (struct ira_object), 100);
411 allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
413 ira_allocnos_num = 0;
415 ira_object_id_map_vec
416 = VEC_alloc (ira_object_t, heap, max_reg_num () * 2);
417 ira_object_id_map = NULL;
/* The global regno -> allocno map starts out empty.  */
418 ira_regno_allocno_map
419 = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
420 memset (ira_regno_allocno_map, 0, max_reg_num () * sizeof (ira_allocno_t));
423 /* Create and return an object corresponding to a new allocno A. */
425 ira_create_object (ira_allocno_t a)
427 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
428 ira_object_t obj = (ira_object_t) pool_alloc (object_pool);
430 OBJECT_ALLOCNO (obj) = a;
431 OBJECT_CONFLICT_ID (obj) = ira_objects_num;
432 OBJECT_CONFLICT_VEC_P (obj) = false;
433 OBJECT_CONFLICT_ARRAY (obj) = NULL;
434 OBJECT_NUM_CONFLICTS (obj) = 0;
/* Start conflict hard regs with everything unavailable for
   allocation plus everything outside the cover class.  */
435 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
436 COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj), ira_no_alloc_regs);
437 IOR_COMPL_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
438 reg_class_contents[cover_class]);
439 IOR_COMPL_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
440 reg_class_contents[cover_class]);
/* Empty program-point range until live ranges are built.  */
441 OBJECT_MIN (obj) = INT_MAX;
442 OBJECT_MAX (obj) = -1;
443 OBJECT_LIVE_RANGES (obj) = NULL;
/* Register the object; the id map array may be reallocated by the
   push, so refresh the exported pointer and count.  */
445 VEC_safe_push (ira_object_t, heap, ira_object_id_map_vec, obj);
447 = VEC_address (ira_object_t, ira_object_id_map_vec);
448 ira_objects_num = VEC_length (ira_object_t, ira_object_id_map_vec);
452 /* Create and return the allocno corresponding to REGNO in
453 LOOP_TREE_NODE. Add the allocno to the list of allocnos with the
454 same regno if CAP_P is FALSE. */
456 ira_create_allocno (int regno, bool cap_p, ira_loop_tree_node_t loop_tree_node)
460 a = (ira_allocno_t) pool_alloc (allocno_pool);
461 ALLOCNO_REGNO (a) = regno;
462 ALLOCNO_LOOP_TREE_NODE (a) = loop_tree_node;
/* Non-cap allocnos are threaded onto the per-regno list and into
   their loop tree node's regno map.  */
465 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = ira_regno_allocno_map[regno];
466 ira_regno_allocno_map[regno] = a;
467 if (loop_tree_node->regno_allocno_map[regno] == NULL)
468 /* Remember that we can create temporary allocnos to break
469 cycles in register shuffle on region borders (see
471 loop_tree_node->regno_allocno_map[regno] = a;
/* Initialize every allocno field to its neutral default.  */
473 ALLOCNO_CAP (a) = NULL;
474 ALLOCNO_CAP_MEMBER (a) = NULL;
475 ALLOCNO_NUM (a) = ira_allocnos_num;
476 bitmap_set_bit (loop_tree_node->all_allocnos, ALLOCNO_NUM (a));
477 ALLOCNO_NREFS (a) = 0;
478 ALLOCNO_FREQ (a) = 0;
479 ALLOCNO_HARD_REGNO (a) = -1;
480 ALLOCNO_CALL_FREQ (a) = 0;
481 ALLOCNO_CALLS_CROSSED_NUM (a) = 0;
483 ALLOCNO_NO_STACK_REG_P (a) = false;
484 ALLOCNO_TOTAL_NO_STACK_REG_P (a) = false;
486 ALLOCNO_MEM_OPTIMIZED_DEST (a) = NULL;
487 ALLOCNO_MEM_OPTIMIZED_DEST_P (a) = false;
488 ALLOCNO_SOMEWHERE_RENAMED_P (a) = false;
489 ALLOCNO_CHILD_RENAMED_P (a) = false;
490 ALLOCNO_DONT_REASSIGN_P (a) = false;
491 ALLOCNO_BAD_SPILL_P (a) = false;
492 ALLOCNO_IN_GRAPH_P (a) = false;
493 ALLOCNO_ASSIGNED_P (a) = false;
494 ALLOCNO_MAY_BE_SPILLED_P (a) = false;
495 ALLOCNO_SPLAY_REMOVED_P (a) = false;
496 ALLOCNO_MODE (a) = (regno < 0 ? VOIDmode : PSEUDO_REGNO_MODE (regno));
497 ALLOCNO_COPIES (a) = NULL;
498 ALLOCNO_HARD_REG_COSTS (a) = NULL;
499 ALLOCNO_CONFLICT_HARD_REG_COSTS (a) = NULL;
500 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
501 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
502 ALLOCNO_LEFT_CONFLICTS_SIZE (a) = -1;
503 ALLOCNO_COVER_CLASS (a) = NO_REGS;
504 ALLOCNO_UPDATED_COVER_CLASS_COST (a) = 0;
505 ALLOCNO_COVER_CLASS_COST (a) = 0;
506 ALLOCNO_MEMORY_COST (a) = 0;
507 ALLOCNO_UPDATED_MEMORY_COST (a) = 0;
508 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) = 0;
509 ALLOCNO_NEXT_BUCKET_ALLOCNO (a) = NULL;
510 ALLOCNO_PREV_BUCKET_ALLOCNO (a) = NULL;
/* Each allocno is initially its own singleton coalesce group.  */
511 ALLOCNO_FIRST_COALESCED_ALLOCNO (a) = a;
512 ALLOCNO_NEXT_COALESCED_ALLOCNO (a) = a;
/* Register the allocno; the push may reallocate, so refresh the
   exported array pointer and count.  */
514 VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
515 ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
516 ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
520 /* Set up cover class for A and update its conflict hard registers. */
522 ira_set_allocno_cover_class (ira_allocno_t a, enum reg_class cover_class)
524 ALLOCNO_COVER_CLASS (a) = cover_class;
527 /* Allocate an object for allocno A and set ALLOCNO_OBJECT. */
529 ira_create_allocno_object (ira_allocno_t a)
531 ALLOCNO_OBJECT (a) = ira_create_object (a);
534 /* For each allocno, create the corresponding ALLOCNO_OBJECT structure. */
536 create_allocno_objects (void)
539 ira_allocno_iterator ai;
541 FOR_EACH_ALLOCNO (a, ai)
542 ira_create_allocno_object (a);
545 /* Merge hard register conflicts from allocno FROM into allocno TO. If
546 TOTAL_ONLY is true, we ignore ALLOCNO_CONFLICT_HARD_REGS. */
548 merge_hard_reg_conflicts (ira_allocno_t from, ira_allocno_t to,
551 ira_object_t from_obj = ALLOCNO_OBJECT (from);
552 ira_object_t to_obj = ALLOCNO_OBJECT (to);
554 IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (to_obj),
555 OBJECT_CONFLICT_HARD_REGS (from_obj));
556 IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (to_obj),
557 OBJECT_TOTAL_CONFLICT_HARD_REGS (from_obj));
/* Propagate the no-stack-reg flags: the plain flag only when the
   full (not total-only) merge is requested.  */
559 if (!total_only && ALLOCNO_NO_STACK_REG_P (from))
560 ALLOCNO_NO_STACK_REG_P (to) = true;
561 if (ALLOCNO_TOTAL_NO_STACK_REG_P (from))
562 ALLOCNO_TOTAL_NO_STACK_REG_P (to) = true;
566 /* Return TRUE if a conflict vector with NUM elements is more
567 profitable than a conflict bit vector for OBJ. */
569 ira_conflict_vector_profitable_p (ira_object_t obj, int num)
572 int max = OBJECT_MAX (obj);
573 int min = OBJECT_MIN (obj);
576 /* We prefer a bit vector in such case because it does not result
/* Compare the byte cost of a pointer vector (NUM + 1 entries,
   including the NULL terminator) against the bit vector spanning
   [min, max]; the 2x/3x factors bias slightly toward bit vectors.  */
580 nw = (max - min + IRA_INT_BITS) / IRA_INT_BITS;
581 return (2 * sizeof (ira_object_t) * (num + 1)
582 < 3 * nw * sizeof (IRA_INT_TYPE));
585 /* Allocates and initialize the conflict vector of OBJ for NUM
586 conflicting objects. */
588 ira_allocate_conflict_vec (ira_object_t obj, int num)
593 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
594 num++; /* for NULL end marker */
595 size = sizeof (ira_object_t) * num;
596 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
597 vec = (ira_object_t *) OBJECT_CONFLICT_ARRAY (obj);
599 OBJECT_NUM_CONFLICTS (obj) = 0;
600 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
601 OBJECT_CONFLICT_VEC_P (obj) = true;
604 /* Allocate and initialize the conflict bit vector of OBJ. */
606 allocate_conflict_bit_vec (ira_object_t obj)
610 ira_assert (OBJECT_CONFLICT_ARRAY (obj) == NULL);
/* Size the bit vector to span conflict ids [OBJECT_MIN, OBJECT_MAX],
   rounded up to whole IRA_INT_TYPE words, and zero it.  */
611 size = ((OBJECT_MAX (obj) - OBJECT_MIN (obj) + IRA_INT_BITS)
612 / IRA_INT_BITS * sizeof (IRA_INT_TYPE));
613 OBJECT_CONFLICT_ARRAY (obj) = ira_allocate (size);
614 memset (OBJECT_CONFLICT_ARRAY (obj), 0, size);
615 OBJECT_CONFLICT_ARRAY_SIZE (obj) = size;
616 OBJECT_CONFLICT_VEC_P (obj) = false;
619 /* Allocate and initialize the conflict vector or conflict bit vector
620 of A for NUM conflicting allocnos whatever is more profitable. */
622 ira_allocate_object_conflicts (ira_object_t a, int num)
624 if (ira_conflict_vector_profitable_p (a, num))
625 ira_allocate_conflict_vec (a, num);
627 allocate_conflict_bit_vec (a);
630 /* Add OBJ2 to the conflicts of OBJ1. */
632 add_to_conflicts (ira_object_t obj1, ira_object_t obj2)
637 if (OBJECT_CONFLICT_VEC_P (obj1))
/* Vector representation: append OBJ2, growing the vector by 1.5x
   (plus one) when it is full.  */
639 ira_object_t *vec = OBJECT_CONFLICT_VEC (obj1);
640 int curr_num = OBJECT_NUM_CONFLICTS (obj1);
642 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < num * sizeof (ira_object_t))
644 ira_object_t *newvec;
/* NOTE(review): sizeof (ira_allocno_t) is used here although the
   vector holds ira_object_t; presumably both are pointer types of
   equal size -- confirm against the type definitions.  */
645 size = (3 * num / 2 + 1) * sizeof (ira_allocno_t);
646 newvec = (ira_object_t *) ira_allocate (size);
647 memcpy (newvec, vec, curr_num * sizeof (ira_object_t));
650 OBJECT_CONFLICT_ARRAY (obj1) = vec;
651 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
655 OBJECT_NUM_CONFLICTS (obj1)++;
/* Bit-vector representation: the vector only spans ids
   [OBJECT_MIN, OBJECT_MAX]; grow it at the head or tail when
   OBJ2's conflict id falls outside that window.  */
659 int nw, added_head_nw, id;
660 IRA_INT_TYPE *vec = OBJECT_CONFLICT_BITVEC (obj1);
662 id = OBJECT_CONFLICT_ID (obj2);
663 if (OBJECT_MIN (obj1) > id)
665 /* Expand head of the bit vector. */
666 added_head_nw = (OBJECT_MIN (obj1) - id - 1) / IRA_INT_BITS + 1;
667 nw = (OBJECT_MAX (obj1) - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
668 size = (nw + added_head_nw) * sizeof (IRA_INT_TYPE);
669 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) >= size)
/* Enough room already: shift the existing words up and zero
   the newly exposed head words in place.  */
671 memmove ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
672 vec, nw * sizeof (IRA_INT_TYPE));
673 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
/* Otherwise allocate a larger vector (1.5x plus one word), copy
   the old words after the new head, and zero head and tail.  */
678 = (3 * (nw + added_head_nw) / 2 + 1) * sizeof (IRA_INT_TYPE);
679 vec = (IRA_INT_TYPE *) ira_allocate (size);
680 memcpy ((char *) vec + added_head_nw * sizeof (IRA_INT_TYPE),
681 OBJECT_CONFLICT_ARRAY (obj1), nw * sizeof (IRA_INT_TYPE));
682 memset (vec, 0, added_head_nw * sizeof (IRA_INT_TYPE));
684 + (nw + added_head_nw) * sizeof (IRA_INT_TYPE),
685 0, size - (nw + added_head_nw) * sizeof (IRA_INT_TYPE));
686 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
687 OBJECT_CONFLICT_ARRAY (obj1) = vec;
688 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
690 OBJECT_MIN (obj1) -= added_head_nw * IRA_INT_BITS;
692 else if (OBJECT_MAX (obj1) < id)
694 nw = (id - OBJECT_MIN (obj1)) / IRA_INT_BITS + 1;
695 size = nw * sizeof (IRA_INT_TYPE);
696 if (OBJECT_CONFLICT_ARRAY_SIZE (obj1) < size)
698 /* Expand tail of the bit vector. */
699 size = (3 * nw / 2 + 1) * sizeof (IRA_INT_TYPE);
700 vec = (IRA_INT_TYPE *) ira_allocate (size);
701 memcpy (vec, OBJECT_CONFLICT_ARRAY (obj1), OBJECT_CONFLICT_ARRAY_SIZE (obj1));
702 memset ((char *) vec + OBJECT_CONFLICT_ARRAY_SIZE (obj1),
703 0, size - OBJECT_CONFLICT_ARRAY_SIZE (obj1));
704 ira_free (OBJECT_CONFLICT_ARRAY (obj1));
705 OBJECT_CONFLICT_ARRAY (obj1) = vec;
706 OBJECT_CONFLICT_ARRAY_SIZE (obj1) = size;
708 OBJECT_MAX (obj1) = id;
/* Finally record the conflict bit within the [min, max] window.  */
710 SET_MINMAX_SET_BIT (vec, id, OBJECT_MIN (obj1), OBJECT_MAX (obj1));
714 /* Add OBJ1 to the conflicts of OBJ2 and vice versa. */
716 ira_add_conflict (ira_object_t obj1, ira_object_t obj2)
718 add_to_conflicts (obj1, obj2);
719 add_to_conflicts (obj2, obj1);
722 /* Clear all conflicts of OBJ. */
724 clear_conflicts (ira_object_t obj)
726 if (OBJECT_CONFLICT_VEC_P (obj))
/* Vector form: reset the count and the NULL terminator.  */
728 OBJECT_NUM_CONFLICTS (obj) = 0;
729 OBJECT_CONFLICT_VEC (obj)[0] = NULL;
/* Bit-vector form: zero all words spanning [min, max].  */
731 else if (OBJECT_CONFLICT_ARRAY_SIZE (obj) != 0)
735 nw = (OBJECT_MAX (obj) - OBJECT_MIN (obj)) / IRA_INT_BITS + 1;
736 memset (OBJECT_CONFLICT_BITVEC (obj), 0, nw * sizeof (IRA_INT_TYPE));
740 /* The array used to find duplications in conflict vectors of
   objects: conflict_check[id] holds the tick at which id was last
   seen, so a fresh tick invalidates all entries at once.  */
742 static int *conflict_check;
744 /* The value used to mark allocation presence in conflict vector of
745 the current allocno. */
746 static int curr_conflict_check_tick;
748 /* Remove duplications in conflict vector of OBJ. */
750 compress_conflict_vec (ira_object_t obj)
752 ira_object_t *vec, conflict_obj;
755 ira_assert (OBJECT_CONFLICT_VEC_P (obj));
756 vec = OBJECT_CONFLICT_VEC (obj);
/* Bump the tick so all conflict_check entries are stale, then keep
   only the first occurrence of each conflict id (compacting in
   place with two indices).  */
757 curr_conflict_check_tick++;
758 for (i = j = 0; (conflict_obj = vec[i]) != NULL; i++)
760 int id = OBJECT_CONFLICT_ID (conflict_obj);
761 if (conflict_check[id] != curr_conflict_check_tick)
763 conflict_check[id] = curr_conflict_check_tick;
764 vec[j++] = conflict_obj;
767 OBJECT_NUM_CONFLICTS (obj) = j;
771 /* Remove duplications in conflict vectors of all allocnos. */
773 compress_conflict_vecs (void)
776 ira_allocno_iterator ai;
778 conflict_check = (int *) ira_allocate (sizeof (int) * ira_objects_num);
779 memset (conflict_check, 0, sizeof (int) * ira_objects_num);
780 curr_conflict_check_tick = 0;
781 FOR_EACH_ALLOCNO (a, ai)
783 ira_object_t obj = ALLOCNO_OBJECT (a);
784 if (OBJECT_CONFLICT_VEC_P (obj))
785 compress_conflict_vec (obj);
787 ira_free (conflict_check);
790 /* This recursive function outputs allocno A and if it is a cap the
791 function outputs its members. */
793 ira_print_expanded_allocno (ira_allocno_t a)
797 fprintf (ira_dump_file, " a%d(r%d", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
/* Print the allocno's home: a basic block ("bN") or a loop ("lN").  */
798 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
799 fprintf (ira_dump_file, ",b%d", bb->index);
801 fprintf (ira_dump_file, ",l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
/* For a cap, recursively print the member it caps after a colon.  */
802 if (ALLOCNO_CAP_MEMBER (a) != NULL)
804 fprintf (ira_dump_file, ":");
805 ira_print_expanded_allocno (ALLOCNO_CAP_MEMBER (a));
807 fprintf (ira_dump_file, ")");
810 /* Create and return the cap representing allocno A in the
   parent loop tree node.  */
813 create_cap_allocno (ira_allocno_t a)
816 ira_loop_tree_node_t parent;
817 enum reg_class cover_class;
/* Caps are only created for uncoalesced allocnos.  */
819 ira_assert (ALLOCNO_FIRST_COALESCED_ALLOCNO (a) == a
820 && ALLOCNO_NEXT_COALESCED_ALLOCNO (a) == a);
821 parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
822 cap = ira_create_allocno (ALLOCNO_REGNO (a), true, parent);
823 ALLOCNO_MODE (cap) = ALLOCNO_MODE (a);
824 cover_class = ALLOCNO_COVER_CLASS (a);
825 ira_set_allocno_cover_class (cap, cover_class);
826 ira_create_allocno_object (cap);
827 ALLOCNO_AVAILABLE_REGS_NUM (cap) = ALLOCNO_AVAILABLE_REGS_NUM (a);
/* Link cap and member to each other.  */
828 ALLOCNO_CAP_MEMBER (cap) = a;
829 ALLOCNO_CAP (a) = cap;
/* Copy costs, reference/frequency data and hard register
   conflicts from the member up into the cap.  */
830 ALLOCNO_COVER_CLASS_COST (cap) = ALLOCNO_COVER_CLASS_COST (a);
831 ALLOCNO_MEMORY_COST (cap) = ALLOCNO_MEMORY_COST (a);
832 ira_allocate_and_copy_costs
833 (&ALLOCNO_HARD_REG_COSTS (cap), cover_class, ALLOCNO_HARD_REG_COSTS (a));
834 ira_allocate_and_copy_costs
835 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (cap), cover_class,
836 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
837 ALLOCNO_BAD_SPILL_P (cap) = ALLOCNO_BAD_SPILL_P (a);
838 ALLOCNO_NREFS (cap) = ALLOCNO_NREFS (a);
839 ALLOCNO_FREQ (cap) = ALLOCNO_FREQ (a);
840 ALLOCNO_CALL_FREQ (cap) = ALLOCNO_CALL_FREQ (a);
841 merge_hard_reg_conflicts (a, cap, false);
842 ALLOCNO_CALLS_CROSSED_NUM (cap) = ALLOCNO_CALLS_CROSSED_NUM (a);
843 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
845 fprintf (ira_dump_file, " Creating cap ");
846 ira_print_expanded_allocno (cap);
847 fprintf (ira_dump_file, "\n");
852 /* Create and return allocno live range with given attributes. */
854 ira_create_live_range (ira_object_t obj, int start, int finish,
859 p = (live_range_t) pool_alloc (live_range_pool);
867 /* Copy allocno live range R and return the result. */
869 copy_live_range (live_range_t r)
873 p = (live_range_t) pool_alloc (live_range_pool);
878 /* Copy allocno live range list given by its head R and return the
   head of the copied list.  */
881 ira_copy_live_range_list (live_range_t r)
883 live_range_t p, first, last;
/* Walk the source list, copying each range and appending it so the
   original order is preserved.  */
887 for (first = last = NULL; r != NULL; r = r->next)
889 p = copy_live_range (r);
899 /* Merge ranges R1 and R2 and returns the result. The function
900 maintains the order of ranges and tries to minimize number of the
   result ranges (adjacent/overlapping ranges are coalesced).  */
903 ira_merge_live_ranges (live_range_t r1, live_range_t r2)
905 live_range_t first, last, temp;
/* Classic ordered-list merge; ranges are kept sorted by start.  */
911 for (first = last = NULL; r1 != NULL && r2 != NULL;)
913 if (r1->start < r2->start)
/* Touching or overlapping (r2 ends no earlier than just before
   r1 starts): absorb r2 into r1 and free r2.  */
919 if (r1->start <= r2->finish + 1)
921 /* Intersected ranges: merge r1 and r2 into r1. */
922 r1->start = r2->start;
923 if (r1->finish < r2->finish)
924 r1->finish = r2->finish;
927 ira_finish_live_range (temp);
930 /* To try to merge with subsequent ranges in r1. */
937 /* Add r1 to the result. */
948 /* To try to merge with subsequent ranges in r2. */
/* Append whichever list still has ranges left.  */
960 ira_assert (r1->next == NULL);
968 ira_assert (r2->next == NULL);
972 ira_assert (last->next == NULL);
977 /* Return TRUE if live ranges R1 and R2 intersect. */
979 ira_live_ranges_intersect_p (live_range_t r1, live_range_t r2)
981 /* Remember the live ranges are always kept ordered. */
982 while (r1 != NULL && r2 != NULL)
984 if (r1->start > r2->finish)
986 else if (r2->start > r1->finish)
994 /* Free allocno live range R. */
996 ira_finish_live_range (live_range_t r)
998 pool_free (live_range_pool, r);
1001 /* Free list of allocno live ranges starting with R. */
1003 ira_finish_live_range_list (live_range_t r)
1005 live_range_t next_r;
/* Save the next pointer before freeing the current node.  */
1007 for (; r != NULL; r = next_r)
1010 ira_finish_live_range (r);
1014 /* Free updated register costs of allocno A. */
1016 ira_free_allocno_updated_costs (ira_allocno_t a)
1018 enum reg_class cover_class;
1020 cover_class = ALLOCNO_COVER_CLASS (a);
/* Free both updated cost vectors (if present) and NULL the
   pointers so a later free is a no-op.  */
1021 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1022 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
1023 ALLOCNO_UPDATED_HARD_REG_COSTS (a) = NULL;
1024 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1025 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1027 ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) = NULL;
1030 /* Free the memory allocated for allocno A. */
1032 finish_allocno (ira_allocno_t a)
1034 enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
1035 ira_object_t obj = ALLOCNO_OBJECT (a);
/* Release the associated object first: its live ranges, its slot in
   the id map, and its conflict array.  */
1037 ira_finish_live_range_list (OBJECT_LIVE_RANGES (obj));
1038 ira_object_id_map[OBJECT_CONFLICT_ID (obj)] = NULL;
1039 if (OBJECT_CONFLICT_ARRAY (obj) != NULL)
1040 ira_free (OBJECT_CONFLICT_ARRAY (obj));
1041 pool_free (object_pool, obj);
/* Then drop the allocno: clear its array slot and free all four
   cost vectors before returning it to the pool.  */
1043 ira_allocnos[ALLOCNO_NUM (a)] = NULL;
1044 if (ALLOCNO_HARD_REG_COSTS (a) != NULL)
1045 ira_free_cost_vector (ALLOCNO_HARD_REG_COSTS (a), cover_class);
1046 if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL)
1047 ira_free_cost_vector (ALLOCNO_CONFLICT_HARD_REG_COSTS (a), cover_class);
1048 if (ALLOCNO_UPDATED_HARD_REG_COSTS (a) != NULL)
1049 ira_free_cost_vector (ALLOCNO_UPDATED_HARD_REG_COSTS (a), cover_class);
1050 if (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) != NULL)
1051 ira_free_cost_vector (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a),
1053 pool_free (allocno_pool, a);
1056 /* Free the memory allocated for all allocnos. */
1058 finish_allocnos (void)
1061 ira_allocno_iterator ai;
1063 FOR_EACH_ALLOCNO (a, ai)
/* After freeing every allocno, release the maps, vectors and the
   three pools created by initiate_allocnos.  */
1065 ira_free (ira_regno_allocno_map);
1066 VEC_free (ira_object_t, heap, ira_object_id_map_vec);
1067 VEC_free (ira_allocno_t, heap, allocno_vec);
1068 free_alloc_pool (allocno_pool);
1069 free_alloc_pool (object_pool);
1070 free_alloc_pool (live_range_pool);
1075 /* Pools for copies. */
1076 static alloc_pool copy_pool;
1078 /* Vec containing references to all created copies. It is a
1079 container of array ira_copies. */
1080 static VEC(ira_copy_t,heap) *copy_vec;
1082 /* The function initializes data concerning allocno copies. */
1084 initiate_copies (void)
1087 = create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
1088 copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
1093 /* Return copy connecting A1 and A2 and originated from INSN of
1094 LOOP_TREE_NODE if any. */
1096 find_allocno_copy (ira_allocno_t a1, ira_allocno_t a2, rtx insn,
1097 ira_loop_tree_node_t loop_tree_node)
1099 ira_copy_t cp, next_cp;
1100 ira_allocno_t another_a;
/* Walk A1's copy list; each copy is threaded on two lists, so pick
   the next pointer matching A1's role (first or second) in CP.  */
1102 for (cp = ALLOCNO_COPIES (a1); cp != NULL; cp = next_cp)
1104 if (cp->first == a1)
1106 next_cp = cp->next_first_allocno_copy;
1107 another_a = cp->second;
1109 else if (cp->second == a1)
1111 next_cp = cp->next_second_allocno_copy;
1112 another_a = cp->first;
/* A match requires the same partner allocno, insn and node.  */
1116 if (another_a == a2 && cp->insn == insn
1117 && cp->loop_tree_node == loop_tree_node)
1123 /* Create and return copy with given attributes LOOP_TREE_NODE, FIRST,
1124 SECOND, FREQ, CONSTRAINT_P, and INSN. */
1126 ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1127 bool constraint_p, rtx insn,
1128 ira_loop_tree_node_t loop_tree_node)
1132 cp = (ira_copy_t) pool_alloc (copy_pool);
1133 cp->num = ira_copies_num;
1135 cp->second = second;
1137 cp->constraint_p = constraint_p;
1139 cp->loop_tree_node = loop_tree_node;
/* Register the copy; the push may reallocate, so refresh the
   exported array pointer and count.  */
1140 VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
1141 ira_copies = VEC_address (ira_copy_t, copy_vec);
1142 ira_copies_num = VEC_length (ira_copy_t, copy_vec);
1146 /* Attach a copy CP to allocnos involved into the copy. */
1148 ira_add_allocno_copy_to_list (ira_copy_t cp)
1150 ira_allocno_t first = cp->first, second = cp->second;
1152 cp->prev_first_allocno_copy = NULL;
1153 cp->prev_second_allocno_copy = NULL;
/* Push CP at the head of FIRST's copy list; fix the old head's
   back pointer according to FIRST's role in that copy.  */
1154 cp->next_first_allocno_copy = ALLOCNO_COPIES (first);
1155 if (cp->next_first_allocno_copy != NULL)
1157 if (cp->next_first_allocno_copy->first == first)
1158 cp->next_first_allocno_copy->prev_first_allocno_copy = cp;
1160 cp->next_first_allocno_copy->prev_second_allocno_copy = cp;
/* Likewise push CP at the head of SECOND's copy list.  */
1162 cp->next_second_allocno_copy = ALLOCNO_COPIES (second);
1163 if (cp->next_second_allocno_copy != NULL)
1165 if (cp->next_second_allocno_copy->second == second)
1166 cp->next_second_allocno_copy->prev_second_allocno_copy = cp;
1168 cp->next_second_allocno_copy->prev_first_allocno_copy = cp;
1170 ALLOCNO_COPIES (first) = cp;
1171 ALLOCNO_COPIES (second) = cp;
1174 /* Detach a copy CP from allocnos involved into the copy. */
1176 ira_remove_allocno_copy_from_list (ira_copy_t cp)
1178 ira_allocno_t first = cp->first, second = cp->second;
1179 ira_copy_t prev, next;
/* Unlink CP from FIRST's doubly-linked copy list; neighbours are
   updated through whichever role (first/second) they play.  */
1181 next = cp->next_first_allocno_copy;
1182 prev = cp->prev_first_allocno_copy;
1184 ALLOCNO_COPIES (first) = next;
1185 else if (prev->first == first)
1186 prev->next_first_allocno_copy = next;
1188 prev->next_second_allocno_copy = next;
1191 if (next->first == first)
1192 next->prev_first_allocno_copy = prev;
1194 next->prev_second_allocno_copy = prev;
1196 cp->prev_first_allocno_copy = cp->next_first_allocno_copy = NULL;
/* Likewise unlink CP from SECOND's copy list.  */
1198 next = cp->next_second_allocno_copy;
1199 prev = cp->prev_second_allocno_copy;
1201 ALLOCNO_COPIES (second) = next;
1202 else if (prev->second == second)
1203 prev->next_second_allocno_copy = next;
1205 prev->next_first_allocno_copy = next;
1208 if (next->second == second)
1209 next->prev_second_allocno_copy = prev;
1211 next->prev_first_allocno_copy = prev;
1213 cp->prev_second_allocno_copy = cp->next_second_allocno_copy = NULL;
1216 /* Make a copy CP a canonical copy where number of the
1217 first allocno is less than the second one. */
1219 ira_swap_allocno_copy_ends_if_necessary (ira_copy_t cp)
1224 if (ALLOCNO_NUM (cp->first) <= ALLOCNO_NUM (cp->second))
1228 cp->first = cp->second;
1231 temp_cp = cp->prev_first_allocno_copy;
1232 cp->prev_first_allocno_copy = cp->prev_second_allocno_copy;
1233 cp->prev_second_allocno_copy = temp_cp;
1235 temp_cp = cp->next_first_allocno_copy;
1236 cp->next_first_allocno_copy = cp->next_second_allocno_copy;
1237 cp->next_second_allocno_copy = temp_cp;
1240 /* Create (or update frequency if the copy already exists) and return
1241 the copy of allocnos FIRST and SECOND with frequency FREQ
1242 corresponding to move insn INSN (if any) and originated from
1245 ira_add_allocno_copy (ira_allocno_t first, ira_allocno_t second, int freq,
1246 bool constraint_p, rtx insn,
1247 ira_loop_tree_node_t loop_tree_node)
1251 if ((cp = find_allocno_copy (first, second, insn, loop_tree_node)) != NULL)
1256 cp = ira_create_copy (first, second, freq, constraint_p, insn,
1258 ira_assert (first != NULL && second != NULL);
1259 ira_add_allocno_copy_to_list (cp);
1260 ira_swap_allocno_copy_ends_if_necessary (cp);
1264 /* Print info about copy CP into file F. */
1266 print_copy (FILE *f, ira_copy_t cp)
1268 fprintf (f, " cp%d:a%d(r%d)<->a%d(r%d)@%d:%s\n", cp->num,
1269 ALLOCNO_NUM (cp->first), ALLOCNO_REGNO (cp->first),
1270 ALLOCNO_NUM (cp->second), ALLOCNO_REGNO (cp->second), cp->freq,
1272 ? "move" : cp->constraint_p ? "constraint" : "shuffle");
1275 /* Print info about copy CP into stderr. */
1277 ira_debug_copy (ira_copy_t cp)
1279 print_copy (stderr, cp);
1282 /* Print info about all copies into file F. */
1284 print_copies (FILE *f)
1287 ira_copy_iterator ci;
1289 FOR_EACH_COPY (cp, ci)
/* Print info about all copies into stderr.  Debugger convenience
   wrapper around print_copies.  */
void
ira_debug_copies (void)
{
  print_copies (stderr);
}
1300 /* Print info about copies involving allocno A into file F. */
1302 print_allocno_copies (FILE *f, ira_allocno_t a)
1304 ira_allocno_t another_a;
1305 ira_copy_t cp, next_cp;
1307 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1308 for (cp = ALLOCNO_COPIES (a); cp != NULL; cp = next_cp)
1312 next_cp = cp->next_first_allocno_copy;
1313 another_a = cp->second;
1315 else if (cp->second == a)
1317 next_cp = cp->next_second_allocno_copy;
1318 another_a = cp->first;
1322 fprintf (f, " cp%d:a%d(r%d)@%d", cp->num,
1323 ALLOCNO_NUM (another_a), ALLOCNO_REGNO (another_a), cp->freq);
1328 /* Print info about copies involving allocno A into stderr. */
1330 ira_debug_allocno_copies (ira_allocno_t a)
1332 print_allocno_copies (stderr, a);
1335 /* The function frees memory allocated for copy CP. */
1337 finish_copy (ira_copy_t cp)
1339 pool_free (copy_pool, cp);
1343 /* Free memory allocated for all copies. */
1345 finish_copies (void)
1348 ira_copy_iterator ci;
1350 FOR_EACH_COPY (cp, ci)
1352 VEC_free (ira_copy_t, heap, copy_vec);
1353 free_alloc_pool (copy_pool);
1358 /* Pools for cost vectors. It is defined only for cover classes. */
1359 static alloc_pool cost_vector_pool[N_REG_CLASSES];
1361 /* The function initiates work with hard register cost vectors. It
1362 creates allocation pool for each cover class. */
1364 initiate_cost_vectors (void)
1367 enum reg_class cover_class;
1369 for (i = 0; i < ira_reg_class_cover_size; i++)
1371 cover_class = ira_reg_class_cover[i];
1372 cost_vector_pool[cover_class]
1373 = create_alloc_pool ("cost vectors",
1375 * ira_class_hard_regs_num[cover_class],
1380 /* Allocate and return a cost vector VEC for COVER_CLASS. */
1382 ira_allocate_cost_vector (enum reg_class cover_class)
1384 return (int *) pool_alloc (cost_vector_pool[cover_class]);
1387 /* Free a cost vector VEC for COVER_CLASS. */
1389 ira_free_cost_vector (int *vec, enum reg_class cover_class)
1391 ira_assert (vec != NULL);
1392 pool_free (cost_vector_pool[cover_class], vec);
1395 /* Finish work with hard register cost vectors. Release allocation
1396 pool for each cover class. */
1398 finish_cost_vectors (void)
1401 enum reg_class cover_class;
1403 for (i = 0; i < ira_reg_class_cover_size; i++)
1405 cover_class = ira_reg_class_cover[i];
1406 free_alloc_pool (cost_vector_pool[cover_class]);
1412 /* The current loop tree node and its regno allocno map. */
1413 ira_loop_tree_node_t ira_curr_loop_tree_node;
1414 ira_allocno_t *ira_curr_regno_allocno_map;
1416 /* This recursive function traverses loop tree with root LOOP_NODE
1417 calling non-null functions PREORDER_FUNC and POSTORDER_FUNC
1418 correspondingly in preorder and postorder. The function sets up
1419 IRA_CURR_LOOP_TREE_NODE and IRA_CURR_REGNO_ALLOCNO_MAP. If BB_P,
1420 basic block nodes of LOOP_NODE is also processed (before its
1423 ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
1424 void (*preorder_func) (ira_loop_tree_node_t),
1425 void (*postorder_func) (ira_loop_tree_node_t))
1427 ira_loop_tree_node_t subloop_node;
1429 ira_assert (loop_node->bb == NULL);
1430 ira_curr_loop_tree_node = loop_node;
1431 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1433 if (preorder_func != NULL)
1434 (*preorder_func) (loop_node);
1437 for (subloop_node = loop_node->children;
1438 subloop_node != NULL;
1439 subloop_node = subloop_node->next)
1440 if (subloop_node->bb != NULL)
1442 if (preorder_func != NULL)
1443 (*preorder_func) (subloop_node);
1445 if (postorder_func != NULL)
1446 (*postorder_func) (subloop_node);
1449 for (subloop_node = loop_node->subloops;
1450 subloop_node != NULL;
1451 subloop_node = subloop_node->subloop_next)
1453 ira_assert (subloop_node->bb == NULL);
1454 ira_traverse_loop_tree (bb_p, subloop_node,
1455 preorder_func, postorder_func);
1458 ira_curr_loop_tree_node = loop_node;
1459 ira_curr_regno_allocno_map = ira_curr_loop_tree_node->regno_allocno_map;
1461 if (postorder_func != NULL)
1462 (*postorder_func) (loop_node);
1467 /* The basic block currently being processed. */
1468 static basic_block curr_bb;
1470 /* This recursive function creates allocnos corresponding to
1471 pseudo-registers containing in X. True OUTPUT_P means that X is
1474 create_insn_allocnos (rtx x, bool output_p)
1478 enum rtx_code code = GET_CODE (x);
1484 if ((regno = REGNO (x)) >= FIRST_PSEUDO_REGISTER)
1488 if ((a = ira_curr_regno_allocno_map[regno]) == NULL)
1489 a = ira_create_allocno (regno, false, ira_curr_loop_tree_node);
1491 ALLOCNO_NREFS (a)++;
1492 ALLOCNO_FREQ (a) += REG_FREQ_FROM_BB (curr_bb);
1494 bitmap_set_bit (ira_curr_loop_tree_node->modified_regnos, regno);
1498 else if (code == SET)
1500 create_insn_allocnos (SET_DEST (x), true);
1501 create_insn_allocnos (SET_SRC (x), false);
1504 else if (code == CLOBBER)
1506 create_insn_allocnos (XEXP (x, 0), true);
1509 else if (code == MEM)
1511 create_insn_allocnos (XEXP (x, 0), false);
1514 else if (code == PRE_DEC || code == POST_DEC || code == PRE_INC ||
1515 code == POST_INC || code == POST_MODIFY || code == PRE_MODIFY)
1517 create_insn_allocnos (XEXP (x, 0), true);
1518 create_insn_allocnos (XEXP (x, 0), false);
1522 fmt = GET_RTX_FORMAT (code);
1523 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1526 create_insn_allocnos (XEXP (x, i), output_p);
1527 else if (fmt[i] == 'E')
1528 for (j = 0; j < XVECLEN (x, i); j++)
1529 create_insn_allocnos (XVECEXP (x, i, j), output_p);
1533 /* Create allocnos corresponding to pseudo-registers living in the
1534 basic block represented by the corresponding loop tree node
1537 create_bb_allocnos (ira_loop_tree_node_t bb_node)
1544 curr_bb = bb = bb_node->bb;
1545 ira_assert (bb != NULL);
1546 FOR_BB_INSNS_REVERSE (bb, insn)
1547 if (NONDEBUG_INSN_P (insn))
1548 create_insn_allocnos (PATTERN (insn), false);
1549 /* It might be a allocno living through from one subloop to
1551 EXECUTE_IF_SET_IN_REG_SET (DF_LR_IN (bb), FIRST_PSEUDO_REGISTER, i, bi)
1552 if (ira_curr_regno_allocno_map[i] == NULL)
1553 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1556 /* Create allocnos corresponding to pseudo-registers living on edge E
1557 (a loop entry or exit). Also mark the allocnos as living on the
1560 create_loop_allocnos (edge e)
1563 bitmap live_in_regs, border_allocnos;
1565 ira_loop_tree_node_t parent;
1567 live_in_regs = DF_LR_IN (e->dest);
1568 border_allocnos = ira_curr_loop_tree_node->border_allocnos;
1569 EXECUTE_IF_SET_IN_REG_SET (DF_LR_OUT (e->src),
1570 FIRST_PSEUDO_REGISTER, i, bi)
1571 if (bitmap_bit_p (live_in_regs, i))
1573 if (ira_curr_regno_allocno_map[i] == NULL)
1575 /* The order of creations is important for right
1576 ira_regno_allocno_map. */
1577 if ((parent = ira_curr_loop_tree_node->parent) != NULL
1578 && parent->regno_allocno_map[i] == NULL)
1579 ira_create_allocno (i, false, parent);
1580 ira_create_allocno (i, false, ira_curr_loop_tree_node);
1582 bitmap_set_bit (border_allocnos,
1583 ALLOCNO_NUM (ira_curr_regno_allocno_map[i]));
1587 /* Create allocnos corresponding to pseudo-registers living in loop
1588 represented by the corresponding loop tree node LOOP_NODE. This
1589 function is called by ira_traverse_loop_tree. */
1591 create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
1593 if (loop_node->bb != NULL)
1594 create_bb_allocnos (loop_node);
1595 else if (loop_node != ira_loop_tree_root)
1600 VEC (edge, heap) *edges;
1602 FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
1603 if (e->src != loop_node->loop->latch)
1604 create_loop_allocnos (e);
1606 edges = get_loop_exit_edges (loop_node->loop);
1607 for (i = 0; VEC_iterate (edge, edges, i, e); i++)
1608 create_loop_allocnos (e);
1609 VEC_free (edge, heap, edges);
1613 /* Propagate information about allocnos modified inside the loop given
1614 by its LOOP_TREE_NODE to its parent. */
1616 propagate_modified_regnos (ira_loop_tree_node_t loop_tree_node)
1618 if (loop_tree_node == ira_loop_tree_root)
1620 ira_assert (loop_tree_node->bb == NULL);
1621 bitmap_ior_into (loop_tree_node->parent->modified_regnos,
1622 loop_tree_node->modified_regnos);
1625 /* Propagate new info about allocno A (see comments about accumulated
1626 info in allocno definition) to the corresponding allocno on upper
1627 loop tree level. So allocnos on upper levels accumulate
1628 information about the corresponding allocnos in nested regions.
1629 The new info means allocno info finally calculated in this
1632 propagate_allocno_info (void)
1635 ira_allocno_t a, parent_a;
1636 ira_loop_tree_node_t parent;
1637 enum reg_class cover_class;
1639 if (flag_ira_region != IRA_REGION_ALL
1640 && flag_ira_region != IRA_REGION_MIXED)
1642 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1643 for (a = ira_regno_allocno_map[i];
1645 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1646 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1647 && (parent_a = parent->regno_allocno_map[i]) != NULL
1648 /* There are no caps yet at this point. So use
1649 border_allocnos to find allocnos for the propagation. */
1650 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1653 if (! ALLOCNO_BAD_SPILL_P (a))
1654 ALLOCNO_BAD_SPILL_P (parent_a) = false;
1655 ALLOCNO_NREFS (parent_a) += ALLOCNO_NREFS (a);
1656 ALLOCNO_FREQ (parent_a) += ALLOCNO_FREQ (a);
1657 ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
1658 merge_hard_reg_conflicts (a, parent_a, true);
1659 ALLOCNO_CALLS_CROSSED_NUM (parent_a)
1660 += ALLOCNO_CALLS_CROSSED_NUM (a);
1661 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
1662 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
1663 cover_class = ALLOCNO_COVER_CLASS (a);
1664 ira_assert (cover_class == ALLOCNO_COVER_CLASS (parent_a));
1665 ira_allocate_and_accumulate_costs
1666 (&ALLOCNO_HARD_REG_COSTS (parent_a), cover_class,
1667 ALLOCNO_HARD_REG_COSTS (a));
1668 ira_allocate_and_accumulate_costs
1669 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a),
1671 ALLOCNO_CONFLICT_HARD_REG_COSTS (a));
1672 ALLOCNO_COVER_CLASS_COST (parent_a)
1673 += ALLOCNO_COVER_CLASS_COST (a);
1674 ALLOCNO_MEMORY_COST (parent_a) += ALLOCNO_MEMORY_COST (a);
1678 /* Create allocnos corresponding to pseudo-registers in the current
1679 function. Traverse the loop tree for this. */
1681 create_allocnos (void)
1683 /* We need to process BB first to correctly link allocnos by member
1684 next_regno_allocno. */
1685 ira_traverse_loop_tree (true, ira_loop_tree_root,
1686 create_loop_tree_node_allocnos, NULL);
1688 ira_traverse_loop_tree (false, ira_loop_tree_root, NULL,
1689 propagate_modified_regnos);
1694 /* This page contains functions to remove some regions from a separate
1695 register allocation. We remove regions whose separate allocation
1696 will hardly improve the result. As a result we speed up regional
1697 register allocation. */
1699 /* The function changes the object in range list given by R to OBJ. */
1701 change_object_in_range_list (live_range_t r, ira_object_t obj)
1703 for (; r != NULL; r = r->next)
1707 /* Move all live ranges associated with allocno FROM to allocno TO. */
1709 move_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1711 ira_object_t from_obj = ALLOCNO_OBJECT (from);
1712 ira_object_t to_obj = ALLOCNO_OBJECT (to);
1713 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
1715 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1717 fprintf (ira_dump_file,
1718 " Moving ranges of a%dr%d to a%dr%d: ",
1719 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1720 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1721 ira_print_live_range_list (ira_dump_file, lr);
1723 change_object_in_range_list (lr, to_obj);
1724 OBJECT_LIVE_RANGES (to_obj)
1725 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
1726 OBJECT_LIVE_RANGES (from_obj) = NULL;
1729 /* Copy all live ranges associated with allocno FROM to allocno TO. */
1731 copy_allocno_live_ranges (ira_allocno_t from, ira_allocno_t to)
1733 ira_object_t from_obj = ALLOCNO_OBJECT (from);
1734 ira_object_t to_obj = ALLOCNO_OBJECT (to);
1735 live_range_t lr = OBJECT_LIVE_RANGES (from_obj);
1737 if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
1739 fprintf (ira_dump_file,
1740 " Copying ranges of a%dr%d to a%dr%d: ",
1741 ALLOCNO_NUM (from), ALLOCNO_REGNO (from),
1742 ALLOCNO_NUM (to), ALLOCNO_REGNO (to));
1743 ira_print_live_range_list (ira_dump_file, lr);
1745 lr = ira_copy_live_range_list (lr);
1746 change_object_in_range_list (lr, to_obj);
1747 OBJECT_LIVE_RANGES (to_obj)
1748 = ira_merge_live_ranges (lr, OBJECT_LIVE_RANGES (to_obj));
1751 /* Return TRUE if NODE represents a loop with low register
1754 low_pressure_loop_node_p (ira_loop_tree_node_t node)
1757 enum reg_class cover_class;
1759 if (node->bb != NULL)
1762 for (i = 0; i < ira_reg_class_cover_size; i++)
1764 cover_class = ira_reg_class_cover[i];
1765 if (node->reg_pressure[cover_class]
1766 > ira_available_class_regs[cover_class])
1772 /* Sort loops for marking them for removal. We put already marked
1773 loops first, then less frequent loops next, and then outer loops
1776 loop_compare_func (const void *v1p, const void *v2p)
1779 ira_loop_tree_node_t l1 = *(const ira_loop_tree_node_t *) v1p;
1780 ira_loop_tree_node_t l2 = *(const ira_loop_tree_node_t *) v2p;
1782 ira_assert (l1->parent != NULL && l2->parent != NULL);
1783 if (l1->to_remove_p && ! l2->to_remove_p)
1785 if (! l1->to_remove_p && l2->to_remove_p)
1787 if ((diff = l1->loop->header->frequency - l2->loop->header->frequency) != 0)
1789 if ((diff = (int) loop_depth (l1->loop) - (int) loop_depth (l2->loop)) != 0)
1791 /* Make sorting stable. */
1792 return l1->loop->num - l2->loop->num;
1796 /* Mark loops which should be removed from regional allocation. We
1797 remove a loop with low register pressure inside another loop with
1798 register pressure. In this case a separate allocation of the loop
1799 hardly helps (for irregular register file architecture it could
1800 help by choosing a better hard register in the loop but we prefer
1801 faster allocation even in this case). We also remove cheap loops
1802 if there are more than IRA_MAX_LOOPS_NUM of them. */
1804 mark_loops_for_removal (void)
1807 ira_loop_tree_node_t *sorted_loops;
1811 = (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
1812 * VEC_length (loop_p,
1814 for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1815 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1817 if (ira_loop_nodes[i].parent == NULL)
1819 /* Don't remove the root. */
1820 ira_loop_nodes[i].to_remove_p = false;
1823 sorted_loops[n++] = &ira_loop_nodes[i];
1824 ira_loop_nodes[i].to_remove_p
1825 = (low_pressure_loop_node_p (ira_loop_nodes[i].parent)
1826 && low_pressure_loop_node_p (&ira_loop_nodes[i]));
1828 qsort (sorted_loops, n, sizeof (ira_loop_tree_node_t), loop_compare_func);
1829 for (i = 0; n - i + 1 > IRA_MAX_LOOPS_NUM; i++)
1831 sorted_loops[i]->to_remove_p = true;
1832 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1835 " Mark loop %d (header %d, freq %d, depth %d) for removal (%s)\n",
1836 sorted_loops[i]->loop->num, sorted_loops[i]->loop->header->index,
1837 sorted_loops[i]->loop->header->frequency,
1838 loop_depth (sorted_loops[i]->loop),
1839 low_pressure_loop_node_p (sorted_loops[i]->parent)
1840 && low_pressure_loop_node_p (sorted_loops[i])
1841 ? "low pressure" : "cheap loop");
1843 ira_free (sorted_loops);
1846 /* Mark all loops but root for removing. */
1848 mark_all_loops_for_removal (void)
1853 for (i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
1854 if (ira_loop_nodes[i].regno_allocno_map != NULL)
1856 if (ira_loop_nodes[i].parent == NULL)
1858 /* Don't remove the root. */
1859 ira_loop_nodes[i].to_remove_p = false;
1862 ira_loop_nodes[i].to_remove_p = true;
1863 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1866 " Mark loop %d (header %d, freq %d, depth %d) for removal\n",
1867 ira_loop_nodes[i].loop->num,
1868 ira_loop_nodes[i].loop->header->index,
1869 ira_loop_nodes[i].loop->header->frequency,
1870 loop_depth (ira_loop_nodes[i].loop));
1874 /* Definition of vector of loop tree nodes. */
1875 DEF_VEC_P(ira_loop_tree_node_t);
1876 DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
1878 /* Vec containing references to all removed loop tree nodes. */
1879 static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
1881 /* Vec containing references to all children of loop tree nodes. */
1882 static VEC(ira_loop_tree_node_t,heap) *children_vec;
1884 /* Remove subregions of NODE if their separate allocation will not
1885 improve the result. */
1887 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
1891 ira_loop_tree_node_t subnode;
1893 remove_p = node->to_remove_p;
1895 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
1896 start = VEC_length (ira_loop_tree_node_t, children_vec);
1897 for (subnode = node->children; subnode != NULL; subnode = subnode->next)
1898 if (subnode->bb == NULL)
1899 remove_uneccesary_loop_nodes_from_loop_tree (subnode);
1901 VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
1902 node->children = node->subloops = NULL;
1905 VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
1908 while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
1910 subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
1911 subnode->parent = node;
1912 subnode->next = node->children;
1913 node->children = subnode;
1914 if (subnode->bb == NULL)
1916 subnode->subloop_next = node->subloops;
1917 node->subloops = subnode;
1922 /* Return TRUE if NODE is inside PARENT. */
1924 loop_is_inside_p (ira_loop_tree_node_t node, ira_loop_tree_node_t parent)
1926 for (node = node->parent; node != NULL; node = node->parent)
1932 /* Sort allocnos according to their order in regno allocno list. */
1934 regno_allocno_order_compare_func (const void *v1p, const void *v2p)
1936 ira_allocno_t a1 = *(const ira_allocno_t *) v1p;
1937 ira_allocno_t a2 = *(const ira_allocno_t *) v2p;
1938 ira_loop_tree_node_t n1 = ALLOCNO_LOOP_TREE_NODE (a1);
1939 ira_loop_tree_node_t n2 = ALLOCNO_LOOP_TREE_NODE (a2);
1941 if (loop_is_inside_p (n1, n2))
1943 else if (loop_is_inside_p (n2, n1))
1945 /* If allocnos are equally good, sort by allocno numbers, so that
1946 the results of qsort leave nothing to chance. We put allocnos
1947 with higher number first in the list because it is the original
1948 order for allocnos from loops on the same levels. */
1949 return ALLOCNO_NUM (a2) - ALLOCNO_NUM (a1);
1952 /* This array is used to sort allocnos to restore allocno order in
1953 the regno allocno list. */
1954 static ira_allocno_t *regno_allocnos;
1956 /* Restore allocno order for REGNO in the regno allocno list. */
1958 ira_rebuild_regno_allocno_list (int regno)
1963 for (n = 0, a = ira_regno_allocno_map[regno];
1965 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1966 regno_allocnos[n++] = a;
1968 qsort (regno_allocnos, n, sizeof (ira_allocno_t),
1969 regno_allocno_order_compare_func);
1970 for (i = 1; i < n; i++)
1971 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[i - 1]) = regno_allocnos[i];
1972 ALLOCNO_NEXT_REGNO_ALLOCNO (regno_allocnos[n - 1]) = NULL;
1973 ira_regno_allocno_map[regno] = regno_allocnos[0];
1974 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1975 fprintf (ira_dump_file, " Rebuilding regno allocno list for %d\n", regno);
1978 /* Propagate info from allocno FROM_A to allocno A. */
1980 propagate_some_info_from_allocno (ira_allocno_t a, ira_allocno_t from_a)
1982 enum reg_class cover_class;
1984 merge_hard_reg_conflicts (from_a, a, false);
1985 ALLOCNO_NREFS (a) += ALLOCNO_NREFS (from_a);
1986 ALLOCNO_FREQ (a) += ALLOCNO_FREQ (from_a);
1987 ALLOCNO_CALL_FREQ (a) += ALLOCNO_CALL_FREQ (from_a);
1988 ALLOCNO_CALLS_CROSSED_NUM (a) += ALLOCNO_CALLS_CROSSED_NUM (from_a);
1989 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a)
1990 += ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (from_a);
1991 if (! ALLOCNO_BAD_SPILL_P (from_a))
1992 ALLOCNO_BAD_SPILL_P (a) = false;
1993 cover_class = ALLOCNO_COVER_CLASS (from_a);
1994 ira_assert (cover_class == ALLOCNO_COVER_CLASS (a));
1995 ira_allocate_and_accumulate_costs (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1996 ALLOCNO_HARD_REG_COSTS (from_a));
1997 ira_allocate_and_accumulate_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
1999 ALLOCNO_CONFLICT_HARD_REG_COSTS (from_a));
2000 ALLOCNO_COVER_CLASS_COST (a) += ALLOCNO_COVER_CLASS_COST (from_a);
2001 ALLOCNO_MEMORY_COST (a) += ALLOCNO_MEMORY_COST (from_a);
2004 /* Remove allocnos from loops removed from the allocation
/* Walks the regno allocno list of every pseudo; an allocno whose loop
   node is marked to_remove_p is either re-parented to the nearest
   surviving ancestor node (when that node has no allocno for the
   regno yet) or merged into the ancestor's existing allocno and
   finished.  NOTE(review): this listing appears to have lines elided
   (original line numbering skips); the code below is the surviving
   fragment and is kept byte-identical.  */
2007 remove_unnecessary_allocnos (void)
2010 bool merged_p, rebuild_p;
2011 ira_allocno_t a, prev_a, next_a, parent_a;
2012 ira_loop_tree_node_t a_node, parent;
2015 regno_allocnos = NULL;
2016 for (regno = max_reg_num () - 1; regno >= FIRST_PSEUDO_REGISTER; regno--)
2019 for (prev_a = NULL, a = ira_regno_allocno_map[regno];
2023 next_a = ALLOCNO_NEXT_REGNO_ALLOCNO (a);
2024 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2025 if (! a_node->to_remove_p)
2029 for (parent = a_node->parent;
2030 (parent_a = parent->regno_allocno_map[regno]) == NULL
2031 && parent->to_remove_p;
2032 parent = parent->parent)
2034 if (parent_a == NULL)
2036 /* There are no allocnos with the same regno in
2037 upper region -- just move the allocno to the
2040 ALLOCNO_LOOP_TREE_NODE (a) = parent;
2041 parent->regno_allocno_map[regno] = a;
2042 bitmap_set_bit (parent->all_allocnos, ALLOCNO_NUM (a));
2047 /* Remove the allocno and update info of allocno in
2048 the upper region. */
2050 ira_regno_allocno_map[regno] = next_a;
2052 ALLOCNO_NEXT_REGNO_ALLOCNO (prev_a) = next_a;
2053 move_allocno_live_ranges (a, parent_a);
2055 propagate_some_info_from_allocno (parent_a, a);
2056 /* Remove it from the corresponding regno allocno
2057 map to avoid info propagation of subsequent
2058 allocno into this already removed allocno. */
2059 a_node->regno_allocno_map[regno] = NULL;
2065 /* We need to restore the order in regno allocno list. */
2067 if (regno_allocnos == NULL)
/* regno_allocnos is sized once, lazily, for the largest possible
   allocno count, then reused for every rebuilt regno.  */
2069 = (ira_allocno_t *) ira_allocate (sizeof (ira_allocno_t)
2070 * ira_allocnos_num);
2071 ira_rebuild_regno_allocno_list (regno);
/* Merging live ranges invalidates the start/finish chains -- rebuild
   them once at the end.  */
2075 ira_rebuild_start_finish_chains ();
2076 if (regno_allocnos != NULL)
2077 ira_free (regno_allocnos);
2080 /* Remove allocnos from all loops but the root. */
/* Used when ALL regions but the root are being dropped: every
   non-cap allocno below the root is either promoted to the root node
   (if the root has no allocno for its regno) or merged into the
   root's allocno.  NOTE(review): this listing appears to have lines
   elided (original line numbering skips); the fragment below is kept
   byte-identical.  */
2082 remove_low_level_allocnos (void)
2085 bool merged_p, propagate_p;
2086 ira_allocno_t a, top_a;
2087 ira_loop_tree_node_t a_node, parent;
2088 ira_allocno_iterator ai;
2091 FOR_EACH_ALLOCNO (a, ai)
2093 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2094 if (a_node == ira_loop_tree_root || ALLOCNO_CAP_MEMBER (a) != NULL)
2096 regno = ALLOCNO_REGNO (a);
2097 if ((top_a = ira_loop_tree_root->regno_allocno_map[regno]) == NULL)
2099 ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
2100 ira_loop_tree_root->regno_allocno_map[regno] = a;
2103 propagate_p = a_node->parent->regno_allocno_map[regno] == NULL;
2104 /* Remove the allocno and update info of allocno in the upper
2106 move_allocno_live_ranges (a, top_a);
2109 propagate_some_info_from_allocno (top_a, a);
/* Second pass: sanity-check cap relationships before the final
   cleanup pass.  */
2111 FOR_EACH_ALLOCNO (a, ai)
2113 a_node = ALLOCNO_LOOP_TREE_NODE (a);
2114 if (a_node == ira_loop_tree_root)
2116 parent = a_node->parent;
2117 regno = ALLOCNO_REGNO (a);
2118 if (ALLOCNO_CAP_MEMBER (a) != NULL)
2119 ira_assert (ALLOCNO_CAP (a) != NULL);
2120 else if (ALLOCNO_CAP (a) == NULL)
2121 ira_assert (parent->regno_allocno_map[regno] != NULL);
/* Third pass: reset per-allocno links and fold the total conflict
   set into the plain conflict set for surviving root allocnos.  */
2123 FOR_EACH_ALLOCNO (a, ai)
2125 regno = ALLOCNO_REGNO (a);
2126 if (ira_loop_tree_root->regno_allocno_map[regno] == a)
2128 ira_object_t obj = ALLOCNO_OBJECT (a);
2130 ira_regno_allocno_map[regno] = a;
2131 ALLOCNO_NEXT_REGNO_ALLOCNO (a) = NULL;
2132 ALLOCNO_CAP_MEMBER (a) = NULL;
2133 COPY_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
2134 OBJECT_TOTAL_CONFLICT_HARD_REGS (obj));
2136 if (ALLOCNO_TOTAL_NO_STACK_REG_P (a))
2137 ALLOCNO_NO_STACK_REG_P (a) = true;
2144 ira_rebuild_start_finish_chains ();
2147 /* Remove loops from consideration. We remove all loops except for
2148 root if ALL_P or loops for which a separate allocation will not
2149 improve the result. We have to do this after allocno creation and
2150 their costs and cover class evaluation because only after that the
2151 register pressure can be known and is calculated. */
2153 remove_unnecessary_regions (bool all_p)
2156 mark_all_loops_for_removal ();
2158 mark_loops_for_removal ();
2160 = VEC_alloc (ira_loop_tree_node_t, heap,
2161 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2163 = VEC_alloc (ira_loop_tree_node_t, heap,
2164 last_basic_block + VEC_length (loop_p, ira_loops.larray));
2165 remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
2166 VEC_free (ira_loop_tree_node_t, heap, children_vec);
2168 remove_low_level_allocnos ();
2170 remove_unnecessary_allocnos ();
2171 while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
2172 finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
2173 VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
2178 /* At this point true value of allocno attribute bad_spill_p means
2179 that there is an insn where allocno occurs and where the allocno
2180 can not be used as memory. The function updates the attribute, now
2181 it can be true only for allocnos which can not be used as memory in
2182 an insn and in whose live ranges there is other allocno deaths.
2183 Spilling allocnos with true value will not improve the code because
2184 it will not make other allocnos colorable and additional reloads
2185 for the corresponding pseudo will be generated in reload pass for
2186 each insn it occurs.
2188 This is a trick mentioned in one classic article of Chaitin etc
2189 which is frequently omitted in other implementations of RA based on
2192 update_bad_spill_attribute (void)
2196 ira_allocno_iterator ai;
2198 enum reg_class cover_class;
2199 bitmap_head dead_points[N_REG_CLASSES];
2201 for (i = 0; i < ira_reg_class_cover_size; i++)
2203 cover_class = ira_reg_class_cover[i];
2204 bitmap_initialize (&dead_points[cover_class], ®_obstack);
2206 FOR_EACH_ALLOCNO (a, ai)
2208 ira_object_t obj = ALLOCNO_OBJECT (a);
2209 cover_class = ALLOCNO_COVER_CLASS (a);
2210 if (cover_class == NO_REGS)
2212 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
2213 bitmap_set_bit (&dead_points[cover_class], r->finish);
2215 FOR_EACH_ALLOCNO (a, ai)
2217 ira_object_t obj = ALLOCNO_OBJECT (a);
2218 cover_class = ALLOCNO_COVER_CLASS (a);
2219 if (cover_class == NO_REGS)
2221 if (! ALLOCNO_BAD_SPILL_P (a))
2223 for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
2225 for (i = r->start + 1; i < r->finish; i++)
2226 if (bitmap_bit_p (&dead_points[cover_class], i))
2232 ALLOCNO_BAD_SPILL_P (a) = false;
2234 for (i = 0; i < ira_reg_class_cover_size; i++)
2236 cover_class = ira_reg_class_cover[i];
2237 bitmap_clear (&dead_points[cover_class]);
2243 /* Set up minimal and maximal live range points for allocnos. */
/* First computes OBJECT_MIN/OBJECT_MAX from each object's live range
   list, then accumulates the ranges upward through caps and parent
   allocnos, and finally (when checking is enabled) verifies the
   results are within [0, ira_max_point].  NOTE(review): this listing
   appears to have lines elided (original line numbering skips); the
   fragment below is kept byte-identical.  */
2245 setup_min_max_allocno_live_range_point (void)
2248 ira_allocno_t a, parent_a, cap;
2249 ira_allocno_iterator ai;
2251 ira_loop_tree_node_t parent;
2253 FOR_EACH_ALLOCNO (a, ai)
2255 ira_object_t obj = ALLOCNO_OBJECT (a);
2256 r = OBJECT_LIVE_RANGES (obj);
/* The range list is ordered with the latest range first, so MAX is
   the head's finish and MIN is the tail's start.  */
2259 OBJECT_MAX (obj) = r->finish;
2260 for (; r->next != NULL; r = r->next)
2262 OBJECT_MIN (obj) = r->start;
2264 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
2265 for (a = ira_regno_allocno_map[i];
2267 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
2269 ira_object_t obj = ALLOCNO_OBJECT (a);
2270 ira_object_t parent_obj;
2272 if (OBJECT_MAX (obj) < 0)
2274 ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
2275 /* Accumulation of range info. */
2276 if (ALLOCNO_CAP (a) != NULL)
2278 for (cap = ALLOCNO_CAP (a); cap != NULL; cap = ALLOCNO_CAP (cap))
2280 ira_object_t cap_obj = ALLOCNO_OBJECT (cap);
2281 if (OBJECT_MAX (cap_obj) < OBJECT_MAX (obj))
2282 OBJECT_MAX (cap_obj) = OBJECT_MAX (obj);
2283 if (OBJECT_MIN (cap_obj) > OBJECT_MIN (obj))
2284 OBJECT_MIN (cap_obj) = OBJECT_MIN (obj);
2288 if ((parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) == NULL)
2290 parent_a = parent->regno_allocno_map[i];
2291 parent_obj = ALLOCNO_OBJECT (parent_a);
2292 if (OBJECT_MAX (parent_obj) < OBJECT_MAX (obj))
2293 OBJECT_MAX (parent_obj) = OBJECT_MAX (obj);
2294 if (OBJECT_MIN (parent_obj) > OBJECT_MIN (obj))
2295 OBJECT_MIN (parent_obj) = OBJECT_MIN (obj);
2297 #ifdef ENABLE_IRA_CHECKING
2298 FOR_EACH_ALLOCNO (a, ai)
2300 ira_object_t obj = ALLOCNO_OBJECT (a);
2301 if ((0 <= OBJECT_MIN (obj) && OBJECT_MIN (obj) <= ira_max_point)
2302 && (0 <= OBJECT_MAX (obj) && OBJECT_MAX (obj) <= ira_max_point))
2309 /* Sort allocnos according to their live ranges. Allocnos with
2310 smaller cover class are put first unless we use priority coloring.
2311 Allocnos with the same cover class are ordered according their start
2312 (min). Allocnos with the same start are ordered according their
2315 allocno_range_compare_func (const void *v1p, const void *v2p)
2318 ira_object_t obj1 = *(const ira_object_t *) v1p;
2319 ira_object_t obj2 = *(const ira_object_t *) v2p;
2320 ira_allocno_t a1 = OBJECT_ALLOCNO (obj1);
2321 ira_allocno_t a2 = OBJECT_ALLOCNO (obj2);
2323 if (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
2324 && (diff = ALLOCNO_COVER_CLASS (a1) - ALLOCNO_COVER_CLASS (a2)) != 0)
2326 if ((diff = OBJECT_MIN (obj1) - OBJECT_MIN (obj2)) != 0)
2328 if ((diff = OBJECT_MAX (obj1) - OBJECT_MAX (obj2)) != 0)
2330 return ALLOCNO_NUM (a1) - ALLOCNO_NUM (a2);
2333 /* Sort ira_object_id_map and set up conflict id of allocnos. */
sort_conflict_id_map (void)
  /* Fill ira_object_id_map with all allocno objects, sort it by live
     range (see allocno_range_compare_func) and assign each object a
     conflict id equal to its position in the sorted order.  */
  ira_allocno_iterator ai;
  FOR_EACH_ALLOCNO (a, ai)
    ira_object_id_map[num++] = ALLOCNO_OBJECT (a);
  qsort (ira_object_id_map, num, sizeof (ira_object_t),
	 allocno_range_compare_func);
  for (i = 0; i < num; i++)
      ira_object_t obj = ira_object_id_map[i];
      gcc_assert (obj != NULL);
      OBJECT_CONFLICT_ID (obj) = i;
  /* Clear the unused tail of the map (slots of removed objects).  */
  for (i = num; i < ira_objects_num; i++)
    ira_object_id_map[i] = NULL;
2356 /* Set up minimal and maximal conflict ids of allocnos with which
2357 given allocno can conflict. */
setup_min_max_conflict_allocno_ids (void)
  int i, j, min, max, start, finish, first_not_finished, filled_area_start;
  int *live_range_min, *last_lived;
  /* live_range_min[i] saves the original range start of the object
     with conflict id I before OBJECT_MIN is reused below to hold the
     minimal conflict id.  */
  live_range_min = (int *) ira_allocate (sizeof (int) * ira_objects_num);
  first_not_finished = -1;
  /* First pass (increasing conflict ids): for each object compute the
     minimal conflict id of objects it can conflict with; store it in
     OBJECT_MIN.  */
  for (i = 0; i < ira_objects_num; i++)
      ira_object_t obj = ira_object_id_map[i];
      a = OBJECT_ALLOCNO (obj);
	  || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
	      && cover_class != (int) ALLOCNO_COVER_CLASS (a)))
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  first_not_finished = i;
	  start = OBJECT_MIN (obj);
	  /* If we skip an object here, the objects with smaller ids
	     will be skipped as well because of the secondary sorting
	     by range finishes (see allocno_range_compare_func).  */
	  while (first_not_finished < i
		 && start > OBJECT_MAX (ira_object_id_map
					[first_not_finished]))
	    first_not_finished++;
	  min = first_not_finished;
	  /* We could increase min further in this case but it is good
	     enough.  */
      live_range_min[i] = OBJECT_MIN (obj);
      OBJECT_MIN (obj) = min;
  /* Second pass (decreasing conflict ids): compute the maximal
     conflict id; store it in OBJECT_MAX.  last_lived[P] tracks the
     biggest conflict id of an object living at point P.  */
  last_lived = (int *) ira_allocate (sizeof (int) * ira_max_point);
  filled_area_start = -1;
  for (i = ira_objects_num - 1; i >= 0; i--)
      ira_object_t obj = ira_object_id_map[i];
      a = OBJECT_ALLOCNO (obj);
	  || (flag_ira_algorithm != IRA_ALGORITHM_PRIORITY
	      && cover_class != (int) ALLOCNO_COVER_CLASS (a)))
	  cover_class = ALLOCNO_COVER_CLASS (a);
	  for (j = 0; j < ira_max_point; j++)
	  filled_area_start = ira_max_point;
      min = live_range_min[i];
      finish = OBJECT_MAX (obj);
      max = last_lived[finish];
      /* We could decrease max further in this case but it is good
	 enough.  */
	max = OBJECT_CONFLICT_ID (obj) - 1;
      OBJECT_MAX (obj) = max;
      /* In filling, we can go beyond the range finish to recognize
	 intersections quickly: if the finish of a subsequently
	 processed object (which has a smaller conflict id) lies
	 beyond this range finish, the ranges definitely intersect
	 (objects with bigger conflict ids have range starts no
	 smaller than objects with smaller ids).  */
      for (j = min; j < filled_area_start; j++)
      filled_area_start = min;
  ira_free (last_lived);
  ira_free (live_range_min);
  /* NOTE(review): the header of this function is not visible in this
     chunk; the body walks all allocnos and creates cap allocnos (via
     create_cap_allocno) for allocnos of non-root loop tree nodes that
     have no cap yet and are not border allocnos of their node.  */
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;
  FOR_EACH_ALLOCNO (a, ai)
      /* Top-level allocnos never need a cap.  */
      if (ALLOCNO_LOOP_TREE_NODE (a) == ira_loop_tree_root)
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	create_cap_allocno (a);
      else if (ALLOCNO_CAP (a) == NULL)
	  loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
	  if (!bitmap_bit_p (loop_tree_node->border_allocnos, ALLOCNO_NUM (a)))
	    create_cap_allocno (a);
/* This page contains code transforming a more-than-one-region
   internal representation (IR) into a one-region IR, which is
   necessary for reload.  This transformation is called IR
   flattening.  We might just rebuild the IR for one region, but we
   don't do it because it takes a lot of time.  */
2479 /* Map: regno -> allocnos which will finally represent the regno for
2480 IR with one region. */
2481 static ira_allocno_t *regno_top_level_allocno_map;
2483 /* Find the allocno that corresponds to A at a level one higher up in the
2484 loop tree. Returns NULL if A is a cap, or if it has no parent. */
ira_parent_allocno (ira_allocno_t a)
  ira_loop_tree_node_t parent;
  /* A cap has no parent allocno in the regno-map sense.  */
  if (ALLOCNO_CAP (a) != NULL)
  parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
  /* Map A's regno through the parent node's regno -> allocno map.  */
  return parent->regno_allocno_map[ALLOCNO_REGNO (a)];
2500 /* Find the allocno that corresponds to A at a level one higher up in the
2501 loop tree. If ALLOCNO_CAP is set for A, return that. */
ira_parent_or_cap_allocno (ira_allocno_t a)
  /* Prefer the cap when one exists; otherwise fall back to the
     parent allocno one loop level up (may be NULL).  */
  if (ALLOCNO_CAP (a) != NULL)
    return ALLOCNO_CAP (a);
  return ira_parent_allocno (a);
/* Process all allocnos originated from pseudo REGNO and copy live
   ranges, hard reg conflicts, and allocno stack reg attributes from
   low-level allocnos to final allocnos which are destinations of
   removed stores at a loop exit.  Return true if we copied any live
   ranges.  */
copy_info_to_removed_store_destinations (int regno)
  ira_allocno_t parent_a = NULL;
  ira_loop_tree_node_t parent;
  /* Walk the chain of all allocnos created for pseudo REGNO.  */
  for (a = ira_regno_allocno_map[regno];
       a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
      if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))])
	/* This allocno will be removed.  */
      /* Caps will be removed.  */
      ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
      /* Climb the loop tree to find the nearest ancestor allocno of
	 REGNO which is a destination of a removed store.  */
      for (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent;
	   parent = parent->parent)
	if ((parent_a = parent->regno_allocno_map[regno]) == NULL
	    || (parent_a == regno_top_level_allocno_map[REGNO (ALLOCNO_REG
		&& ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)))
      if (parent == NULL || parent_a == NULL)
      /* Accumulate A's live ranges, hard register conflicts and call
	 statistics into the surviving destination allocno.  */
      copy_allocno_live_ranges (a, parent_a);
      merge_hard_reg_conflicts (a, parent_a, true);
      ALLOCNO_CALL_FREQ (parent_a) += ALLOCNO_CALL_FREQ (a);
      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
	+= ALLOCNO_CALLS_CROSSED_NUM (a);
      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
	+= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
2556 /* Flatten the IR. In other words, this function transforms IR as if
2557 it were built with one region (without loops). We could make it
2558 much simpler by rebuilding IR with one region, but unfortunately it
2559 takes a lot of time. MAX_REGNO_BEFORE_EMIT and
2560 IRA_MAX_POINT_BEFORE_EMIT are correspondingly MAX_REG_NUM () and
2561 IRA_MAX_POINT before emitting insns on the loop borders. */
ira_flattening (int max_regno_before_emit, int ira_max_point_before_emit)
  bool new_pseudos_p, merged_p, mem_dest_p;
  enum reg_class cover_class;
  ira_allocno_t a, parent_a, first, second, node_first, node_second;
  ira_loop_tree_node_t node;
  ira_allocno_iterator ai;
  ira_copy_iterator ci;
  /* Map: pseudo regno -> the allocno that will represent it in the
     flattened, single-region IR.  */
  regno_top_level_allocno_map
    = (ira_allocno_t *) ira_allocate (max_reg_num () * sizeof (ira_allocno_t));
  memset (regno_top_level_allocno_map, 0,
	  max_reg_num () * sizeof (ira_allocno_t));
  new_pseudos_p = merged_p = false;
  /* Fold the accumulated ("total") conflict info into the plain
     conflict info; the distinction disappears after flattening.  */
  FOR_EACH_ALLOCNO (a, ai)
      ira_object_t obj = ALLOCNO_OBJECT (a);
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	/* Caps are not in the regno allocno maps and they will never
	   be transformed into allocnos existing after IR
	   flattening.  */
      COPY_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
			 OBJECT_CONFLICT_HARD_REGS (obj));
      ALLOCNO_TOTAL_NO_STACK_REG_P (a) = ALLOCNO_NO_STACK_REG_P (a);
  /* Fix final allocno attributes.  */
  for (i = max_regno_before_emit - 1; i >= FIRST_PSEUDO_REGISTER; i--)
      for (a = ira_regno_allocno_map[i];
	   a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
	  ira_assert (ALLOCNO_CAP_MEMBER (a) == NULL);
	  if (ALLOCNO_SOMEWHERE_RENAMED_P (a))
	    new_pseudos_p = true;
	  parent_a = ira_parent_allocno (a);
	  if (parent_a == NULL)
	      /* A root-region allocno: it represents its regno in the
		 flattened IR.  */
	      ALLOCNO_COPIES (a) = NULL;
	      regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
	  ira_assert (ALLOCNO_CAP_MEMBER (parent_a) == NULL);
	  if (ALLOCNO_MEM_OPTIMIZED_DEST (a) != NULL)
	  if (REGNO (ALLOCNO_REG (a)) == REGNO (ALLOCNO_REG (parent_a)))
	      /* Same pseudo in the parent region -- merge A into its
		 parent allocno.  */
	      merge_hard_reg_conflicts (a, parent_a, true);
	      move_allocno_live_ranges (a, parent_a);
	      ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
		= (ALLOCNO_MEM_OPTIMIZED_DEST_P (parent_a)
		   || ALLOCNO_MEM_OPTIMIZED_DEST_P (a));
	  new_pseudos_p = true;
	  /* A got a new pseudo: subtract A's statistics and costs
	     from every ancestor that accumulated them.  */
	      ALLOCNO_NREFS (parent_a) -= ALLOCNO_NREFS (a);
	      ALLOCNO_FREQ (parent_a) -= ALLOCNO_FREQ (a);
	      ALLOCNO_CALL_FREQ (parent_a) -= ALLOCNO_CALL_FREQ (a);
	      ALLOCNO_CALLS_CROSSED_NUM (parent_a)
		-= ALLOCNO_CALLS_CROSSED_NUM (a);
	      ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (parent_a)
		-= ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a);
	      ira_assert (ALLOCNO_CALLS_CROSSED_NUM (parent_a) >= 0
			  && ALLOCNO_NREFS (parent_a) >= 0
			  && ALLOCNO_FREQ (parent_a) >= 0);
	      cover_class = ALLOCNO_COVER_CLASS (parent_a);
	      hard_regs_num = ira_class_hard_regs_num[cover_class];
	      if (ALLOCNO_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_HARD_REG_COSTS (a)[j];
	      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) != NULL
		  && ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a) != NULL)
		for (j = 0; j < hard_regs_num; j++)
		  ALLOCNO_CONFLICT_HARD_REG_COSTS (parent_a)[j]
		    -= ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[j];
	      ALLOCNO_COVER_CLASS_COST (parent_a)
		-= ALLOCNO_COVER_CLASS_COST (a);
	      ALLOCNO_MEMORY_COST (parent_a) -= ALLOCNO_MEMORY_COST (a);
	      parent_a = ira_parent_allocno (parent_a);
	      if (parent_a == NULL)
	  ALLOCNO_COPIES (a) = NULL;
	  regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))] = a;
      if (mem_dest_p && copy_info_to_removed_store_destinations (i))
  ira_assert (new_pseudos_p || ira_max_point_before_emit == ira_max_point);
  if (merged_p || ira_max_point_before_emit != ira_max_point)
    ira_rebuild_start_finish_chains ();
  /* When new pseudos appeared, rebuild the conflict graph from the
     live ranges of the surviving top-level allocnos.  */
      sparseset objects_live;
      /* Rebuild conflicts.  */
      FOR_EACH_ALLOCNO (a, ai)
	  ira_object_t obj = ALLOCNO_OBJECT (a);
	  if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
	      || ALLOCNO_CAP_MEMBER (a) != NULL)
	  for (r = OBJECT_LIVE_RANGES (obj); r != NULL; r = r->next)
	    ira_assert (r->object == obj);
	  clear_conflicts (obj);
      objects_live = sparseset_alloc (ira_objects_num);
      /* Sweep over all program points; objects whose ranges overlap a
	 point and whose classes intersect conflict.  */
      for (i = 0; i < ira_max_point; i++)
	  for (r = ira_start_point_ranges[i]; r != NULL; r = r->start_next)
	      ira_object_t obj = r->object;
	      a = OBJECT_ALLOCNO (obj);
	      if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
		  || ALLOCNO_CAP_MEMBER (a) != NULL)
	      cover_class = ALLOCNO_COVER_CLASS (a);
	      sparseset_set_bit (objects_live, OBJECT_CONFLICT_ID (obj));
	      EXECUTE_IF_SET_IN_SPARSESET (objects_live, n)
		  ira_object_t live_obj = ira_object_id_map[n];
		  ira_allocno_t live_a = OBJECT_ALLOCNO (live_obj);
		  enum reg_class live_cover = ALLOCNO_COVER_CLASS (live_a);
		  if (ira_reg_classes_intersect_p[cover_class][live_cover]
		      /* Don't set up conflict for the allocno with itself.  */
		    ira_add_conflict (obj, live_obj);
	  for (r = ira_finish_point_ranges[i]; r != NULL; r = r->finish_next)
	    sparseset_clear_bit (objects_live, OBJECT_CONFLICT_ID (r->object));
      sparseset_free (objects_live);
      compress_conflict_vecs ();
  /* Mark some copies for removing and change allocnos in the rest
     copies.  */
  FOR_EACH_COPY (cp, ci)
      if (ALLOCNO_CAP_MEMBER (cp->first) != NULL
	  || ALLOCNO_CAP_MEMBER (cp->second) != NULL)
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	      (ira_dump_file, "      Remove cp%d:%c%dr%d-%c%dr%d\n",
	       cp->num, ALLOCNO_CAP_MEMBER (cp->first) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->first), REGNO (ALLOCNO_REG (cp->first)),
	       ALLOCNO_CAP_MEMBER (cp->second) != NULL ? 'c' : 'a',
	       ALLOCNO_NUM (cp->second), REGNO (ALLOCNO_REG (cp->second)));
	  cp->loop_tree_node = NULL;
      first = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->first))];
      second = regno_top_level_allocno_map[REGNO (ALLOCNO_REG (cp->second))];
      node = cp->loop_tree_node;
	keep_p = true; /* A copy generated in ira-emit.c.  */
	  /* Check that the copy was not propagated from level on
	     which we will have different pseudos.  */
	  node_first = node->regno_allocno_map[ALLOCNO_REGNO (cp->first)];
	  node_second = node->regno_allocno_map[ALLOCNO_REGNO (cp->second)];
	  keep_p = ((REGNO (ALLOCNO_REG (first))
		     == REGNO (ALLOCNO_REG (node_first)))
		    && (REGNO (ALLOCNO_REG (second))
			== REGNO (ALLOCNO_REG (node_second))));
	  /* Retarget the kept copy at the top-level allocnos.  */
	  cp->loop_tree_node = ira_loop_tree_root;
	  cp->second = second;
	  cp->loop_tree_node = NULL;
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove cp%d:a%dr%d-a%dr%d\n",
		     cp->num, ALLOCNO_NUM (cp->first),
		     REGNO (ALLOCNO_REG (cp->first)), ALLOCNO_NUM (cp->second),
		     REGNO (ALLOCNO_REG (cp->second)));
  /* Remove unnecessary allocnos on lower levels of the loop tree.  */
  FOR_EACH_ALLOCNO (a, ai)
      if (a != regno_top_level_allocno_map[REGNO (ALLOCNO_REG (a))]
	  || ALLOCNO_CAP_MEMBER (a) != NULL)
	  if (internal_flag_ira_verbose > 4 && ira_dump_file != NULL)
	    fprintf (ira_dump_file, "      Remove a%dr%d\n",
		     ALLOCNO_NUM (a), REGNO (ALLOCNO_REG (a)));
      /* The surviving allocno becomes a root-region allocno.  */
      ALLOCNO_LOOP_TREE_NODE (a) = ira_loop_tree_root;
      ALLOCNO_REGNO (a) = REGNO (ALLOCNO_REG (a));
      ALLOCNO_CAP (a) = NULL;
      /* Restore updated costs for assignments from reload.  */
      ALLOCNO_UPDATED_MEMORY_COST (a) = ALLOCNO_MEMORY_COST (a);
      ALLOCNO_UPDATED_COVER_CLASS_COST (a) = ALLOCNO_COVER_CLASS_COST (a);
      if (! ALLOCNO_ASSIGNED_P (a))
	ira_free_allocno_updated_costs (a);
      ira_assert (ALLOCNO_UPDATED_HARD_REG_COSTS (a) == NULL);
      ira_assert (ALLOCNO_UPDATED_CONFLICT_HARD_REG_COSTS (a) == NULL);
  /* Remove unnecessary copies.  */
  FOR_EACH_COPY (cp, ci)
      if (cp->loop_tree_node == NULL)
	  ira_copies[cp->num] = NULL;
	(ALLOCNO_LOOP_TREE_NODE (cp->first) == ira_loop_tree_root
	 && ALLOCNO_LOOP_TREE_NODE (cp->second) == ira_loop_tree_root);
      ira_add_allocno_copy_to_list (cp);
      ira_swap_allocno_copy_ends_if_necessary (cp);
  rebuild_regno_allocno_maps ();
  if (ira_max_point != ira_max_point_before_emit)
    ira_compress_allocno_live_ranges ();
  ira_free (regno_top_level_allocno_map);
2811 #ifdef ENABLE_IRA_CHECKING
2812 /* Check creation of all allocnos. Allocnos on lower levels should
2813 have allocnos or caps on all upper levels. */
check_allocno_creation (void)
  /* Consistency check (compiled only under ENABLE_IRA_CHECKING):
     every allocno must be recorded in its loop tree node's
     all_allocnos bitmap, and every non-root allocno must have either
     a cap or a parent allocno on the level above.  */
  ira_allocno_iterator ai;
  ira_loop_tree_node_t loop_tree_node;
  FOR_EACH_ALLOCNO (a, ai)
      loop_tree_node = ALLOCNO_LOOP_TREE_NODE (a);
      ira_assert (bitmap_bit_p (loop_tree_node->all_allocnos,
      if (loop_tree_node == ira_loop_tree_root)
      if (ALLOCNO_CAP_MEMBER (a) != NULL)
	ira_assert (ALLOCNO_CAP (a) != NULL);
      else if (ALLOCNO_CAP (a) == NULL)
	ira_assert (loop_tree_node->parent
		    ->regno_allocno_map[ALLOCNO_REGNO (a)] != NULL
		    && bitmap_bit_p (loop_tree_node->border_allocnos,
2839 /* Identify allocnos which prefer a register class with a single hard register.
2840 Adjust ALLOCNO_CONFLICT_HARD_REG_COSTS so that conflicting allocnos are
2841 less likely to use the preferred singleton register. */
update_conflict_hard_reg_costs (void)
  ira_allocno_iterator ai;
  FOR_EACH_ALLOCNO (a, ai)
      enum reg_class cover_class = ALLOCNO_COVER_CLASS (a);
      enum reg_class pref = reg_preferred_class (ALLOCNO_REGNO (a));
      /* Only allocnos whose preferred class contains exactly one hard
	 register are interesting here.  */
      if (reg_class_size[pref] != 1)
      index = (ira_class_hard_reg_index[cover_class]
	       [ira_class_hard_regs[pref][0]]);
      if (ALLOCNO_CONFLICT_HARD_REG_COSTS (a) == NULL
	  || ALLOCNO_HARD_REG_COSTS (a) == NULL)
      /* Find the smallest per-register cost that exceeds the cover
	 class cost; the gap measures how strongly this allocno
	 prefers its singleton register.  */
      for (i = ira_class_hard_regs_num[cover_class] - 1; i >= 0; i--)
	if (ALLOCNO_HARD_REG_COSTS (a)[i] > ALLOCNO_COVER_CLASS_COST (a)
	    && min > ALLOCNO_HARD_REG_COSTS (a)[i])
	  min = ALLOCNO_HARD_REG_COSTS (a)[i];
      ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
      /* Make conflicting allocnos less likely to take the preferred
	 singleton register.  */
      ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[index]
	-= min - ALLOCNO_COVER_CLASS_COST (a);
/* Create an internal representation (IR) for IRA (allocnos, copies,
   loop tree nodes).  If LOOPS_P is FALSE, the nodes corresponding to
   the loops (except the root, which corresponds to the whole
   function) and correspondingly the allocnos for the loops will not
   be created.  Such a parameter value is used for Chaitin-Briggs
   coloring.  The function returns TRUE if we generate a loop
   structure (besides the nodes representing the whole function and
   the basic blocks) for regional allocation.  A true return means
   that we really need to flatten the IR before reload.  */
ira_build (bool loops_p)
  /* Build the whole IRA internal representation: allocnos, copies,
     the loop tree, live ranges and the conflict graph.  */
  initiate_cost_vectors ();
  initiate_allocnos ();
  create_loop_tree_nodes (loops_p);
  create_allocno_objects ();
  ira_create_allocno_live_ranges ();
  remove_unnecessary_regions (false);
  ira_compress_allocno_live_ranges ();
  update_bad_spill_attribute ();
  /* Recompute LOOPS_P: regions may have been removed above.  */
  loops_p = more_one_region_p ();
  propagate_allocno_info ();
  ira_tune_allocno_costs_and_cover_classes ();
#ifdef ENABLE_IRA_CHECKING
  check_allocno_creation ();
  /* Number allocno objects by live range and compute the conflict id
     bounds needed for fast conflict-graph construction.  */
  setup_min_max_allocno_live_range_point ();
  sort_conflict_id_map ();
  setup_min_max_conflict_allocno_ids ();
  ira_build_conflicts ();
  update_conflict_hard_reg_costs ();
  if (! ira_conflicts_p)
      ira_allocno_iterator ai;
      /* Remove all regions but root one.  */
      remove_unnecessary_regions (true);
      /* We don't save hard registers around calls for fast allocation
	 -- add caller clobbered registers as conflicting ones to
	 allocnos crossing calls.  */
      FOR_EACH_ALLOCNO (a, ai)
	if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
	    ira_object_t obj = ALLOCNO_OBJECT (a);
	    IOR_HARD_REG_SET (OBJECT_TOTAL_CONFLICT_HARD_REGS (obj),
	    IOR_HARD_REG_SET (OBJECT_CONFLICT_HARD_REGS (obj),
  if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
    print_copies (ira_dump_file);
  if (internal_flag_ira_verbose > 0 && ira_dump_file != NULL)
      /* Dump summary statistics about the IR just built.  */
      ira_allocno_iterator ai;
      FOR_EACH_ALLOCNO (a, ai)
	  ira_object_t obj = ALLOCNO_OBJECT (a);
	  n += OBJECT_NUM_CONFLICTS (obj);
      FOR_EACH_ALLOCNO (a, ai)
	for (r = OBJECT_LIVE_RANGES (ALLOCNO_OBJECT (a)); r != NULL;
      fprintf (ira_dump_file, "    regions=%d, blocks=%d, points=%d\n",
	       VEC_length (loop_p, ira_loops.larray), n_basic_blocks,
      fprintf (ira_dump_file,
	       "    allocnos=%d, copies=%d, conflicts=%d, ranges=%d\n",
	       ira_allocnos_num, ira_copies_num, n, nr);
2972 /* Release the data created by function ira_build. */
2976 finish_loop_tree_nodes ();
2979 finish_cost_vectors ();
2980 ira_finish_allocno_live_ranges ();