1 /* IRA processing allocno lives to build allocno live ranges.
2 Copyright (C) 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
34 #include "insn-config.h"
36 #include "diagnostic-core.h"
40 #include "sparseset.h"
43 /* The code in this file is similar to one in global but the code
44 works on the allocno basis and creates live ranges instead of
45 pseudo-register conflicts. */
47 /* Program points are enumerated by numbers from range
48 0..IRA_MAX_POINT-1. There are approximately two times more program
49 points than insns. Program points are places in the program where
50 liveness info can be changed. In most general case (there are more
51 complicated cases too) some program points correspond to places
52 where input operand dies and other ones correspond to places where
53 output operands are born. */
56 /* Arrays of size IRA_MAX_POINT mapping a program point to the allocno
57 live ranges with given start/finish point. */
58 live_range_t *ira_start_point_ranges, *ira_finish_point_ranges;
60 /* Number of the current program point. */
61 static int curr_point;
63 /* Point where register pressure excess started or -1 if there is no
64 register pressure excess. Excess pressure for a register class at
65 some point means that there are more allocnos of given register
66 class living at the point than number of hard-registers of the
67 class available for the allocation. It is defined only for cover
   classes.  */
69 static int high_pressure_start_point[N_REG_CLASSES];
71 /* Allocnos live at current point in the scan. */
72 static sparseset allocnos_live;
74 /* Set of hard regs (except eliminable ones) currently live. */
75 static HARD_REG_SET hard_regs_live;
77 /* The loop tree node corresponding to the current basic block. */
78 static ira_loop_tree_node_t curr_bb_node;
80 /* The number of the last processed call. */
81 static int last_call_num;
82 /* The number of last call at which given allocno was saved. */
83 static int *allocno_saved_at_call;
85 /* Record the birth of hard register REGNO, updating hard_regs_live
86 and hard reg conflict information for living allocno. */
88 make_hard_regno_born (int regno)
92 SET_HARD_REG_BIT (hard_regs_live, regno);
/* Every allocno live at this point now conflicts with REGNO.  */
93 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
95 SET_HARD_REG_BIT (ALLOCNO_CONFLICT_HARD_REGS (ira_allocnos[i]),
97 SET_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (ira_allocnos[i]),
102 /* Process the death of hard register REGNO. This updates
    hard_regs_live only; conflicts already recorded at birth remain.  */
105 make_hard_regno_dead (int regno)
107 CLEAR_HARD_REG_BIT (hard_regs_live, regno);
110 /* Record the birth of allocno A, starting a new live range for
111 it if necessary, and updating hard reg conflict information. We also
112 record it in allocnos_live. */
114 make_allocno_born (ira_allocno_t a)
116 live_range_t p = ALLOCNO_LIVE_RANGES (a);
118 sparseset_set_bit (allocnos_live, ALLOCNO_NUM (a));
/* A conflicts with every hard register live right now.  */
119 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a), hard_regs_live);
120 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), hard_regs_live);
/* Open a fresh range unless the previous one already touches or
   abuts the current point (then it can just be extended).  */
123 || (p->finish != curr_point && p->finish + 1 != curr_point))
124 ALLOCNO_LIVE_RANGES (a)
125 = ira_create_allocno_live_range (a, curr_point, -1,
126 ALLOCNO_LIVE_RANGES (a));
129 /* Update ALLOCNO_EXCESS_PRESSURE_POINTS_NUM for allocno A. */
131 update_allocno_pressure_excess_length (ira_allocno_t a)
134 enum reg_class cover_class, cl;
137 cover_class = ALLOCNO_COVER_CLASS (a);
/* Consider the cover class and all of its super classes.  */
139 (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES;
/* Negative start point means no pressure excess in progress.  */
142 if (high_pressure_start_point[cl] < 0)
144 p = ALLOCNO_LIVE_RANGES (a);
145 ira_assert (p != NULL);
/* Charge only the part of A's current range that overlaps the
   high-pressure region.  */
146 start = (high_pressure_start_point[cl] > p->start
147 ? high_pressure_start_point[cl] : p->start);
148 ALLOCNO_EXCESS_PRESSURE_POINTS_NUM (a) += curr_point - start + 1;
152 /* Process the death of allocno A. This finishes the current live
    range of A and removes it from allocnos_live.  */
155 make_allocno_dead (ira_allocno_t a)
159 p = ALLOCNO_LIVE_RANGES (a);
160 ira_assert (p != NULL);
/* Close the open range at the current program point.  */
161 p->finish = curr_point;
162 update_allocno_pressure_excess_length (a);
163 sparseset_clear_bit (allocnos_live, ALLOCNO_NUM (a));
166 /* The current register pressures for each cover class for the current
    basic block.  */
168 static int curr_reg_pressure[N_REG_CLASSES];
170 /* Record that register pressure for COVER_CLASS increased by N
171 registers. Update the current register pressure, maximal register
172 pressure for the current BB and the start point of the register
    pressure excess if one begins here.  */
175 inc_register_pressure (enum reg_class cover_class, int n)
/* Update COVER_CLASS itself and all of its super classes.  */
181 (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES;
184 curr_reg_pressure[cl] += n;
/* Remember where the pressure first exceeded the available regs.  */
185 if (high_pressure_start_point[cl] < 0
186 && (curr_reg_pressure[cl] > ira_available_class_regs[cl]))
187 high_pressure_start_point[cl] = curr_point;
/* Track the maximal pressure seen in the current BB node.  */
188 if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
189 curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
193 /* Record that register pressure for COVER_CLASS has decreased by
194 NREGS registers; update current register pressure, start point of
195 the register pressure excess, and register pressure excess length
196 for living allocnos. */
199 dec_register_pressure (enum reg_class cover_class, int nregs)
207 (cl = ira_reg_class_super_classes[cover_class][i]) != LIM_REG_CLASSES;
210 curr_reg_pressure[cl] -= nregs;
211 ira_assert (curr_reg_pressure[cl] >= 0);
/* Did a pressure excess just end for this class?  */
212 if (high_pressure_start_point[cl] >= 0
213 && curr_reg_pressure[cl] <= ira_available_class_regs[cl])
/* The excess ended: charge its length to every live allocno,
   then reset the start points in the loop below.  */
218 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, j)
219 update_allocno_pressure_excess_length (ira_allocnos[j]);
221 (cl = ira_reg_class_super_classes[cover_class][i])
224 if (high_pressure_start_point[cl] >= 0
225 && curr_reg_pressure[cl] <= ira_available_class_regs[cl])
226 high_pressure_start_point[cl] = -1;
230 /* Mark the pseudo register REGNO as live. Update all information about
231 live ranges and register pressure. */
233 mark_pseudo_regno_live (int regno)
235 ira_allocno_t a = ira_curr_regno_allocno_map[regno];
242 /* Invalidate because it is referenced. */
243 allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
/* Already live: nothing more to do.  */
245 if (sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
/* Pressure grows by the number of hard regs A's mode needs.  */
248 cl = ALLOCNO_COVER_CLASS (a);
249 nregs = ira_reg_class_nregs[cl][ALLOCNO_MODE (a)];
250 inc_register_pressure (cl, nregs);
251 make_allocno_born (a);
254 /* Mark the hard register REG as live. Store a 1 in hard_regs_live
255 for this register, record how many consecutive hardware registers
256 it actually needs. */
258 mark_hard_reg_live (rtx reg)
260 int regno = REGNO (reg);
262 if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
/* LAST is one past the last hard reg REG occupies in its mode.  */
264 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
/* Skip regs that are already live or are eliminable.  */
268 if (! TEST_HARD_REG_BIT (hard_regs_live, regno)
269 && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
271 enum reg_class cover_class = ira_hard_regno_cover_class[regno];
272 inc_register_pressure (cover_class, 1);
273 make_hard_regno_born (regno);
280 /* Mark the register referenced by use or def REF as live. */
282 mark_ref_live (df_ref ref)
286 reg = DF_REF_REG (ref);
/* Look through a subreg to the underlying register.  */
287 if (GET_CODE (reg) == SUBREG)
288 reg = SUBREG_REG (reg);
/* Dispatch on pseudo vs. hard register.  */
289 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
290 mark_pseudo_regno_live (REGNO (reg));
292 mark_hard_reg_live (reg);
295 /* Mark the pseudo register REGNO as dead. Update all information about
296 live ranges and register pressure. */
298 mark_pseudo_regno_dead (int regno)
300 ira_allocno_t a = ira_curr_regno_allocno_map[regno];
307 /* Invalidate because it is referenced. */
308 allocno_saved_at_call[ALLOCNO_NUM (a)] = 0;
/* Not currently live: no range to finish.  */
310 if (! sparseset_bit_p (allocnos_live, ALLOCNO_NUM (a)))
/* Pressure shrinks by the number of hard regs A's mode needs.  */
313 cl = ALLOCNO_COVER_CLASS (a);
314 nregs = ira_reg_class_nregs[cl][ALLOCNO_MODE (a)];
315 dec_register_pressure (cl, nregs);
317 make_allocno_dead (a);
320 /* Mark the hard register REG as dead. Store a 0 in hard_regs_live
    for this register.  */
323 mark_hard_reg_dead (rtx reg)
325 int regno = REGNO (reg);
327 if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, regno))
/* LAST is one past the last hard reg REG occupies in its mode.  */
329 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
333 if (TEST_HARD_REG_BIT (hard_regs_live, regno))
335 enum reg_class cover_class = ira_hard_regno_cover_class[regno];
336 dec_register_pressure (cover_class, 1);
337 make_hard_regno_dead (regno);
344 /* Mark the register referenced by definition DEF as dead, if the
345 definition is a total one. */
347 mark_ref_dead (df_ref def)
/* A partial or conditional def does not kill the old value.  */
351 if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
352 || DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL))
355 reg = DF_REF_REG (def);
356 if (GET_CODE (reg) == SUBREG)
357 reg = SUBREG_REG (reg);
358 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
359 mark_pseudo_regno_dead (REGNO (reg));
361 mark_hard_reg_dead (reg);
364 /* Make pseudo REG conflicting with pseudo DREG, if the 1st pseudo
365 class is intersected with class CL. Advance the current program
366 point before making the conflict if ADVANCE_P. Return TRUE if we
367 will need to advance the current program point. */
369 make_pseudo_conflict (rtx reg, enum reg_class cl, rtx dreg, bool advance_p)
373 if (GET_CODE (reg) == SUBREG)
374 reg = SUBREG_REG (reg);
/* Only pseudo registers participate here.  */
376 if (! REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
379 a = ira_curr_regno_allocno_map[REGNO (reg)];
380 if (! reg_classes_intersect_p (cl, ALLOCNO_COVER_CLASS (a)))
/* A momentary birth/death of both pseudos makes their live
   ranges overlap, recording the conflict.  */
386 mark_pseudo_regno_live (REGNO (reg));
387 mark_pseudo_regno_live (REGNO (dreg));
388 mark_pseudo_regno_dead (REGNO (reg));
389 mark_pseudo_regno_dead (REGNO (dreg));
394 /* Check and make if necessary conflicts for pseudo DREG of class
395 DEF_CL of the current insn with input operand USE of class USE_CL.
396 Advance the current program point before making the conflict if
397 ADVANCE_P. Return TRUE if we will need to advance the current
    program point.  */
400 check_and_make_def_use_conflict (rtx dreg, enum reg_class def_cl,
401 int use, enum reg_class use_cl,
404 if (! reg_classes_intersect_p (def_cl, use_cl))
407 advance_p = make_pseudo_conflict (recog_data.operand[use],
408 use_cl, dreg, advance_p);
409 /* Reload may end up swapping commutative operands, so you
410 have to take both orderings into account. The
411 constraints for the two operands can be completely
412 different. (Indeed, if the constraints for the two
413 operands are the same for all alternatives, there's no
414 point marking them as commutative.) */
415 if (use < recog_data.n_operands - 1
416 && recog_data.constraints[use][0] == '%'
418 = make_pseudo_conflict (recog_data.operand[use + 1],
419 use_cl, dreg, advance_p);
/* Symmetric case: USE is the second of a commutative pair.  */
421 && recog_data.constraints[use - 1][0] == '%')
423 = make_pseudo_conflict (recog_data.operand[use - 1],
424 use_cl, dreg, advance_p);
428 /* Check and make if necessary conflicts for definition DEF of class
429 DEF_CL of the current insn with input operands. Process only
430 constraints of alternative ALT. */
432 check_and_make_def_conflict (int alt, int def, enum reg_class def_cl)
436 enum reg_class use_cl, acl;
438 rtx dreg = recog_data.operand[def];
440 if (def_cl == NO_REGS)
443 if (GET_CODE (dreg) == SUBREG)
444 dreg = SUBREG_REG (dreg);
/* Conflicts are only tracked for pseudo registers.  */
446 if (! REG_P (dreg) || REGNO (dreg) < FIRST_PSEUDO_REGISTER)
449 a = ira_curr_regno_allocno_map[REGNO (dreg)];
450 acl = ALLOCNO_COVER_CLASS (a);
451 if (! reg_classes_intersect_p (acl, def_cl))
/* Examine every input operand of the insn against DEF.  */
456 for (use = 0; use < recog_data.n_operands; use++)
460 if (use == def || recog_data.operand_type[use] == OP_OUT)
463 if (recog_op_alt[use][alt].anything_ok)
466 use_cl = recog_op_alt[use][alt].cl;
468 /* If there's any alternative that allows USE to match DEF, do not
469 record a conflict. If that causes us to create an invalid
470 instruction due to the earlyclobber, reload must fix it up. */
471 for (alt1 = 0; alt1 < recog_data.n_alternatives; alt1++)
472 if (recog_op_alt[use][alt1].matches == def
473 || (use < recog_data.n_operands - 1
474 && recog_data.constraints[use][0] == '%'
475 && recog_op_alt[use + 1][alt1].matches == def)
477 && recog_data.constraints[use - 1][0] == '%'
478 && recog_op_alt[use - 1][alt1].matches == def))
481 if (alt1 < recog_data.n_alternatives)
484 advance_p = check_and_make_def_use_conflict (dreg, def_cl, use,
/* Also handle the operand USE is constrained to match.  */
487 if ((use_match = recog_op_alt[use][alt].matches) >= 0)
489 if (use_match == def)
492 if (recog_op_alt[use_match][alt].anything_ok)
495 use_cl = recog_op_alt[use_match][alt].cl;
496 advance_p = check_and_make_def_use_conflict (dreg, def_cl, use,
502 /* Make conflicts of early clobber pseudo registers of the current
503 insn with its inputs. Avoid introducing unnecessary conflicts by
504 checking classes of the constraints and pseudos because otherwise
505 significant code degradation is possible for some targets. */
507 make_early_clobber_and_input_conflicts (void)
511 enum reg_class def_cl;
/* For every alternative, look at each earlyclobber output.  */
513 for (alt = 0; alt < recog_data.n_alternatives; alt++)
514 for (def = 0; def < recog_data.n_operands; def++)
517 if (recog_op_alt[def][alt].earlyclobber)
519 if (recog_op_alt[def][alt].anything_ok)
522 def_cl = recog_op_alt[def][alt].cl;
523 check_and_make_def_conflict (alt, def, def_cl);
/* Matched operand pairs count when either side is an
   early clobber.  */
525 if ((def_match = recog_op_alt[def][alt].matches) >= 0
526 && (recog_op_alt[def_match][alt].earlyclobber
527 || recog_op_alt[def][alt].earlyclobber))
529 if (recog_op_alt[def_match][alt].anything_ok)
532 def_cl = recog_op_alt[def_match][alt].cl;
533 check_and_make_def_conflict (alt, def, def_cl);
538 /* Mark early clobber hard registers of the current INSN as live (if
539 LIVE_P) or dead. Return true if there are such registers. */
541 mark_hard_reg_early_clobbers (rtx insn, bool live_p)
546 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
547 if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
549 rtx dreg = DF_REF_REG (*def_rec);
551 if (GET_CODE (dreg) == SUBREG)
552 dreg = SUBREG_REG (dreg);
/* Only hard registers are of interest here.  */
553 if (! REG_P (dreg) || REGNO (dreg) >= FIRST_PSEUDO_REGISTER)
556 /* Hard register clobbers are believed to be early clobber
557 because there is no way to say that non-operand hard
558 register clobbers are not early ones. */
560 mark_ref_live (*def_rec);
562 mark_ref_dead (*def_rec);
569 /* Checks that CONSTRAINTS permits to use only one hard register. If
570 it is so, the function returns the class of the hard register.
571 Otherwise it returns NO_REGS. */
572 static enum reg_class
573 single_reg_class (const char *constraints, rtx op, rtx equiv_const)
576 enum reg_class cl, next_cl;
/* Scan the constraint string one constraint letter at a time.  */
580 for (ignore_p = false;
582 constraints += CONSTRAINT_LEN (c, constraints))
/* Constant-accepting constraints: if OP (or its known equivalent
   constant) satisfies one, a single register cannot be forced.  */
602 || (equiv_const != NULL_RTX && CONSTANT_P (equiv_const)))
608 || (GET_CODE (op) == CONST_DOUBLE && GET_MODE (op) == VOIDmode)
609 || (equiv_const != NULL_RTX
610 && (CONST_INT_P (equiv_const)
611 || (GET_CODE (equiv_const) == CONST_DOUBLE
612 && GET_MODE (equiv_const) == VOIDmode))))
617 if ((CONSTANT_P (op) && !CONST_INT_P (op)
618 && (GET_CODE (op) != CONST_DOUBLE || GET_MODE (op) != VOIDmode))
619 || (equiv_const != NULL_RTX
620 && CONSTANT_P (equiv_const)
621 && !CONST_INT_P (equiv_const)
622 && (GET_CODE (equiv_const) != CONST_DOUBLE
623 || GET_MODE (equiv_const) != VOIDmode)))
635 if ((CONST_INT_P (op)
636 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, constraints))
637 || (equiv_const != NULL_RTX
638 && CONST_INT_P (equiv_const)
639 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (equiv_const),
646 if (GET_CODE (op) == CONST_DOUBLE
647 || (GET_CODE (op) == CONST_VECTOR
648 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT)
649 || (equiv_const != NULL_RTX
650 && (GET_CODE (equiv_const) == CONST_DOUBLE
651 || (GET_CODE (equiv_const) == CONST_VECTOR
652 && (GET_MODE_CLASS (GET_MODE (equiv_const))
653 == MODE_VECTOR_FLOAT)))))
659 if ((GET_CODE (op) == CONST_DOUBLE
660 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, constraints))
661 || (equiv_const != NULL_RTX
662 && GET_CODE (equiv_const) == CONST_DOUBLE
663 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (equiv_const,
666 /* ??? what about memory */
/* Register-class constraint letters.  */
668 case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
669 case 'h': case 'j': case 'k': case 'l':
670 case 'q': case 't': case 'u':
671 case 'v': case 'w': case 'x': case 'y': case 'z':
672 case 'A': case 'B': case 'C': case 'D':
673 case 'Q': case 'R': case 'S': case 'T': case 'U':
674 case 'W': case 'Y': case 'Z':
677 : REG_CLASS_FROM_CONSTRAINT (c, constraints));
/* Two different classes required, or a class holding more than
   one register of OP's mode, cannot pin a single hard register.  */
678 if ((cl != NO_REGS && next_cl != cl)
679 || (ira_available_class_regs[next_cl]
680 > ira_reg_class_nregs[next_cl][GET_MODE (op)]))
/* Matching-operand digits: recurse on the matched operand.  */
685 case '0': case '1': case '2': case '3': case '4':
686 case '5': case '6': case '7': case '8': case '9':
688 = single_reg_class (recog_data.constraints[c - '0'],
689 recog_data.operand[c - '0'], NULL_RTX);
690 if ((cl != NO_REGS && next_cl != cl)
691 || next_cl == NO_REGS
692 || (ira_available_class_regs[next_cl]
693 > ira_reg_class_nregs[next_cl][GET_MODE (op)]))
704 /* The function checks that operand OP_NUM of the current insn can use
705 only one hard register. If it is so, the function returns the
706 class of the hard register. Otherwise it returns NO_REGS. */
707 static enum reg_class
708 single_reg_operand_class (int op_num)
/* Nothing to check for an invalid operand or an insn with no
   constraint alternatives.  */
710 if (op_num < 0 || recog_data.n_alternatives == 0)
712 return single_reg_class (recog_data.constraints[op_num],
713 recog_data.operand[op_num], NULL_RTX);
716 /* The function sets up hard register set *SET to hard registers which
717 might be used by insn reloads because the constraints are too
    strict.  */
720 ira_implicitly_set_insn_hard_regs (HARD_REG_SET *set)
726 enum machine_mode mode;
728 CLEAR_HARD_REG_SET (*set);
729 for (i = 0; i < recog_data.n_operands; i++)
731 op = recog_data.operand[i];
733 if (GET_CODE (op) == SUBREG)
734 op = SUBREG_REG (op);
/* Only scratches and pseudo registers can cause reloads here.  */
736 if (GET_CODE (op) == SCRATCH
737 || (REG_P (op) && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER))
739 const char *p = recog_data.constraints[i];
741 mode = (GET_CODE (op) == SCRATCH
742 ? GET_MODE (op) : PSEUDO_REGNO_MODE (regno));
/* Walk the operand's constraint string letter by letter.  */
744 for (ignore_p = false; (c = *p); p += CONSTRAINT_LEN (c, p))
/* Register-class constraint letters.  */
753 case 'a': case 'b': case 'c': case 'd': case 'e': case 'f':
754 case 'h': case 'j': case 'k': case 'l':
755 case 'q': case 't': case 'u':
756 case 'v': case 'w': case 'x': case 'y': case 'z':
757 case 'A': case 'B': case 'C': case 'D':
758 case 'Q': case 'R': case 'S': case 'T': case 'U':
759 case 'W': case 'Y': case 'Z':
762 : REG_CLASS_FROM_CONSTRAINT (c, p));
764 /* There is no register pressure problem if all of the
765 regs in this class are fixed. */
766 && ira_available_class_regs[cl] != 0
767 && (ira_available_class_regs[cl]
768 <= ira_reg_class_nregs[cl][mode]))
769 IOR_HARD_REG_SET (*set, reg_class_contents[cl]);
775 /* Processes input operands, if IN_P, or output operands otherwise of
776 the current insn with FREQ to find allocno which can use only one
777 hard register and makes other currently living allocnos conflicting
778 with the hard register. */
780 process_single_reg_class_operands (bool in_p, int freq)
786 ira_allocno_t operand_a, a;
788 for (i = 0; i < recog_data.n_operands; i++)
790 operand = recog_data.operand[i];
/* Skip operands of the wrong direction for this pass.  */
791 if (in_p && recog_data.operand_type[i] != OP_IN
792 && recog_data.operand_type[i] != OP_INOUT)
794 if (! in_p && recog_data.operand_type[i] != OP_OUT
795 && recog_data.operand_type[i] != OP_INOUT)
797 cl = single_reg_operand_class (i);
803 if (GET_CODE (operand) == SUBREG)
804 operand = SUBREG_REG (operand);
807 && (regno = REGNO (operand)) >= FIRST_PSEUDO_REGISTER)
809 enum machine_mode mode;
810 enum reg_class cover_class;
812 operand_a = ira_curr_regno_allocno_map[regno];
813 mode = ALLOCNO_MODE (operand_a);
814 cover_class = ALLOCNO_COVER_CLASS (operand_a);
/* If the single-register class CL is allocatable inside the
   allocno's cover class, bias the allocno's per-hard-reg costs
   using the register move cost between the classes.  */
815 if (ira_class_subset_p[cl][cover_class]
816 && ira_class_hard_regs_num[cl] != 0
817 && (ira_class_hard_reg_index[cover_class]
818 [ira_class_hard_regs[cl][0]]) >= 0
819 && reg_class_size[cl] <= (unsigned) CLASS_MAX_NREGS (cl, mode))
825 ? ira_get_register_move_cost (mode, cover_class, cl)
826 : ira_get_register_move_cost (mode, cl, cover_class)));
827 ira_allocate_and_set_costs
828 (&ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a), cover_class, 0);
829 size = ira_reg_class_nregs[cover_class][mode];
830 for (i = 0; i < size; i++)
831 ALLOCNO_CONFLICT_HARD_REG_COSTS (operand_a)
832 [ira_class_hard_reg_index
833 [cover_class][ira_class_hard_regs[cl][i]]]
/* All other live allocnos must avoid CL's registers.  */
838 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
840 a = ira_allocnos[px];
843 /* We could increase costs of A instead of making it
844 conflicting with the hard register. But it works worse
845 because it will be spilled in reload in anyway. */
846 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
847 reg_class_contents[cl]);
848 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
849 reg_class_contents[cl]);
855 /* Return true when one of the predecessor edges of BB is marked with
856 EDGE_ABNORMAL_CALL or EDGE_EH. */
858 bb_has_abnormal_call_pred (basic_block bb)
/* Inspect the flags of every incoming edge.  */
863 FOR_EACH_EDGE (e, ei, bb->preds)
865 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
871 /* Process insns of the basic block given by its LOOP_TREE_NODE to
872 update allocno live ranges, allocno hard register conflicts,
873 intersected calls, and register pressure info for allocnos for the
874 basic block for and regions containing the basic block. */
876 process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
887 bb = loop_tree_node->bb;
/* Reset per-class pressure state for this block.  */
890 for (i = 0; i < ira_reg_class_cover_size; i++)
892 curr_reg_pressure[ira_reg_class_cover[i]] = 0;
893 high_pressure_start_point[ira_reg_class_cover[i]] = -1;
895 curr_bb_node = loop_tree_node;
/* Insns are scanned backwards, so seed the live sets from the
   registers live on exit from BB.  */
896 reg_live_out = DF_LR_OUT (bb);
897 sparseset_clear (allocnos_live);
898 REG_SET_TO_HARD_REG_SET (hard_regs_live, reg_live_out);
899 AND_COMPL_HARD_REG_SET (hard_regs_live, eliminable_regset);
900 AND_COMPL_HARD_REG_SET (hard_regs_live, ira_no_alloc_regs);
/* Account pressure for hard regs live on exit.  */
901 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
902 if (TEST_HARD_REG_BIT (hard_regs_live, i))
904 enum reg_class cover_class, cl;
906 cover_class = ira_class_translate[REGNO_REG_CLASS (i)];
908 (cl = ira_reg_class_super_classes[cover_class][j])
912 curr_reg_pressure[cl]++;
913 if (curr_bb_node->reg_pressure[cl] < curr_reg_pressure[cl])
914 curr_bb_node->reg_pressure[cl] = curr_reg_pressure[cl];
915 ira_assert (curr_reg_pressure[cl]
916 <= ira_available_class_regs[cl]);
/* Make every pseudo live on exit live now.  */
919 EXECUTE_IF_SET_IN_BITMAP (reg_live_out, FIRST_PSEUDO_REGISTER, j, bi)
920 mark_pseudo_regno_live (j);
922 freq = REG_FREQ_FROM_BB (bb);
926 /* Invalidate all allocno_saved_at_call entries. */
929 /* Scan the code of this basic block, noting which allocnos and
930 hard regs are born or die.
932 Note that this loop treats uninitialized values as live until
933 the beginning of the block. For example, if an instruction
934 uses (reg:DI foo), and only (subreg:SI (reg:DI foo) 0) is ever
935 set, FOO will remain live until the beginning of the block.
936 Likewise if FOO is not set at all. This is unnecessarily
937 pessimistic, but it probably doesn't matter much in practice. */
938 FOR_BB_INSNS_REVERSE (bb, insn)
940 df_ref *def_rec, *use_rec;
943 if (!NONDEBUG_INSN_P (insn))
946 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
947 fprintf (ira_dump_file, " Insn %u(l%d): point = %d\n",
948 INSN_UID (insn), loop_tree_node->parent->loop->num,
951 /* Mark each defined value as live. We need to do this for
952 unused values because they still conflict with quantities
953 that are live at the time of the definition.
955 Ignore DF_REF_MAY_CLOBBERs on a call instruction. Such
956 references represent the effect of the called function
957 on a call-clobbered register. Marking the register as
958 live would stop us from allocating it to a call-crossing
    allocno.  */
960 call_p = CALL_P (insn);
961 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
962 if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
963 mark_ref_live (*def_rec);
965 /* If INSN has multiple outputs, then any value used in one
966 of the outputs conflicts with the other outputs. Model this
967 by making the used value live during the output phase.
969 It is unsafe to use !single_set here since it will ignore
970 an unused output. Just because an output is unused does
971 not mean the compiler can assume the side effect will not
972 occur. Consider if ALLOCNO appears in the address of an
973 output and we reload the output. If we allocate ALLOCNO
974 to the same hard register as an unused output we could
975 set the hard register before the output reload insn. */
976 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
977 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
982 reg = DF_REF_REG (*use_rec);
983 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
987 set = XVECEXP (PATTERN (insn), 0, i);
988 if (GET_CODE (set) == SET
989 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
991 /* After the previous loop, this is a no-op if
992 REG is contained within SET_DEST (SET). */
993 mark_ref_live (*use_rec);
1000 preprocess_constraints ();
1001 process_single_reg_class_operands (false, freq);
1003 /* See which defined values die here. */
1004 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
1005 if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
1006 mark_ref_dead (*def_rec);
1011 /* The current set of live allocnos are live across the call. */
1012 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
1014 ira_allocno_t a = ira_allocnos[i];
1016 if (allocno_saved_at_call[i] != last_call_num)
1017 /* Here we are mimicking caller-save.c behaviour
1018 which does not save hard register at a call if
1019 it was saved on previous call in the same basic
1020 block and the hard register was not mentioned
1021 between the two calls. */
1022 ALLOCNO_CALL_FREQ (a) += freq;
1023 /* Mark it as saved at the next call. */
1024 allocno_saved_at_call[i] = last_call_num + 1;
1025 ALLOCNO_CALLS_CROSSED_NUM (a)++;
1026 /* Don't allocate allocnos that cross setjmps or any
1027 call, if this function receives a nonlocal
     goto.  */
1029 if (cfun->has_nonlocal_label
1030 || find_reg_note (insn, REG_SETJMP,
1031 NULL_RTX) != NULL_RTX)
1033 SET_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a));
1034 SET_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a));
/* An insn that can throw internally conflicts with the EH
   clobbered registers.  */
1036 if (can_throw_internal (insn))
1038 IOR_HARD_REG_SET (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a),
1040 IOR_HARD_REG_SET (ALLOCNO_CONFLICT_HARD_REGS (a),
1046 make_early_clobber_and_input_conflicts ();
1050 /* Mark each used value as live. */
1051 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
1052 mark_ref_live (*use_rec);
1054 process_single_reg_class_operands (true, freq);
1056 set_p = mark_hard_reg_early_clobbers (insn, true);
1060 mark_hard_reg_early_clobbers (insn, false);
1062 /* Mark each hard reg as live again. For example, a
1063 hard register can be in clobber and in an insn
     use.  */
1065 for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
1067 rtx ureg = DF_REF_REG (*use_rec);
1069 if (GET_CODE (ureg) == SUBREG)
1070 ureg = SUBREG_REG (ureg);
1071 if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
1074 mark_ref_live (*use_rec);
1081 #ifdef EH_RETURN_DATA_REGNO
/* EH landing pads implicitly define the EH return data regs.  */
1082 if (bb_has_eh_pred (bb))
1085 unsigned int regno = EH_RETURN_DATA_REGNO (j);
1086 if (regno == INVALID_REGNUM)
1088 make_hard_regno_born (regno);
1092 /* Allocnos can't go in stack regs at the start of a basic block
1093 that is reached by an abnormal edge. Likewise for call
1094 clobbered regs, because caller-save, fixup_abnormal_edges and
1095 possibly the table driven EH machinery are not quite ready to
1096 handle such allocnos live across such edges. */
1097 if (bb_has_abnormal_pred (bb))
1100 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, px)
1102 ALLOCNO_NO_STACK_REG_P (ira_allocnos[px]) = true;
1103 ALLOCNO_TOTAL_NO_STACK_REG_P (ira_allocnos[px]) = true;
1105 for (px = FIRST_STACK_REG; px <= LAST_STACK_REG; px++)
1106 make_hard_regno_born (px);
1108 /* No need to record conflicts for call clobbered regs if we
1109 have nonlocal labels around, as we don't ever try to
1110 allocate such regs in this case. */
1111 if (!cfun->has_nonlocal_label && bb_has_abnormal_call_pred (bb))
1112 for (px = 0; px < FIRST_PSEUDO_REGISTER; px++)
1113 if (call_used_regs[px])
1114 make_hard_regno_born (px);
/* Close the ranges of everything still live at the block start.  */
1117 EXECUTE_IF_SET_IN_SPARSESET (allocnos_live, i)
1118 make_allocno_dead (ira_allocnos[i]);
1123 /* Propagate register pressure to upper loop tree nodes: */
1124 if (loop_tree_node != ira_loop_tree_root)
1125 for (i = 0; i < ira_reg_class_cover_size; i++)
1127 enum reg_class cover_class;
1129 cover_class = ira_reg_class_cover[i];
1130 if (loop_tree_node->reg_pressure[cover_class]
1131 > loop_tree_node->parent->reg_pressure[cover_class])
1132 loop_tree_node->parent->reg_pressure[cover_class]
1133 = loop_tree_node->reg_pressure[cover_class];
1137 /* Create and set up IRA_START_POINT_RANGES and
1138 IRA_FINISH_POINT_RANGES. */
1140 create_start_finish_chains (void)
1143 ira_allocno_iterator ai;
/* Allocate and zero one bucket per program point for each array.  */
1146 ira_start_point_ranges
1147 = (live_range_t *) ira_allocate (ira_max_point
1148 * sizeof (live_range_t));
1149 memset (ira_start_point_ranges, 0,
1150 ira_max_point * sizeof (live_range_t));
1151 ira_finish_point_ranges
1152 = (live_range_t *) ira_allocate (ira_max_point
1153 * sizeof (live_range_t));
1154 memset (ira_finish_point_ranges, 0,
1155 ira_max_point * sizeof (live_range_t));
1156 FOR_EACH_ALLOCNO (a, ai)
1158 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
/* Chain each range into the buckets of its start and finish points.  */
1160 r->start_next = ira_start_point_ranges[r->start];
1161 ira_start_point_ranges[r->start] = r;
1162 r->finish_next = ira_finish_point_ranges[r->finish];
1163 ira_finish_point_ranges[r->finish] = r;
1168 /* Rebuild IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES after
1169 new live ranges and program points were added as a result if new
    insn scanning.  */
1172 ira_rebuild_start_finish_chains (void)
/* Drop the old chains and build them from scratch.  */
1174 ira_free (ira_finish_point_ranges);
1175 ira_free (ira_start_point_ranges);
1176 create_start_finish_chains ();
1179 /* Compress allocno live ranges by removing program points where
    nothing happens.  */
1182 remove_some_program_points_and_update_live_ranges (void)
1188 ira_allocno_iterator ai;
1190 bitmap born_or_died;
1193 born_or_died = ira_allocate_bitmap ();
/* Collect every program point at which some range starts or ends.  */
1194 FOR_EACH_ALLOCNO (a, ai)
1196 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
1198 ira_assert (r->start <= r->finish)
1199 bitmap_set_bit (born_or_died, r->start);
1200 bitmap_set_bit (born_or_died, r->finish);
/* Build the old-point -> compressed-point map.  */
1203 map = (int *) ira_allocate (sizeof (int) * ira_max_point);
1205 EXECUTE_IF_SET_IN_BITMAP(born_or_died, 0, i, bi)
1209 ira_free_bitmap (born_or_died);
1210 if (internal_flag_ira_verbose > 1 && ira_dump_file != NULL)
1211 fprintf (ira_dump_file, "Compressing live ranges: from %d to %d - %d%%\n",
1212 ira_max_point, n, 100 * n / ira_max_point);
/* Renumber all range endpoints through the map.  */
1214 FOR_EACH_ALLOCNO (a, ai)
1216 for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
1218 r->start = map[r->start];
1219 r->finish = map[r->finish];
1225 /* Print live ranges R to file F. */
1227 ira_print_live_range_list (FILE *f, live_range_t r)
/* One "[start..finish]" pair per range in the chain.  */
1229 for (; r != NULL; r = r->next)
1230 fprintf (f, " [%d..%d]", r->start, r->finish);
1234 /* Print live ranges R to stderr. */
1236 ira_debug_live_range_list (live_range_t r)
/* Debugger convenience wrapper.  */
1238 ira_print_live_range_list (stderr, r);
1241 /* Print live ranges of allocno A to file F. */
1243 print_allocno_live_ranges (FILE *f, ira_allocno_t a)
/* Prefix the range list with the allocno and pseudo numbers.  */
1245 fprintf (f, " a%d(r%d):", ALLOCNO_NUM (a), ALLOCNO_REGNO (a));
1246 ira_print_live_range_list (f, ALLOCNO_LIVE_RANGES (a));
1249 /* Print live ranges of allocno A to stderr. */
1251 ira_debug_allocno_live_ranges (ira_allocno_t a)
/* Debugger convenience wrapper.  */
1253 print_allocno_live_ranges (stderr, a);
1256 /* Print live ranges of all allocnos to file F. */
1258 print_live_ranges (FILE *f)
1261 ira_allocno_iterator ai;
/* Dump every allocno's range list in turn.  */
1263 FOR_EACH_ALLOCNO (a, ai)
1264 print_allocno_live_ranges (f, a);
1267 /* Print live ranges of all allocnos to stderr. */
1269 ira_debug_live_ranges (void)
/* Debugger convenience wrapper.  */
1271 print_live_ranges (stderr);
1274 /* The main entry function creates live ranges, set up
1275 CONFLICT_HARD_REGS and TOTAL_CONFLICT_HARD_REGS for allocnos, and
1276 calculate register pressure info. */
1278 ira_create_allocno_live_ranges (void)
1280 allocnos_live = sparseset_alloc (ira_allocnos_num);
1283 allocno_saved_at_call
1284 = (int *) ira_allocate (ira_allocnos_num * sizeof (int));
1285 memset (allocno_saved_at_call, 0, ira_allocnos_num * sizeof (int));
/* Walk the whole loop tree, scanning each basic block's insns.  */
1286 ira_traverse_loop_tree (true, ira_loop_tree_root, NULL,
1287 process_bb_node_lives);
1288 ira_max_point = curr_point;
1289 create_start_finish_chains ();
1290 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1291 print_live_ranges (ira_dump_file);
/* Release scratch structures used only during the scan.  */
1293 ira_free (allocno_saved_at_call);
1294 sparseset_free (allocnos_live);
1297 /* Compress allocno live ranges. */
1299 ira_compress_allocno_live_ranges (void)
/* Drop dead program points, then rebuild the point-indexed chains.  */
1301 remove_some_program_points_and_update_live_ranges ();
1302 ira_rebuild_start_finish_chains ();
1303 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1305 fprintf (ira_dump_file, "Ranges after the compression:\n");
1306 print_live_ranges (ira_dump_file);
1310 /* Free arrays IRA_START_POINT_RANGES and IRA_FINISH_POINT_RANGES. */
1312 ira_finish_allocno_live_ranges (void)
1314 ira_free (ira_finish_point_ranges);
1315 ira_free (ira_start_point_ranges);