1 /* IRA hard register and memory cost calculation for allocnos.
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
26 #include "hard-reg-set.h"
31 #include "basic-block.h"
33 #include "addresses.h"
34 #include "insn-config.h"
41 /* This file contains code similar to that in regclass.c, but the code
42 here works on the allocno basis. */
44 #ifdef FORBIDDEN_INC_DEC_CLASSES
45 /* Indexed by n, is TRUE if allocno with number N is used in an
46 auto-inc or auto-dec context. */
47 static bool *in_inc_dec;
/* NOTE(review): the matching #endif and the `struct costs' definition
   are elided from this excerpt of the file.  */
50 /* The `costs' struct records the cost of using hard registers of each
51 class considered for the calculation and of using memory for each
   allocno.  */
56 /* Costs for register classes start here. We process only some
57 register classes (cover classes on the 1st cost calculation
58 iteration and important classes on the 2nd iteration). */
62 /* Initialized once. It is a maximal possible size of the allocated
   struct costs.  */
64 static int max_struct_costs_size;
66 /* Allocated and initialized once, and used to initialize cost values
   for each insn operand (see record_operand_costs).  */
68 static struct costs *init_cost;
70 /* Allocated once, and used for temporary purposes. */
71 static struct costs *temp_costs;
73 /* Allocated once, and used for the cost calculation. */
74 static struct costs *op_costs[MAX_RECOG_OPERANDS];
75 static struct costs *this_op_costs[MAX_RECOG_OPERANDS];
77 /* Original and accumulated costs of each class for each allocno. */
78 static struct costs *allocno_costs, *total_costs;
80 /* Classes used for cost calculation. They may be different on
81 different iterations of the cost calculations or in different
82 optimization modes. */
83 static enum reg_class *cost_classes;
85 /* The size of the previous array. */
86 static int cost_classes_num;
88 /* Map: cost class -> order number (they start with 0) of the cost
89 class. The order number is negative for non-cost classes. */
90 static int cost_class_nums[N_REG_CLASSES];
92 /* It is the current size of struct costs. */
93 static int struct_costs_size;
95 /* Return pointer to structure containing costs of allocno with given
   number NUM in array ARR.  The array is addressed in bytes because
   struct costs has a variable-length cost member.  */
97 #define COSTS_OF_ALLOCNO(arr, num) \
98 ((struct costs *) ((char *) (arr) + (num) * struct_costs_size))
100 /* Record register class preferences of each allocno. Null value
101 means no preferences. It happens on the 1st iteration of the cost
    calculation.  */
103 static enum reg_class *allocno_pref;
105 /* Allocated buffers for allocno_pref. */
106 static enum reg_class *allocno_pref_buffer;
108 /* Execution frequency of the current insn. */
109 static int frequency;
113 /* Compute the cost of loading X into (if TO_P is TRUE) or from (if
114 TO_P is FALSE) a register of class RCLASS in mode MODE. X must not
115 be a pseudo register. */
/* NOTE(review): the return-type line and several statements of this
   function are elided from this excerpt.  */
117 copy_cost (rtx x, enum machine_mode mode, enum reg_class rclass, bool to_p,
118 secondary_reload_info *prev_sri)
120 secondary_reload_info sri;
121 enum reg_class secondary_class = NO_REGS;
123 /* If X is a SCRATCH, there is actually nothing to move since we are
124 assuming optimal allocation. */
125 if (GET_CODE (x) == SCRATCH)
128 /* Get the class we will actually use for a reload. */
129 rclass = PREFERRED_RELOAD_CLASS (x, rclass);
131 /* If we need a secondary reload for an intermediate, the cost is
132 that to load the input into the intermediate register, then to
    copy it to the final destination.  */
134 sri.prev_sri = prev_sri;
136 secondary_class = targetm.secondary_reload (to_p, x, rclass, mode, &sri);
/* Lazily initialize the per-mode register move cost table.  */
138 if (ira_register_move_cost[mode] == NULL)
139 ira_init_register_move_cost (mode);
/* Recurse to accumulate the cost through the intermediate class.  */
141 if (secondary_class != NO_REGS)
142 return (move_cost[mode][secondary_class][rclass] + sri.extra_cost
143 + copy_cost (x, mode, secondary_class, to_p, &sri));
145 /* For memory, use the memory move cost, for (hard) registers, use
146 the cost to move between the register classes, and use 2 for
147 everything else (constants). */
148 if (MEM_P (x) || rclass == NO_REGS)
149 return sri.extra_cost + ira_memory_move_cost[mode][rclass][to_p != 0];
152 (sri.extra_cost + move_cost[mode][REGNO_REG_CLASS (REGNO (x))][rclass]);
154 /* If this is a constant, we may eventually want to call rtx_cost
    here.  */
156 return sri.extra_cost + COSTS_N_INSNS (1);
161 /* Record the cost of using memory or hard registers of various
162 classes for the operands in INSN.
164 N_ALTS is the number of alternatives.
165 N_OPS is the number of operands.
166 OPS is an array of the operands.
167 MODES are the modes of the operands, in case any are VOIDmode.
168 CONSTRAINTS are the constraints to use for the operands. This array
169 is modified by this procedure.
171 This procedure works alternative by alternative. For each
172 alternative we assume that we will be able to allocate all allocnos
173 to their ideal register class and calculate the cost of using that
174 alternative. Then we compute, for each operand that is a
175 pseudo-register, the cost of having the allocno allocated to each
176 register class and using it in that alternative. To this cost is
177 added the cost of the alternative.
179 The cost of each class for this insn is its lowest cost among all
    the alternatives.  */
/* NOTE(review): the return-type line, several local declarations, and
   many statements of this function are elided from this excerpt.  */
182 record_reg_classes (int n_alts, int n_ops, rtx *ops,
183 enum machine_mode *modes, const char **constraints,
184 rtx insn, struct costs **op_costs,
185 enum reg_class *allocno_pref)
191 /* Process each alternative, each time minimizing an operand's cost
192 with the cost for each operand in that alternative. */
193 for (alt = 0; alt < n_alts; alt++)
195 enum reg_class classes[MAX_RECOG_OPERANDS];
196 int allows_mem[MAX_RECOG_OPERANDS];
199 int alt_cost = 0, op_cost_add;
201 for (i = 0; i < n_ops; i++)
204 const char *p = constraints[i];
206 enum machine_mode mode = modes[i];
210 /* Initially show we know nothing about the register class. */
211 classes[i] = NO_REGS;
214 /* If this operand has no constraints at all, we can
215 conclude nothing about it since anything is valid. */
218 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
219 memset (this_op_costs[i], 0, struct_costs_size);
223 /* If this alternative is only relevant when this operand
224 matches a previous operand, we do different things
225 depending on whether this operand is an allocno-reg or not.
226 We must process any modifiers for the operand before we
227 can make this test. */
228 while (*p == '%' || *p == '=' || *p == '+' || *p == '&')
/* A digit constraint `0'..`9' means "match operand J".  */
231 if (p[0] >= '0' && p[0] <= '0' + i && (p[1] == ',' || p[1] == 0))
233 /* Copy class and whether memory is allowed from the
234 matching alternative. Then perform any needed cost
235 computations and/or adjustments. */
237 classes[i] = classes[j];
238 allows_mem[i] = allows_mem[j];
240 if (! REG_P (op) || REGNO (op) < FIRST_PSEUDO_REGISTER)
242 /* If this matches the other operand, we have no
243 added cost and we win. */
244 if (rtx_equal_p (ops[j], op))
246 /* If we can put the other operand into a register,
247 add to the cost of this alternative the cost to
248 copy this operand to the register used for the
    other operand.  */
250 else if (classes[j] != NO_REGS)
252 alt_cost += copy_cost (op, mode, classes[j], 1, NULL);
256 else if (! REG_P (ops[j])
257 || REGNO (ops[j]) < FIRST_PSEUDO_REGISTER)
259 /* This op is an allocno but the one it matches is
    not.  */
262 /* If we can't put the other operand into a
263 register, this alternative can't be used. */
265 if (classes[j] == NO_REGS)
267 /* Otherwise, add to the cost of this alternative
268 the cost to copy the other operand to the hard
269 register used for this operand. */
271 alt_cost += copy_cost (ops[j], mode, classes[j], 1, NULL);
275 /* The costs of this operand are not the same as the
276 other operand since move costs are not symmetric.
277 Moreover, if we cannot tie them, this alternative
278 needs to do a copy, which is one insn. */
279 struct costs *pp = this_op_costs[i];
281 if (ira_register_move_cost[mode] == NULL)
282 ira_init_register_move_cost (mode);
/* Charge each candidate cost class for moving in and/or out,
   scaled by the current insn's execution frequency.  */
284 for (k = 0; k < cost_classes_num; k++)
286 rclass = cost_classes[k];
288 = ((recog_data.operand_type[i] != OP_OUT
289 ? ira_may_move_in_cost[mode][rclass]
290 [classes[i]] * frequency : 0)
291 + (recog_data.operand_type[i] != OP_IN
292 ? ira_may_move_out_cost[mode][classes[i]]
293 [rclass] * frequency : 0));
296 /* If the alternative actually allows memory, make
297 things a bit cheaper since we won't need an extra
    insn to load it.  */
300 = ((recog_data.operand_type[i] != OP_IN
301 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
302 + (recog_data.operand_type[i] != OP_OUT
303 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
304 - allows_mem[i]) * frequency;
305 /* If we have assigned a class to this allocno in our
306 first pass, add a cost to this alternative
307 corresponding to what we would add if this allocno
308 were not in the appropriate class. We could use
309 cover class here but it is less accurate
    approximation.  */
313 enum reg_class pref_class
314 = allocno_pref[ALLOCNO_NUM
315 (ira_curr_regno_allocno_map
318 if (pref_class == NO_REGS)
320 += ((recog_data.operand_type[i] != OP_IN
321 ? ira_memory_move_cost[mode][classes[i]][0]
323 + (recog_data.operand_type[i] != OP_OUT
324 ? ira_memory_move_cost[mode][classes[i]][1]
326 else if (ira_reg_class_intersect
327 [pref_class][classes[i]] == NO_REGS)
328 alt_cost += (ira_register_move_cost
329 [mode][pref_class][classes[i]]);
331 if (REGNO (ops[i]) != REGNO (ops[j])
332 && ! find_reg_note (insn, REG_DEAD, op))
335 /* This is in place of ordinary cost computation for
336 this operand, so skip to the end of the
337 alternative (should be just one character). */
338 while (*p && *p++ != ',')
346 /* Scan all the constraint letters. See if the operand
347 matches any of the constraints. Collect the valid
348 register classes and see if this operand accepts
    memory.  */
357 /* Ignore the next letter for this pass. */
363 case '!': case '#': case '&':
364 case '0': case '1': case '2': case '3': case '4':
365 case '5': case '6': case '7': case '8': case '9':
370 win = address_operand (op, GET_MODE (op));
371 /* We know this operand is an address, so we want it
372 to be allocated to a register that can be the
373 base of an address, i.e. BASE_REG_CLASS. */
375 = ira_reg_class_union[classes[i]]
376 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
379 case 'm': case 'o': case 'V':
380 /* It doesn't seem worth distinguishing between
381 offsettable and non-offsettable addresses
    here.  */
390 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
391 || GET_CODE (XEXP (op, 0)) == POST_DEC))
397 && (GET_CODE (XEXP (op, 0)) == PRE_INC
398 || GET_CODE (XEXP (op, 0)) == POST_INC))
404 if (GET_CODE (op) == CONST_DOUBLE
405 || (GET_CODE (op) == CONST_VECTOR
406 && (GET_MODE_CLASS (GET_MODE (op))
407 == MODE_VECTOR_FLOAT)))
413 if (GET_CODE (op) == CONST_DOUBLE
414 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
419 if (GET_CODE (op) == CONST_INT
420 || (GET_CODE (op) == CONST_DOUBLE
421 && GET_MODE (op) == VOIDmode))
426 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
431 if (GET_CODE (op) == CONST_INT
432 || (GET_CODE (op) == CONST_DOUBLE
433 && GET_MODE (op) == VOIDmode))
445 if (GET_CODE (op) == CONST_INT
446 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
457 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))))
461 classes[i] = ira_reg_class_union[classes[i]][GENERAL_REGS];
465 if (REG_CLASS_FROM_CONSTRAINT (c, p) != NO_REGS)
466 classes[i] = ira_reg_class_union[classes[i]]
467 [REG_CLASS_FROM_CONSTRAINT (c, p)];
468 #ifdef EXTRA_CONSTRAINT_STR
469 else if (EXTRA_CONSTRAINT_STR (op, c, p))
472 if (EXTRA_MEMORY_CONSTRAINT (c, p))
474 /* Every MEM can be reloaded to fit. */
479 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
481 /* Every address can be reloaded to fit. */
483 if (address_operand (op, GET_MODE (op)))
485 /* We know this operand is an address, so we
486 want it to be allocated to a hard register
487 that can be the base of an address,
488 i.e. BASE_REG_CLASS. */
490 = ira_reg_class_union[classes[i]]
491 [base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
/* Constraint letters can be multi-character; advance accordingly.  */
496 p += CONSTRAINT_LEN (c, p);
503 /* How we account for this operand now depends on whether it
504 is a pseudo register or not. If it is, we first check if
505 any register classes are valid. If not, we ignore this
506 alternative, since we want to assume that all allocnos get
507 allocated for register preferencing. If some register
508 class is valid, compute the costs of moving the allocno
    into that class.  */
510 if (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER)
512 if (classes[i] == NO_REGS)
514 /* We must always fail if the operand is a REG, but
515 we did not find a suitable class.
517 Otherwise we may perform an uninitialized read
518 from this_op_costs after the `continue' statement
    below.  */
524 struct costs *pp = this_op_costs[i];
526 if (ira_register_move_cost[mode] == NULL)
527 ira_init_register_move_cost (mode);
529 for (k = 0; k < cost_classes_num; k++)
531 rclass = cost_classes[k];
533 = ((recog_data.operand_type[i] != OP_OUT
534 ? ira_may_move_in_cost[mode][rclass]
535 [classes[i]] * frequency : 0)
536 + (recog_data.operand_type[i] != OP_IN
537 ? ira_may_move_out_cost[mode][classes[i]]
538 [rclass] * frequency : 0));
541 /* If the alternative actually allows memory, make
542 things a bit cheaper since we won't need an extra
    insn to load it.  */
545 = ((recog_data.operand_type[i] != OP_IN
546 ? ira_memory_move_cost[mode][classes[i]][0] : 0)
547 + (recog_data.operand_type[i] != OP_OUT
548 ? ira_memory_move_cost[mode][classes[i]][1] : 0)
549 - allows_mem[i]) * frequency;
550 /* If we have assigned a class to this allocno in our
551 first pass, add a cost to this alternative
552 corresponding to what we would add if this allocno
553 were not in the appropriate class. We could use
554 cover class here but it is less accurate
    approximation.  */
558 enum reg_class pref_class
559 = allocno_pref[ALLOCNO_NUM
560 (ira_curr_regno_allocno_map
563 if (pref_class == NO_REGS)
565 += ((recog_data.operand_type[i] != OP_IN
566 ? ira_memory_move_cost[mode][classes[i]][0]
568 + (recog_data.operand_type[i] != OP_OUT
569 ? ira_memory_move_cost[mode][classes[i]][1]
571 else if (ira_reg_class_intersect[pref_class][classes[i]]
573 alt_cost += (ira_register_move_cost
574 [mode][pref_class][classes[i]]);
579 /* Otherwise, if this alternative wins, either because we
580 have already determined that or if we have a hard
581 register of the proper class, there is no cost for this
    alternative.  */
583 else if (win || (REG_P (op)
584 && reg_fits_class_p (op, classes[i],
588 /* If registers are valid, the cost of this alternative
589 includes copying the object to and/or from a
    register.  */
591 else if (classes[i] != NO_REGS)
593 if (recog_data.operand_type[i] != OP_OUT)
594 alt_cost += copy_cost (op, mode, classes[i], 1, NULL);
596 if (recog_data.operand_type[i] != OP_IN)
597 alt_cost += copy_cost (op, mode, classes[i], 0, NULL);
599 /* The only other way this alternative can be used is if
600 this is a constant that could be placed into memory. */
601 else if (CONSTANT_P (op) && (allows_addr || allows_mem[i]))
602 alt_cost += ira_memory_move_cost[mode][classes[i]][1];
610 op_cost_add = alt_cost * frequency;
611 /* Finally, update the costs with the information we've
612 calculated about this alternative. */
613 for (i = 0; i < n_ops; i++)
614 if (REG_P (ops[i]) && REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
616 struct costs *pp = op_costs[i], *qq = this_op_costs[i];
/* An in-out operand is both read and written, so it counts twice.  */
617 int scale = 1 + (recog_data.operand_type[i] == OP_INOUT);
619 pp->mem_cost = MIN (pp->mem_cost,
620 (qq->mem_cost + op_cost_add) * scale);
622 for (k = 0; k < cost_classes_num; k++)
624 = MIN (pp->cost[k], (qq->cost[k] + op_cost_add) * scale);
628 /* If this insn is a single set copying operand 1 to operand 0 and
629 one operand is an allocno with the other a hard reg or an allocno
630 that prefers a hard register that is in its own register class
631 then we may want to adjust the cost of that register class to -1.
633 Avoid the adjustment if the source does not die to avoid
634 stressing of register allocator by preferencing two colliding
635 registers into single class.
637 Also avoid the adjustment if a copy between hard registers of the
638 class is expensive (ten times the cost of a default copy is
639 considered arbitrarily expensive). This avoids losing when the
640 preferred class is very expensive as the source of a copy
    instruction.  */
642 if ((set = single_set (insn)) != 0
643 && ops[0] == SET_DEST (set) && ops[1] == SET_SRC (set)
644 && REG_P (ops[0]) && REG_P (ops[1])
645 && find_regno_note (insn, REG_DEAD, REGNO (ops[1])))
646 for (i = 0; i <= 1; i++)
647 if (REGNO (ops[i]) >= FIRST_PSEUDO_REGISTER)
649 unsigned int regno = REGNO (ops[!i]);
650 enum machine_mode mode = GET_MODE (ops[!i]);
654 if (regno < FIRST_PSEUDO_REGISTER)
655 for (k = 0; k < cost_classes_num; k++)
657 rclass = cost_classes[k];
/* Only prefer classes that can hold the value in a single
   allocation unit.  */
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
659 && (reg_class_size[rclass]
660 == (unsigned) CLASS_MAX_NREGS (rclass, mode)))
662 if (reg_class_size[rclass] == 1)
663 op_costs[i]->cost[k] = -frequency;
/* Multi-register value: require every constituent hard register
   to be in the class before preferring it.  */
667 nr < (unsigned) hard_regno_nregs[regno][mode];
669 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass],
673 if (nr == (unsigned) hard_regno_nregs[regno][mode])
674 op_costs[i]->cost[k] = -frequency;
683 /* Wrapper around REGNO_OK_FOR_INDEX_P, to allow pseudo registers.
   Returns true for any pseudo, since it may yet be allocated to an
   index-capable hard register.  */
685 ok_for_index_p_nonstrict (rtx reg)
687 unsigned regno = REGNO (reg);
689 return regno >= FIRST_PSEUDO_REGISTER || REGNO_OK_FOR_INDEX_P (regno);
692 /* A version of regno_ok_for_base_p for use here, when all
693 pseudo-registers should count as OK. Arguments as for
694 regno_ok_for_base_p. */
696 ok_for_base_p_nonstrict (rtx reg, enum machine_mode mode,
697 enum rtx_code outer_code, enum rtx_code index_code)
699 unsigned regno = REGNO (reg);
/* Any pseudo is acceptable; hard registers defer to the target hook.  */
701 if (regno >= FIRST_PSEUDO_REGISTER)
703 return ok_for_base_p_1 (regno, mode, outer_code, index_code);
706 /* Record the pseudo registers we must reload into hard registers in a
707 subexpression of a memory address, X.
709 If CONTEXT is 0, we are looking at the base part of an address,
710 otherwise we are looking at the index part.
712 MODE is the mode of the memory reference; OUTER_CODE and INDEX_CODE
713 give the context that the rtx appears in. These three arguments
714 are passed down to base_reg_class.
716 SCALE is twice the amount to multiply the cost by (it is twice so
717 we can represent half-cost adjustments). */
/* NOTE(review): the return-type line, the switch statement header, and
   several case labels are elided from this excerpt.  */
719 record_address_regs (enum machine_mode mode, rtx x, int context,
720 enum rtx_code outer_code, enum rtx_code index_code,
723 enum rtx_code code = GET_CODE (x);
724 enum reg_class rclass;
/* Index context wants INDEX_REG_CLASS; base context asks the target.  */
727 rclass = INDEX_REG_CLASS;
729 rclass = base_reg_class (mode, outer_code, index_code);
742 /* When we have an address that is a sum, we must determine
743 whether registers are "base" or "index" regs. If there is a
744 sum of two registers, we must choose one to be the "base".
745 Luckily, we can use the REG_POINTER to make a good choice
746 most of the time. We only need to do this on machines that
747 can have two registers in an address and where the base and
748 index register classes are different.
750 ??? This code used to set REGNO_POINTER_FLAG in some cases,
751 but that seems bogus since it should only be set when we are
752 sure the register is being used as a pointer. */
754 rtx arg0 = XEXP (x, 0);
755 rtx arg1 = XEXP (x, 1);
756 enum rtx_code code0 = GET_CODE (arg0);
757 enum rtx_code code1 = GET_CODE (arg1);
759 /* Look inside subregs. */
761 arg0 = SUBREG_REG (arg0), code0 = GET_CODE (arg0);
763 arg1 = SUBREG_REG (arg1), code1 = GET_CODE (arg1);
765 /* If this machine only allows one register per address, it
766 must be in the first operand. */
767 if (MAX_REGS_PER_ADDRESS == 1)
768 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
770 /* If index and base registers are the same on this machine,
771 just record registers in any non-constant operands. We
772 assume here, as well as in the tests below, that all
773 addresses are in canonical form. */
774 else if (INDEX_REG_CLASS == base_reg_class (VOIDmode, PLUS, SCRATCH))
776 record_address_regs (mode, arg0, context, PLUS, code1, scale);
777 if (! CONSTANT_P (arg1))
778 record_address_regs (mode, arg1, context, PLUS, code0, scale);
781 /* If the second operand is a constant integer, it doesn't
782 change what class the first operand must be. */
783 else if (code1 == CONST_INT || code1 == CONST_DOUBLE)
784 record_address_regs (mode, arg0, context, PLUS, code1, scale);
785 /* If the second operand is a symbolic constant, the first
786 operand must be an index register. */
787 else if (code1 == SYMBOL_REF || code1 == CONST || code1 == LABEL_REF)
788 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
789 /* If both operands are registers but one is already a hard
790 register of index or reg-base class, give the other the
791 class that the hard register is not. */
792 else if (code0 == REG && code1 == REG
793 && REGNO (arg0) < FIRST_PSEUDO_REGISTER
794 && (ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
795 || ok_for_index_p_nonstrict (arg0)))
796 record_address_regs (mode, arg1,
797 ok_for_base_p_nonstrict (arg0, mode, PLUS, REG)
800 else if (code0 == REG && code1 == REG
801 && REGNO (arg1) < FIRST_PSEUDO_REGISTER
802 && (ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
803 || ok_for_index_p_nonstrict (arg1)))
804 record_address_regs (mode, arg0,
805 ok_for_base_p_nonstrict (arg1, mode, PLUS, REG)
808 /* If one operand is known to be a pointer, it must be the
809 base with the other operand the index. Likewise if the
810 other operand is a MULT. */
811 else if ((code0 == REG && REG_POINTER (arg0)) || code1 == MULT)
813 record_address_regs (mode, arg0, 0, PLUS, code1, scale);
814 record_address_regs (mode, arg1, 1, PLUS, code0, scale);
816 else if ((code1 == REG && REG_POINTER (arg1)) || code0 == MULT)
818 record_address_regs (mode, arg0, 1, PLUS, code1, scale);
819 record_address_regs (mode, arg1, 0, PLUS, code0, scale);
821 /* Otherwise, count equal chances that each might be a base or
822 index register. This case should be rare. */
825 record_address_regs (mode, arg0, 0, PLUS, code1, scale / 2);
826 record_address_regs (mode, arg0, 1, PLUS, code1, scale / 2);
827 record_address_regs (mode, arg1, 0, PLUS, code0, scale / 2);
828 record_address_regs (mode, arg1, 1, PLUS, code0, scale / 2);
833 /* Double the importance of an allocno that is incremented or
834 decremented, since it would take two extra insns if it ends
835 up in the wrong place. */
838 record_address_regs (mode, XEXP (x, 0), 0, code,
839 GET_CODE (XEXP (XEXP (x, 1), 1)), 2 * scale);
840 if (REG_P (XEXP (XEXP (x, 1), 1)))
841 record_address_regs (mode, XEXP (XEXP (x, 1), 1), 1, code, REG,
849 /* Double the importance of an allocno that is incremented or
850 decremented, since it would take two extra insns if it ends
851 up in the wrong place. If the operand is a pseudo-register,
852 show it is being used in an INC_DEC context. */
853 #ifdef FORBIDDEN_INC_DEC_CLASSES
854 if (REG_P (XEXP (x, 0))
855 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER)
856 in_inc_dec[ALLOCNO_NUM (ira_curr_regno_allocno_map
857 [REGNO (XEXP (x, 0))])] = true;
859 record_address_regs (mode, XEXP (x, 0), 0, code, SCRATCH, 2 * scale);
/* A REG used in an address: charge its allocno for living in memory
   or in each candidate class, at half of SCALE (SCALE is doubled).  */
867 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
870 pp = COSTS_OF_ALLOCNO (allocno_costs,
871 ALLOCNO_NUM (ira_curr_regno_allocno_map
873 pp->mem_cost += (ira_memory_move_cost[Pmode][rclass][1] * scale) / 2;
874 if (ira_register_move_cost[Pmode] == NULL)
875 ira_init_register_move_cost (Pmode);
876 for (k = 0; k < cost_classes_num; k++)
880 += (ira_may_move_in_cost[Pmode][i][rclass] * scale) / 2;
/* Default: recurse into the sub-expressions of X.  */
887 const char *fmt = GET_RTX_FORMAT (code);
889 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
891 record_address_regs (mode, XEXP (x, i), context, code, SCRATCH,
899 /* Calculate the costs of insn operands.  Fills OP_COSTS using
   ALLOCNO_PREF (may be null on the first pass) for preferences.  */
901 record_operand_costs (rtx insn, struct costs **op_costs,
902 enum reg_class *allocno_pref)
904 const char *constraints[MAX_RECOG_OPERANDS];
905 enum machine_mode modes[MAX_RECOG_OPERANDS];
/* Snapshot constraints and modes from recog_data for this insn.  */
908 for (i = 0; i < recog_data.n_operands; i++)
910 constraints[i] = recog_data.constraints[i];
911 modes[i] = recog_data.operand_mode[i];
914 /* If we get here, we are set up to record the costs of all the
915 operands for this insn. Start by initializing the costs. Then
916 handle any address registers. Finally record the desired classes
917 for any allocnos, doing it twice if some pair of operands are
    commutative.  */
919 for (i = 0; i < recog_data.n_operands; i++)
921 memcpy (op_costs[i], init_cost, struct_costs_size);
/* Strip a SUBREG wrapper so we cost the inner register/memory.  */
923 if (GET_CODE (recog_data.operand[i]) == SUBREG)
924 recog_data.operand[i] = SUBREG_REG (recog_data.operand[i]);
926 if (MEM_P (recog_data.operand[i]))
927 record_address_regs (GET_MODE (recog_data.operand[i]),
928 XEXP (recog_data.operand[i], 0),
929 0, MEM, SCRATCH, frequency * 2);
930 else if (constraints[i][0] == 'p'
931 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0],
933 record_address_regs (VOIDmode, recog_data.operand[i], 0, ADDRESS,
934 SCRATCH, frequency * 2);
937 /* Check for commutative in a separate loop so everything will have
938 been initialized. We must do this even if one operand is a
939 constant--see addsi3 in m68k.md. */
940 for (i = 0; i < (int) recog_data.n_operands - 1; i++)
941 if (constraints[i][0] == '%')
943 const char *xconstraints[MAX_RECOG_OPERANDS];
946 /* Handle commutative operands by swapping the constraints.
947 We assume the modes are the same. */
948 for (j = 0; j < recog_data.n_operands; j++)
949 xconstraints[j] = constraints[j];
951 xconstraints[i] = constraints[i+1];
952 xconstraints[i+1] = constraints[i];
953 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
954 recog_data.operand, modes,
955 xconstraints, insn, op_costs, allocno_pref);
/* Record with the original (unswapped) constraints as well.  */
957 record_reg_classes (recog_data.n_alternatives, recog_data.n_operands,
958 recog_data.operand, modes,
959 constraints, insn, op_costs, allocno_pref);
964 /* Process one insn INSN. Scan it and record each time it would save
965 code to put a certain allocnos in a certain class. Return the last
966 insn processed, so that the scan can be continued from there. */
968 scan_one_insn (rtx insn)
970 enum rtx_code pat_code;
/* Skip patterns that carry no register-allocation cost information.  */
977 pat_code = GET_CODE (PATTERN (insn));
978 if (pat_code == USE || pat_code == CLOBBER || pat_code == ASM_INPUT
979 || pat_code == ADDR_VEC || pat_code == ADDR_DIFF_VEC)
982 set = single_set (insn);
985 /* If this insn loads a parameter from its stack slot, then it
986 represents a savings, rather than a cost, if the parameter is
987 stored in memory. Record this fact. */
988 if (set != 0 && REG_P (SET_DEST (set)) && MEM_P (SET_SRC (set))
989 && (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != NULL_RTX
990 && MEM_P (XEXP (note, 0)))
992 enum reg_class cl = GENERAL_REGS;
993 rtx reg = SET_DEST (set);
994 int num = ALLOCNO_NUM (ira_curr_regno_allocno_map[REGNO (reg)]);
997 cl = allocno_pref[num];
/* Credit (subtract) the memory load cost for this allocno.  */
998 COSTS_OF_ALLOCNO (allocno_costs, num)->mem_cost
999 -= ira_memory_move_cost[GET_MODE (reg)][cl][1] * frequency;
1000 record_address_regs (GET_MODE (SET_SRC (set)), XEXP (SET_SRC (set), 0),
1001 0, MEM, SCRATCH, frequency * 2);
1004 record_operand_costs (insn, op_costs, allocno_pref);
1006 /* Now add the cost for each operand to the total costs for its
    allocno.  */
1008 for (i = 0; i < recog_data.n_operands; i++)
1009 if (REG_P (recog_data.operand[i])
1010 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER)
1012 int regno = REGNO (recog_data.operand[i]);
1014 = COSTS_OF_ALLOCNO (allocno_costs,
1015 ALLOCNO_NUM (ira_curr_regno_allocno_map[regno]));
1016 struct costs *q = op_costs[i];
1018 p->mem_cost += q->mem_cost;
1019 for (k = 0; k < cost_classes_num; k++)
1020 p->cost[k] += q->cost[k];
1028 /* Print allocnos costs to file F.  For each allocno: its number,
    regno, enclosing BB or loop, and the per-class (and, for regional
    algorithms, accumulated) costs plus the memory cost.  */
1030 print_costs (FILE *f)
1034 ira_allocno_iterator ai;
1037 FOR_EACH_ALLOCNO (a, ai)
1041 int regno = ALLOCNO_REGNO (a);
1043 i = ALLOCNO_NUM (a);
1044 fprintf (f, " a%d(r%d,", i, regno);
1045 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1046 fprintf (f, "b%d", bb->index);
1048 fprintf (f, "l%d", ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1049 fprintf (f, ") costs:");
1050 for (k = 0; k < cost_classes_num; k++)
1052 rclass = cost_classes[k];
/* Print only classes usable for this allocno's mode (and legal for
   auto-inc/dec and mode-change restrictions when configured).  */
1053 if (contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (regno)]
1054 #ifdef FORBIDDEN_INC_DEC_CLASSES
1055 && (! in_inc_dec[i] || ! forbidden_inc_dec_class[rclass])
1057 #ifdef CANNOT_CHANGE_MODE_CLASS
1058 && ! invalid_mode_change_p (regno, (enum reg_class) rclass,
1059 PSEUDO_REGNO_MODE (regno))
1063 fprintf (f, " %s:%d", reg_class_names[rclass],
1064 COSTS_OF_ALLOCNO (allocno_costs, i)->cost[k]);
1065 if (flag_ira_algorithm == IRA_ALGORITHM_REGIONAL
1066 || flag_ira_algorithm == IRA_ALGORITHM_MIXED)
1067 fprintf (f, ",%d", COSTS_OF_ALLOCNO (total_costs, i)->cost[k]);
1070 fprintf (f, " MEM:%i\n", COSTS_OF_ALLOCNO (allocno_costs, i)->mem_cost);
1074 /* Traverse the BB represented by LOOP_TREE_NODE to update the allocno
    costs: set the global FREQUENCY from the BB and scan its insns.  */
1077 process_bb_node_for_costs (ira_loop_tree_node_t loop_tree_node)
1082 bb = loop_tree_node->bb;
1085 frequency = REG_FREQ_FROM_BB (bb);
1088 FOR_BB_INSNS (bb, insn)
1089 insn = scan_one_insn (insn);
1092 /* Find costs of register classes and memory for allocnos and their
1095 find_allocno_class_costs (void)
1102 #ifdef FORBIDDEN_INC_DEC_CLASSES
1103 in_inc_dec = ira_allocate (sizeof (bool) * ira_allocnos_num);
1104 #endif /* FORBIDDEN_INC_DEC_CLASSES */
1105 allocno_pref = NULL;
1106 /* Normally we scan the insns once and determine the best class to
1107 use for each allocno. However, if -fexpensive-optimizations are
1108 on, we do so twice, the second time using the tentative best
1109 classes to guide the selection. */
1110 for (pass = 0; pass <= flag_expensive_optimizations; pass++)
1112 if (internal_flag_ira_verbose > 0 && ira_dump_file)
1113 fprintf (ira_dump_file, "\nPass %i for finding allocno costs\n\n",
1115 /* We could use only cover classes. Unfortunately it does not
1116 work well for some targets where some subclass of cover class
1117 is costly and wrong cover class is chosen. */
1118 for (i = 0; i < N_REG_CLASSES; i++)
1119 cost_class_nums[i] = -1;
1120 for (cost_classes_num = 0;
1121 cost_classes_num < ira_important_classes_num;
1124 cost_classes[cost_classes_num]
1125 = ira_important_classes[cost_classes_num];
1126 cost_class_nums[cost_classes[cost_classes_num]]
1130 = sizeof (struct costs) + sizeof (int) * (cost_classes_num - 1);
1131 /* Zero out our accumulation of the cost of each class for each
1133 memset (allocno_costs, 0, ira_allocnos_num * struct_costs_size);
1134 #ifdef FORBIDDEN_INC_DEC_CLASSES
1135 memset (in_inc_dec, 0, ira_allocnos_num * sizeof (bool));
1138 /* Scan the instructions and record each time it would save code
1139 to put a certain allocno in a certain class. */
1140 ira_traverse_loop_tree (true, ira_loop_tree_root,
1141 process_bb_node_for_costs, NULL);
1143 memcpy (total_costs, allocno_costs,
1144 max_struct_costs_size * ira_allocnos_num);
1146 allocno_pref = allocno_pref_buffer;
1148 /* Now for each allocno look at how desirable each class is and
1149 find which class is preferred. */
1150 for (i = max_reg_num () - 1; i >= FIRST_PSEUDO_REGISTER; i--)
1152 ira_allocno_t a, parent_a;
1153 int rclass, a_num, parent_a_num;
1154 ira_loop_tree_node_t parent;
1156 enum reg_class best, alt_class, common_class;
1157 #ifdef FORBIDDEN_INC_DEC_CLASSES
1158 int inc_dec_p = false;
1161 if (ira_regno_allocno_map[i] == NULL)
1163 memset (temp_costs, 0, struct_costs_size);
1164 /* Find cost of all allocnos with the same regno. */
1165 for (a = ira_regno_allocno_map[i];
1167 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1169 a_num = ALLOCNO_NUM (a);
1170 if ((flag_ira_algorithm == IRA_ALGORITHM_REGIONAL
1171 || flag_ira_algorithm == IRA_ALGORITHM_MIXED)
1172 && (parent = ALLOCNO_LOOP_TREE_NODE (a)->parent) != NULL
1173 && (parent_a = parent->regno_allocno_map[i]) != NULL
1174 /* There are no caps yet. */
1175 && bitmap_bit_p (ALLOCNO_LOOP_TREE_NODE (a)->border_allocnos,
1178 /* Propagate costs to upper levels in the region
1180 parent_a_num = ALLOCNO_NUM (parent_a);
1181 for (k = 0; k < cost_classes_num; k++)
1182 COSTS_OF_ALLOCNO (total_costs, parent_a_num)->cost[k]
1183 += COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k];
1184 COSTS_OF_ALLOCNO (total_costs, parent_a_num)->mem_cost
1185 += COSTS_OF_ALLOCNO (total_costs, a_num)->mem_cost;
1187 for (k = 0; k < cost_classes_num; k++)
1189 += COSTS_OF_ALLOCNO (allocno_costs, a_num)->cost[k];
1190 temp_costs->mem_cost
1191 += COSTS_OF_ALLOCNO (allocno_costs, a_num)->mem_cost;
1192 #ifdef FORBIDDEN_INC_DEC_CLASSES
1193 if (in_inc_dec[a_num])
1197 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1199 alt_class = NO_REGS;
1200 /* Find best common class for all allocnos with the same
1202 for (k = 0; k < cost_classes_num; k++)
1204 rclass = cost_classes[k];
1205 /* Ignore classes that are too small for this operand or
1206 invalid for an operand that was auto-incremented. */
1207 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1208 #ifdef FORBIDDEN_INC_DEC_CLASSES
1209 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1211 #ifdef CANNOT_CHANGE_MODE_CLASS
1212 || invalid_mode_change_p (i, (enum reg_class) rclass,
1213 PSEUDO_REGNO_MODE (i))
1217 if (temp_costs->cost[k] < best_cost)
1219 best_cost = temp_costs->cost[k];
1220 best = (enum reg_class) rclass;
1222 else if (temp_costs->cost[k] == best_cost)
1223 best = ira_reg_class_union[best][rclass];
1224 if (pass == flag_expensive_optimizations
1225 && temp_costs->cost[k] < temp_costs->mem_cost
1226 && (reg_class_size[reg_class_subunion[alt_class][rclass]]
1227 > reg_class_size[alt_class]))
1228 alt_class = reg_class_subunion[alt_class][rclass];
1230 if (pass == flag_expensive_optimizations)
1232 if (best_cost > temp_costs->mem_cost)
1233 best = alt_class = NO_REGS;
1234 else if (best == alt_class)
1235 alt_class = NO_REGS;
1236 setup_reg_classes (i, best, alt_class);
1237 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
1238 fprintf (ira_dump_file,
1239 " r%d: preferred %s, alternative %s\n",
1240 i, reg_class_names[best], reg_class_names[alt_class]);
1242 if (best_cost > temp_costs->mem_cost)
1243 common_class = NO_REGS;
1245 /* Make the common class a cover class. Remember all
1246 allocnos with the same regno should have the same cover
1248 common_class = ira_class_translate[best];
1249 for (a = ira_regno_allocno_map[i];
1251 a = ALLOCNO_NEXT_REGNO_ALLOCNO (a))
1253 a_num = ALLOCNO_NUM (a);
1254 if (common_class == NO_REGS)
1258 /* Finding best class which is subset of the common
1260 best_cost = (1 << (HOST_BITS_PER_INT - 2)) - 1;
1262 for (k = 0; k < cost_classes_num; k++)
1264 rclass = cost_classes[k];
1265 if (! ira_class_subset_p[rclass][common_class])
1267 /* Ignore classes that are too small for this
1268 operand or invalid for an operand that was
1269 auto-incremented. */
1270 if (! contains_reg_of_mode[rclass][PSEUDO_REGNO_MODE (i)]
1271 #ifdef FORBIDDEN_INC_DEC_CLASSES
1272 || (inc_dec_p && forbidden_inc_dec_class[rclass])
1274 #ifdef CANNOT_CHANGE_MODE_CLASS
1275 || invalid_mode_change_p (i, (enum reg_class) rclass,
1276 PSEUDO_REGNO_MODE (i))
1280 else if (COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k]
1284 = COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k];
1285 best = (enum reg_class) rclass;
1287 else if (COSTS_OF_ALLOCNO (total_costs, a_num)->cost[k]
1289 best = ira_reg_class_union[best][rclass];
1292 if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL
1293 && (pass == 0 || allocno_pref[a_num] != best))
1295 fprintf (ira_dump_file, " a%d (r%d,", a_num, i);
1296 if ((bb = ALLOCNO_LOOP_TREE_NODE (a)->bb) != NULL)
1297 fprintf (ira_dump_file, "b%d", bb->index);
1299 fprintf (ira_dump_file, "l%d",
1300 ALLOCNO_LOOP_TREE_NODE (a)->loop->num);
1301 fprintf (ira_dump_file, ") best %s, cover %s\n",
1302 reg_class_names[best],
1303 reg_class_names[ira_class_translate[best]]);
1305 allocno_pref[a_num] = best;
1309 if (internal_flag_ira_verbose > 4 && ira_dump_file)
1311 print_costs (ira_dump_file);
1312 fprintf (ira_dump_file,"\n");
1315 #ifdef FORBIDDEN_INC_DEC_CLASSES
1316 ira_free (in_inc_dec);
1322 /* Process moves involving hard regs to modify allocno hard register
1323 costs. We can do this only after determining allocno cover class.
1324 If a hard register forms a register class, than moves with the hard
1325 register are already taken into account in class costs for the
/* NOTE(review): this extract is garbled -- the numeric prefixes on each
   line look like file line numbers fused in by the extraction tool, and
   several lines are missing (e.g. the `static void' before the function
   name, braces, and `continue' statements).  The comments below describe
   only what the visible lines establish.  */
1328 process_bb_node_for_hard_reg_moves (ira_loop_tree_node_t loop_tree_node)
1330 int i, freq, cost, src_regno, dst_regno, hard_regno;
1333 enum reg_class rclass, hard_reg_class;
1334 enum machine_mode mode;
1336 rtx insn, set, src, dst;
1338 bb = loop_tree_node->bb;
/* Move costs below are scaled by the execution frequency of this
   basic block.  */
1341 freq = REG_FREQ_FROM_BB (bb);
1344 FOR_BB_INSNS (bb, insn)
1346 if (! INSN_P (insn))
1348 set = single_set (insn);
1349 if (set == NULL_RTX)
/* Only plain register-to-register single-set moves are of interest.  */
1351 dst = SET_DEST (set);
1352 src = SET_SRC (set);
1353 if (! REG_P (dst) || ! REG_P (src))
1355 dst_regno = REGNO (dst);
1356 src_regno = REGNO (src);
/* Identify a move between exactly one pseudo and one hard register:
   HARD_REGNO gets the hard register, A the pseudo's allocno in the
   current region's regno->allocno map.  */
1357 if (dst_regno >= FIRST_PSEUDO_REGISTER
1358 && src_regno < FIRST_PSEUDO_REGISTER)
1360 hard_regno = src_regno;
1362 a = ira_curr_regno_allocno_map[dst_regno];
1364 else if (src_regno >= FIRST_PSEUDO_REGISTER
1365 && dst_regno < FIRST_PSEUDO_REGISTER)
1367 hard_regno = dst_regno;
1369 a = ira_curr_regno_allocno_map[src_regno];
/* Ignore hard registers that lie outside the allocno's cover class.  */
1373 rclass = ALLOCNO_COVER_CLASS (a);
1374 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], hard_regno))
1376 i = ira_class_hard_reg_index[rclass][hard_regno];
1379 mode = ALLOCNO_MODE (a);
1380 hard_reg_class = REGNO_REG_CLASS (hard_regno);
/* Frequency-weighted register-move cost between the hard register's
   natural class and the cover class; TO_P selects the direction and
   is presumably set on a line missing from this extract -- confirm.  */
1381 cost = (to_p ? ira_register_move_cost[mode][hard_reg_class][rclass]
1382 : ira_register_move_cost[mode][rclass][hard_reg_class]) * freq;
1383 ira_allocate_and_set_costs (&ALLOCNO_HARD_REG_COSTS (a), rclass,
1384 ALLOCNO_COVER_CLASS_COST (a));
1385 ira_allocate_and_set_costs (&ALLOCNO_CONFLICT_HARD_REG_COSTS (a),
/* Make this particular hard register look cheaper for the allocno,
   and keep the cover-class cost no larger than its cheapest entry.  */
1387 ALLOCNO_HARD_REG_COSTS (a)[i] -= cost;
1388 ALLOCNO_CONFLICT_HARD_REG_COSTS (a)[i] -= cost;
1389 ALLOCNO_COVER_CLASS_COST (a) = MIN (ALLOCNO_COVER_CLASS_COST (a),
1390 ALLOCNO_HARD_REG_COSTS (a)[i]);
1394 /* After we find hard register and memory costs for allocnos, define
1395 its cover class and modify hard register cost because insns moving
1396 allocno to/from hard registers. */
/* NOTE(review): extract is garbled -- numeric prefixes are extraction
   artifacts and some lines (the `static void' line, braces, a
   declaration of `reg_costs') are missing.  */
1398 setup_allocno_cover_class_and_costs (void)
1400 int i, j, n, regno, num;
1402 enum reg_class cover_class, rclass;
1403 enum machine_mode mode;
1405 ira_allocno_iterator ai;
1407 FOR_EACH_ALLOCNO (a, ai)
1409 i = ALLOCNO_NUM (a);
1410 mode = ALLOCNO_MODE (a);
/* Map the preferred class found by the cost pass to its cover class.  */
1411 cover_class = ira_class_translate[allocno_pref[i]];
1412 ira_assert (allocno_pref[i] == NO_REGS || cover_class != NO_REGS);
1413 ALLOCNO_MEMORY_COST (a) = COSTS_OF_ALLOCNO (allocno_costs, i)->mem_cost;
1414 ira_set_allocno_cover_class (a, cover_class);
1415 if (cover_class == NO_REGS)
1417 ALLOCNO_AVAILABLE_REGS_NUM (a) = ira_available_class_regs[cover_class];
1418 num = cost_class_nums[allocno_pref[i]];
1419 ira_assert (num >= 0);
/* Base cost of keeping the allocno in a register of its preferred
   class.  */
1420 ALLOCNO_COVER_CLASS_COST (a)
1421 = COSTS_OF_ALLOCNO (allocno_costs, i)->cost[num];
/* When the preferred class differs from the cover class, build a
   per-hard-register cost vector so each hard register in the cover
   class is costed by its own class where possible.  */
1422 if (optimize && ALLOCNO_COVER_CLASS (a) != allocno_pref[i])
1424 n = ira_class_hard_regs_num[cover_class];
1425 ALLOCNO_HARD_REG_COSTS (a)
1426 = reg_costs = ira_allocate_cost_vector (cover_class);
1427 for (j = n - 1; j >= 0; j--)
1429 regno = ira_class_hard_regs[cover_class][j];
1430 rclass = REGNO_REG_CLASS (regno);
1431 num = cost_class_nums[rclass];
1434 /* The hard register class is not a cover class or a
1435 class not fully inside in a cover class -- use
1436 the allocno cover class. */
1437 ira_assert (ira_hard_regno_cover_class[regno] == cover_class);
1438 num = cost_class_nums[cover_class];
1440 reg_costs[j] = COSTS_OF_ALLOCNO (allocno_costs, i)->cost[num];
/* Finally account for explicit moves between pseudos and hard
   registers found in the insn stream.  */
1445 ira_traverse_loop_tree (true, ira_loop_tree_root,
1446 process_bb_node_for_hard_reg_moves, NULL);
1451 /* Function called once during compiler work. */
/* One-time clearing of the file-static cost pointers; they are
   allocated later in ira_init_costs.  NOTE(review): lines are missing
   from this extract (presumably `op_costs[i] = NULL' and the
   `init_cost'/`temp_costs' initializations -- confirm against the
   full file).  */
1453 ira_init_costs_once (void)
1458 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1461 this_op_costs[i] = NULL;
1464 cost_classes = NULL;
1467 /* Free allocated temporary cost vectors. */
/* Release everything ira_init_costs allocated with xmalloc; the NULL
   checks and the clearing of the pointers make the function safe to
   call more than once.  NOTE(review): some lines (braces, the
   `free (op_costs[i])' and `free (temp_costs)' calls) are missing
   from this extract.  */
1469 free_ira_costs (void)
1473 if (init_cost != NULL)
1476 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1478 if (op_costs[i] != NULL)
1480 if (this_op_costs[i] != NULL)
1481 free (this_op_costs[i]);
/* Clear the pointers so a later ira_init_costs starts clean.  */
1482 op_costs[i] = this_op_costs[i] = NULL;
1484 if (temp_costs != NULL)
1487 if (cost_classes != NULL)
1488 free (cost_classes);
1489 cost_classes = NULL;
1492 /* This is called each time register related information is
/* (Re)allocate the file-static cost vectors, sized for the current
   target's number of important register classes.  `struct costs' ends
   in a one-element cost array (see the declarations near the top of
   the file), hence the `- 1' in the size computation.  NOTE(review):
   a leading call freeing the old vectors appears to be missing from
   this extract.  */
1495 ira_init_costs (void)
1500 max_struct_costs_size
1501 = sizeof (struct costs) + sizeof (int) * (ira_important_classes_num - 1);
1502 /* Don't use ira_allocate because vectors live through several IRA calls. */
1503 init_cost = (struct costs *) xmalloc (max_struct_costs_size);
/* Initialize every memory and class cost to a large default value
   (1000000).  */
1504 init_cost->mem_cost = 1000000;
1505 for (i = 0; i < ira_important_classes_num; i++)
1506 init_cost->cost[i] = 1000000;
1507 for (i = 0; i < MAX_RECOG_OPERANDS; i++)
1509 op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1510 this_op_costs[i] = (struct costs *) xmalloc (max_struct_costs_size);
1512 temp_costs = (struct costs *) xmalloc (max_struct_costs_size);
1513 cost_classes = (enum reg_class *) xmalloc (sizeof (enum reg_class)
1514 * ira_important_classes_num);
1517 /* Function called once at the end of compiler work. */
/* NOTE(review): only the name line of this function survives in the
   extract; its body (presumably a call freeing the cost vectors,
   e.g. free_ira_costs) is missing -- confirm against the full file.  */
1519 ira_finish_costs_once (void)
1526 /* Entry function which defines cover class, memory and hard register
1527 costs for each allocno. */
/* NOTE(review): the function header line (`void ira_costs (void)'),
   braces, and the declarations of `a' and `allocno_pref_buffer' are
   missing from this extract; numeric prefixes are extraction
   artifacts.  */
1532 ira_allocno_iterator ai;
/* Working cost arrays for the whole pass, one `struct costs' per
   allocno.  */
1534 allocno_costs = (struct costs *) ira_allocate (max_struct_costs_size
1535 * ira_allocnos_num);
1536 total_costs = (struct costs *) ira_allocate (max_struct_costs_size
1537 * ira_allocnos_num);
1539 = (enum reg_class *) ira_allocate (sizeof (enum reg_class)
1540 * ira_allocnos_num);
/* Compute class costs, then assign cover classes and per-register
   costs from them.  */
1541 find_allocno_class_costs ();
1542 setup_allocno_cover_class_and_costs ();
1543 /* Because we could process operands only as subregs, check mode of
1544 the registers themselves too. */
1545 FOR_EACH_ALLOCNO (a, ai)
1546 if (ira_register_move_cost[ALLOCNO_MODE (a)] == NULL
1547 && have_regs_of_mode[ALLOCNO_MODE (a)])
1548 ira_init_register_move_cost (ALLOCNO_MODE (a));
/* The temporaries are no longer needed once classes and costs are
   recorded on the allocnos themselves.  */
1549 ira_free (allocno_pref_buffer);
1550 ira_free (total_costs);
1551 ira_free (allocno_costs);
1556 /* Change hard register costs for allocnos which lives through
1557 function calls. This is called only when we found all intersected
1558 calls during building allocno live ranges. */
1560 ira_tune_allocno_costs_and_cover_classes (void)
1563 int cost, min_cost, *reg_costs;
1564 enum reg_class cover_class, rclass;
1565 enum machine_mode mode;
1567 ira_allocno_iterator ai;
1569 FOR_EACH_ALLOCNO (a, ai)
1571 cover_class = ALLOCNO_COVER_CLASS (a);
1572 if (cover_class == NO_REGS)
1574 mode = ALLOCNO_MODE (a);
1575 n = ira_class_hard_regs_num[cover_class];
1577 if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0)
1579 ira_allocate_and_set_costs
1580 (&ALLOCNO_HARD_REG_COSTS (a), cover_class,
1581 ALLOCNO_COVER_CLASS_COST (a));
1582 reg_costs = ALLOCNO_HARD_REG_COSTS (a);
1583 for (j = n - 1; j >= 0; j--)
1585 regno = ira_class_hard_regs[cover_class][j];
1586 rclass = REGNO_REG_CLASS (regno);
1588 if (! ira_hard_reg_not_in_set_p (regno, mode, call_used_reg_set)
1589 || HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1590 cost += (ALLOCNO_CALL_FREQ (a)
1591 * (ira_memory_move_cost[mode][rclass][0]
1592 + ira_memory_move_cost[mode][rclass][1]));
1593 #ifdef IRA_HARD_REGNO_ADD_COST_MULTIPLIER
1594 cost += ((ira_memory_move_cost[mode][rclass][0]
1595 + ira_memory_move_cost[mode][rclass][1])
1597 * IRA_HARD_REGNO_ADD_COST_MULTIPLIER (regno) / 2);
1599 reg_costs[j] += cost;
1600 if (min_cost > reg_costs[j])
1601 min_cost = reg_costs[j];
1604 if (min_cost != INT_MAX)
1605 ALLOCNO_COVER_CLASS_COST (a) = min_cost;