for (cl = 0; cl < N_REG_CLASSES; cl++)
  ira_reg_class_max_nregs[cl][m]
    = ira_reg_class_min_nregs[cl][m]
-   = CLASS_MAX_NREGS ((enum reg_class) cl, (enum machine_mode) m);
+   = targetm.class_max_nregs ((reg_class_t) cl, (enum machine_mode) m);
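This hunk converts the CLASS_MAX_NREGS target macro into the class_max_nregs target hook. As a rough sketch of the semantics the hook must provide (the function name here is hypothetical; the real default implementation lives in targhooks.c), most targets simply round the mode size up to whole words:

    /* Hypothetical sketch of a class_max_nregs hook: how many hard
       registers of class RCLASS a value of MODE occupies.  Most
       targets round the mode size up to a whole number of words.  */
    static unsigned char
    sketch_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
                            enum machine_mode mode)
    {
      return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
              / UNITS_PER_WORD);
    }

Targets whose register classes have unusual register widths override the hook instead of defining the old macro.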
for (cl = 0; cl < N_REG_CLASSES; cl++)
  for (i = 0;
       (cl2 = alloc_reg_class_subclasses[cl][i]) != LIM_REG_CLASSES;
sizeof (move_table) * N_REG_CLASSES);
for (cl1 = 0; cl1 < N_REG_CLASSES; cl1++)
  {
+   /* Some subclasses are too small to have enough registers to hold
+      a value of MODE.  Just ignore them.  */
+   if (ira_reg_class_max_nregs[cl1][mode] > ira_available_class_regs[cl1])
+     continue;
    COPY_HARD_REG_SET (temp_hard_regset, reg_class_contents[cl1]);
    AND_COMPL_HARD_REG_SET (temp_hard_regset, no_unit_alloc_regs);
    if (hard_reg_set_empty_p (temp_hard_regset))
reg_class_contents[pclass]);
}
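The added early exit recognizes that a class whose allocatable registers number fewer than the registers a MODE value occupies can never hold such a value: for example, a two-register DImode value on a 32-bit target cannot live in a class with a single allocatable register. A hypothetical helper stating the same condition positively, in the style of ira-int.h:

    /* Hypothetical helper mirroring the new check above: class CL can
       hold a value of MODE only when it has at least as many
       allocatable registers as the value occupies.  */
    static bool
    class_can_hold_mode_p (int cl, int mode)
    {
      return (ira_reg_class_max_nregs[cl][mode]
              <= ira_available_class_regs[cl]);
    }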
if (ALLOCNO_CALLS_CROSSED_NUM (a) != 0
-   && ! ira_hard_reg_not_in_set_p (hard_regno, ALLOCNO_MODE (a),
-                                   call_used_reg_set))
+   && ira_hard_reg_set_intersection_p (hard_regno, ALLOCNO_MODE (a),
+                                       call_used_reg_set))
  {
    ira_assert (!optimize || flag_caller_saves
                || regno >= ira_reg_equiv_len
                || ALLOCNO_EMIT_DATA (a)->mem_optimized_dest_p
                || (ALLOCNO_MEMORY_COST (a)
                    - ALLOCNO_CLASS_COST (a)) < 0);
-   ira_assert (hard_regno < 0
-               || ! ira_hard_reg_not_in_set_p (hard_regno, ALLOCNO_MODE (a),
-                                               reg_class_contents
-                                               [ALLOCNO_CLASS (a)]));
+   ira_assert
+     (hard_regno < 0
+      || ira_hard_reg_in_set_p (hard_regno, ALLOCNO_MODE (a),
+                                reg_class_contents[ALLOCNO_CLASS (a)]));
  }
}
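Here and in the next hunk, the double negative ! ira_hard_reg_not_in_set_p gives way to positively named predicates, and the two replacements are deliberately different: the call-clobber test only needs some overlap with call_used_reg_set to force a caller save, while the class asserts require that every hard register of the value belong to the allocno's class. A sketch of the two predicates, assuming the ira-int.h convention that hard_regno_nregs[regno][mode] gives the number of hard registers a MODE value starting at REGNO occupies:

    /* TRUE if at least one of the hard registers occupied by a value
       of MODE starting at HARD_REGNO is in HARD_REGSET.  */
    static inline bool
    ira_hard_reg_set_intersection_p (int hard_regno, enum machine_mode mode,
                                     HARD_REG_SET hard_regset)
    {
      int i;

      for (i = hard_regno_nregs[hard_regno][mode] - 1; i >= 0; i--)
        if (TEST_HARD_REG_BIT (hard_regset, hard_regno + i))
          return true;
      return false;
    }

    /* TRUE if all of those hard registers are in HARD_REGSET.  */
    static inline bool
    ira_hard_reg_in_set_p (int hard_regno, enum machine_mode mode,
                           HARD_REG_SET hard_regset)
    {
      int i;

      for (i = hard_regno_nregs[hard_regno][mode] - 1; i >= 0; i--)
        if (! TEST_HARD_REG_BIT (hard_regset, hard_regno + i))
          return false;
      return true;
    }

For a single-register value the two predicates coincide, which is why the old negated helper could serve both callers.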
{
  hard_regno = ALLOCNO_HARD_REGNO (a);
  ira_assert (hard_regno < 0
-             || ! ira_hard_reg_not_in_set_p
-                  (hard_regno, ALLOCNO_MODE (a),
-                   reg_class_contents[ALLOCNO_CLASS (a)]));
+             || (ira_hard_reg_in_set_p
+                 (hard_regno, ALLOCNO_MODE (a),
+                  reg_class_contents[ALLOCNO_CLASS (a)])));
  if (hard_regno < 0)
    {
      cost = ALLOCNO_MEMORY_COST (a);