}
}
+/* If not using flag_reorder_blocks_and_partition, decide early whether the
+ current function goes into the cold section, so that targets can use
+ current_function_section during RTL expansion. DECL describes the
+ function. */
+
+void
+decide_function_section (tree decl)
+{
+ /* Default: the function starts in the hot text section. */
+ first_function_block_is_cold = false;
+
+ if (flag_reorder_blocks_and_partition)
+ /* We will decide in assemble_start_function. */
+ return;
+
+ /* Only a function placed in a named section is considered here; the
+ cgraph node's execution-frequency estimate determines coldness.
+ NOTE(review): cgraph_get_node may return NULL (e.g. no node yet),
+ in which case the function is treated as hot. */
+ if (DECL_SECTION_NAME (decl))
+ {
+ struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ /* Calls to function_section rely on first_function_block_is_cold
+ being accurate. */
+ first_function_block_is_cold = (node
+ && node->frequency
+ == NODE_FREQUENCY_UNLIKELY_EXECUTED);
+ }
+
+ /* Seed the section-tracking state now so that targets querying
+ current_function_section during RTL expansion see the right
+ answer before assemble_start_function runs. */
+ in_cold_section_p = first_function_block_is_cold;
+}
+
/* Output assembler code for the constant pool of a function and associated
with defining the name of the function. DECL describes the function.
NAME is the function's name. For the constant pool, we use the current
char tmp_label[100];
bool hot_label_written = false;
- first_function_block_is_cold = false;
if (flag_reorder_blocks_and_partition)
{
ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
if (flag_reorder_blocks_and_partition)
{
+ first_function_block_is_cold = false;
+
switch_to_section (unlikely_text_section ());
assemble_align (DECL_ALIGN (decl));
ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
hot_label_written = true;
first_function_block_is_cold = true;
}
- }
- else if (DECL_SECTION_NAME (decl))
- {
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
- /* Calls to function_section rely on first_function_block_is_cold
- being accurate. */
- first_function_block_is_cold = (node
- && node->frequency
- == NODE_FREQUENCY_UNLIKELY_EXECUTED);
+ in_cold_section_p = first_function_block_is_cold;
}
- in_cold_section_p = first_function_block_is_cold;
/* Switch to the correct text section for the start of the function. */
definition. */
struct cgraph_node *node = cgraph_get_create_node (decl);
if (!DECL_EXTERNAL (decl)
- && (!node->local.vtable_method || !cgraph_global_info_ready
- || !node->local.finalized))
+ && !node->local.finalized)
cgraph_mark_needed_node (node);
}
else if (TREE_CODE (decl) == VAR_DECL)
void **slot;
/* If we're not allowed to drop X into the constant pool, don't. */
- if (targetm.cannot_force_const_mem (x))
+ if (targetm.cannot_force_const_mem (mode, x))
return NULL_RTX;
/* Record that this function has used a constant pool entry. */
unsigned int align2;
if (local->index != NULL_TREE)
- fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
- * ((tree_low_cst (local->index, 0)
- - tree_low_cst (local->min_index, 0))));
+ {
+ double_int idx = double_int_sub (tree_to_double_int (local->index),
+ tree_to_double_int (local->min_index));
+ gcc_assert (double_int_fits_in_shwi_p (idx));
+ fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
+ * idx.low);
+ }
else if (local->field != NULL_TREE)
fieldpos = int_byte_position (local->field);
else
better be last. */
gcc_assert (!fieldsize || !DECL_CHAIN (local->field));
}
- else if (DECL_SIZE_UNIT (local->field))
- {
- /* ??? This can't be right. If the decl size overflows
- a host integer we will silently emit no data. */
- if (host_integerp (DECL_SIZE_UNIT (local->field), 1))
- fieldsize = tree_low_cst (DECL_SIZE_UNIT (local->field), 1);
- }
+ else
+ fieldsize = tree_low_cst (DECL_SIZE_UNIT (local->field), 1);
}
else
fieldsize = int_size_in_bytes (TREE_TYPE (local->type));