X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ffunction.c;h=5d0e7e5c9543de5e06a947a5205dfe5dd03c908b;hb=cfb69e87ee8fec936886b552574ab9c6eeb744bb;hp=2ed1748d9ff0bc85f784d5c66c693e4bf06e261e;hpb=342ad2d6a6167f3f9626947ac8affa9e0c92f611;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/function.c b/gcc/function.c index 2ed1748d9ff..5d0e7e5c954 100644 --- a/gcc/function.c +++ b/gcc/function.c @@ -1,7 +1,7 @@ /* Expands front end tree to back end RTL for GCC. Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997, - 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 - Free Software Foundation, Inc. + 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, + 2010 Free Software Foundation, Inc. This file is part of GCC. @@ -124,69 +124,16 @@ struct machine_function * (*init_machine_status) (void); /* The currently compiled function. */ struct function *cfun = 0; -/* These arrays record the INSN_UIDs of the prologue and epilogue insns. */ -static VEC(int,heap) *prologue; -static VEC(int,heap) *epilogue; - -/* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue - in this function. */ -static VEC(int,heap) *sibcall_epilogue; +/* These hashes record the prologue and epilogue insns. */ +static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) + htab_t prologue_insn_hash; +static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def))) + htab_t epilogue_insn_hash; -/* In order to evaluate some expressions, such as function calls returning - structures in memory, we need to temporarily allocate stack locations. - We record each allocated temporary in the following structure. - Associated with each temporary slot is a nesting level. When we pop up - one level, all temporaries associated with the previous level are freed. - Normally, all temporaries are freed after the execution of the statement - in which they were created. However, if we are inside a ({...}) grouping, - the result may be in a temporary and hence must be preserved. If the - result could be in a temporary, we preserve it if we can determine which - one it is in. If we cannot determine which temporary may contain the - result, all temporaries are preserved. A temporary is preserved by - pretending it was allocated at the previous nesting level. +htab_t types_used_by_vars_hash = NULL; +tree types_used_by_cur_var_decl = NULL; - Automatic variables are also assigned temporary slots, at the nesting - level where they are defined. They are marked a "kept" so that - free_temp_slots will not free them. */ - -struct temp_slot GTY(()) -{ - /* Points to next temporary slot. */ - struct temp_slot *next; - /* Points to previous temporary slot. */ - struct temp_slot *prev; - - /* The rtx to used to reference the slot. */ - rtx slot; - /* The rtx used to represent the address if not the address of the - slot above. May be an EXPR_LIST if multiple addresses exist. */ - rtx address; - /* The alignment (in bits) of the slot. */ - unsigned int align; - /* The size, in units, of the slot. */ - HOST_WIDE_INT size; - /* The type of the object in the slot, or zero if it doesn't correspond - to a type. We use this to determine whether a slot can be reused. - It can be reused if objects of the type of the new slot will always - conflict with objects of the type of the old slot. */ - tree type; - /* Nonzero if this temporary is currently in use. */ - char in_use; - /* Nonzero if this temporary has its address taken. 
*/ - char addr_taken; - /* Nesting level at which this slot is being used. */ - int level; - /* Nonzero if this should survive a call to free_temp_slots. */ - int keep; - /* The offset of the slot from the frame_pointer, including extra space - for alignment. This info is for combine_temp_slots. */ - HOST_WIDE_INT base_offset; - /* The size of the slot, including extra space for alignment. This - info is for combine_temp_slots. */ - HOST_WIDE_INT full_size; -}; - /* Forward declarations. */ static struct temp_slot *find_temp_slot_from_address (rtx); @@ -198,8 +145,8 @@ static tree *get_block_vector (tree, int *); extern tree debug_find_var_in_block_tree (tree, tree); /* We always define `record_insns' even if it's not used so that we can always export `prologue_epilogue_contains'. */ -static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED; -static int contains (const_rtx, VEC(int,heap) **); +static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED; +static bool contains (const_rtx, htab_t); #ifdef HAVE_return static void emit_return_into_block (basic_block); #endif @@ -262,9 +209,9 @@ free_after_parsing (struct function *f) void free_after_compilation (struct function *f) { - VEC_free (int, heap, prologue); - VEC_free (int, heap, epilogue); - VEC_free (int, heap, sibcall_epilogue); + prologue_insn_hash = NULL; + epilogue_insn_hash = NULL; + if (crtl->emit.regno_pointer_align) free (crtl->emit.regno_pointer_align); @@ -296,14 +243,15 @@ get_frame_size (void) bool frame_offset_overflow (HOST_WIDE_INT offset, tree func) -{ +{ unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset; if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1)) /* Leave room for the fixed part of the frame. */ - 64 * UNITS_PER_WORD) { - error ("%Jtotal size of local objects too large", func); + error_at (DECL_SOURCE_LOCATION (func), + "total size of local objects too large"); return TRUE; } @@ -330,6 +278,75 @@ get_stack_local_alignment (tree type, enum machine_mode mode) return STACK_SLOT_ALIGNMENT (type, mode, alignment); } +/* Determine whether it is possible to fit a stack slot of size SIZE and + alignment ALIGNMENT into an area in the stack frame that starts at + frame offset START and has a length of LENGTH. If so, store the frame + offset to be used for the stack slot in *POFFSET and return true; + return false otherwise. This function will extend the frame size when + given a start/length pair that lies at the end of the frame. */ + +static bool +try_fit_stack_local (HOST_WIDE_INT start, HOST_WIDE_INT length, + HOST_WIDE_INT size, unsigned int alignment, + HOST_WIDE_INT *poffset) +{ + HOST_WIDE_INT this_frame_offset; + int frame_off, frame_alignment, frame_phase; + + /* Calculate how many bytes the start of local variables is off from + stack alignment. */ + frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; + frame_off = STARTING_FRAME_OFFSET % frame_alignment; + frame_phase = frame_off ? frame_alignment - frame_off : 0; + + /* Round the frame offset to the specified alignment. */ + + /* We must be careful here, since FRAME_OFFSET might be negative and + division with a negative dividend isn't as well defined as we might + like. So we instead assume that ALIGNMENT is a power of two and + use logical operations which are unambiguous. 
*/ + if (FRAME_GROWS_DOWNWARD) + this_frame_offset + = (FLOOR_ROUND (start + length - size - frame_phase, + (unsigned HOST_WIDE_INT) alignment) + + frame_phase); + else + this_frame_offset + = (CEIL_ROUND (start - frame_phase, + (unsigned HOST_WIDE_INT) alignment) + + frame_phase); + + /* See if it fits. If this space is at the edge of the frame, + consider extending the frame to make it fit. Our caller relies on + this when allocating a new slot. */ + if (frame_offset == start && this_frame_offset < frame_offset) + frame_offset = this_frame_offset; + else if (this_frame_offset < start) + return false; + else if (start + length == frame_offset + && this_frame_offset + size > start + length) + frame_offset = this_frame_offset + size; + else if (this_frame_offset + size > start + length) + return false; + + *poffset = this_frame_offset; + return true; +} + +/* Create a new frame_space structure describing free space in the stack + frame beginning at START and ending at END, and chain it into the + function's frame_space_list. */ + +static void +add_frame_space (HOST_WIDE_INT start, HOST_WIDE_INT end) +{ + struct frame_space *space = GGC_NEW (struct frame_space); + space->next = crtl->frame_space_list; + crtl->frame_space_list = space; + space->start = start; + space->length = end - start; +} + /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it with machine mode MODE. @@ -350,8 +367,8 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, { rtx x, addr; int bigend_correction = 0; + HOST_WIDE_INT slot_offset, old_frame_offset; unsigned int alignment, alignment_in_bits; - int frame_off, frame_alignment, frame_phase; if (align == 0) { @@ -370,9 +387,6 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, alignment_in_bits = alignment * BITS_PER_UNIT; - if (FRAME_GROWS_DOWNWARD) - frame_offset -= size; - /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT. */ if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT) { @@ -412,38 +426,58 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, if (crtl->stack_alignment_needed < alignment_in_bits) crtl->stack_alignment_needed = alignment_in_bits; - if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed) - crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed; + if (crtl->max_used_stack_slot_alignment < alignment_in_bits) + crtl->max_used_stack_slot_alignment = alignment_in_bits; - /* Calculate how many bytes the start of local variables is off from - stack alignment. */ - frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; - frame_off = STARTING_FRAME_OFFSET % frame_alignment; - frame_phase = frame_off ? frame_alignment - frame_off : 0; + if (mode != BLKmode || size != 0) + { + struct frame_space **psp; - /* Round the frame offset to the specified alignment. The default is - to always honor requests to align the stack but a port may choose to - do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. 
*/ - if (STACK_ALIGNMENT_NEEDED - || mode != BLKmode - || size != 0) + for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next) + { + struct frame_space *space = *psp; + if (!try_fit_stack_local (space->start, space->length, size, + alignment, &slot_offset)) + continue; + *psp = space->next; + if (slot_offset > space->start) + add_frame_space (space->start, slot_offset); + if (slot_offset + size < space->start + space->length) + add_frame_space (slot_offset + size, + space->start + space->length); + goto found_space; + } + } + else if (!STACK_ALIGNMENT_NEEDED) { - /* We must be careful here, since FRAME_OFFSET might be negative and - division with a negative dividend isn't as well defined as we might - like. So we instead assume that ALIGNMENT is a power of two and - use logical operations which are unambiguous. */ - if (FRAME_GROWS_DOWNWARD) - frame_offset - = (FLOOR_ROUND (frame_offset - frame_phase, - (unsigned HOST_WIDE_INT) alignment) - + frame_phase); - else - frame_offset - = (CEIL_ROUND (frame_offset - frame_phase, - (unsigned HOST_WIDE_INT) alignment) - + frame_phase); + slot_offset = frame_offset; + goto found_space; } + old_frame_offset = frame_offset; + + if (FRAME_GROWS_DOWNWARD) + { + frame_offset -= size; + try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset); + + if (slot_offset > frame_offset) + add_frame_space (frame_offset, slot_offset); + if (slot_offset + size < old_frame_offset) + add_frame_space (slot_offset + size, old_frame_offset); + } + else + { + frame_offset += size; + try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset); + + if (slot_offset > old_frame_offset) + add_frame_space (old_frame_offset, slot_offset); + if (slot_offset + size < frame_offset) + add_frame_space (slot_offset + size, frame_offset); + } + + found_space: /* On a big-endian machine, if we are allocating more space than we will use, use the least significant bytes of those that are allocated. */ if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size) @@ -454,17 +488,14 @@ assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, if (virtuals_instantiated) addr = plus_constant (frame_pointer_rtx, trunc_int_for_mode - (frame_offset + bigend_correction + (slot_offset + bigend_correction + STARTING_FRAME_OFFSET, Pmode)); else addr = plus_constant (virtual_stack_vars_rtx, trunc_int_for_mode - (frame_offset + bigend_correction, + (slot_offset + bigend_correction, Pmode)); - if (!FRAME_GROWS_DOWNWARD) - frame_offset += size; - x = gen_rtx_MEM (mode, addr); set_mem_align (x, alignment_in_bits); MEM_NOTRAP_P (x) = 1; @@ -486,6 +517,68 @@ assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align) return assign_stack_local_1 (mode, size, align, false); } + +/* In order to evaluate some expressions, such as function calls returning + structures in memory, we need to temporarily allocate stack locations. + We record each allocated temporary in the following structure. + + Associated with each temporary slot is a nesting level. When we pop up + one level, all temporaries associated with the previous level are freed. + Normally, all temporaries are freed after the execution of the statement + in which they were created. However, if we are inside a ({...}) grouping, + the result may be in a temporary and hence must be preserved. If the + result could be in a temporary, we preserve it if we can determine which + one it is in. If we cannot determine which temporary may contain the + result, all temporaries are preserved. 
A temporary is preserved by + pretending it was allocated at the previous nesting level. + + Automatic variables are also assigned temporary slots, at the nesting + level where they are defined. They are marked a "kept" so that + free_temp_slots will not free them. */ + +struct GTY(()) temp_slot { + /* Points to next temporary slot. */ + struct temp_slot *next; + /* Points to previous temporary slot. */ + struct temp_slot *prev; + /* The rtx to used to reference the slot. */ + rtx slot; + /* The size, in units, of the slot. */ + HOST_WIDE_INT size; + /* The type of the object in the slot, or zero if it doesn't correspond + to a type. We use this to determine whether a slot can be reused. + It can be reused if objects of the type of the new slot will always + conflict with objects of the type of the old slot. */ + tree type; + /* The alignment (in bits) of the slot. */ + unsigned int align; + /* Nonzero if this temporary is currently in use. */ + char in_use; + /* Nonzero if this temporary has its address taken. */ + char addr_taken; + /* Nesting level at which this slot is being used. */ + int level; + /* Nonzero if this should survive a call to free_temp_slots. */ + int keep; + /* The offset of the slot from the frame_pointer, including extra space + for alignment. This info is for combine_temp_slots. */ + HOST_WIDE_INT base_offset; + /* The size of the slot, including extra space for alignment. This + info is for combine_temp_slots. */ + HOST_WIDE_INT full_size; +}; + +/* A table of addresses that represent a stack slot. The table is a mapping + from address RTXen to a temp slot. */ +static GTY((param_is(struct temp_slot_address_entry))) htab_t temp_slot_address_table; + +/* Entry for the above hash table. */ +struct GTY(()) temp_slot_address_entry { + hashval_t hash; + rtx address; + struct temp_slot *temp_slot; +}; + /* Removes temporary slot TEMP from LIST. */ static void @@ -555,6 +648,114 @@ make_slot_available (struct temp_slot *temp) temp->in_use = 0; temp->level = -1; } + +/* Compute the hash value for an address -> temp slot mapping. + The value is cached on the mapping entry. */ +static hashval_t +temp_slot_address_compute_hash (struct temp_slot_address_entry *t) +{ + int do_not_record = 0; + return hash_rtx (t->address, GET_MODE (t->address), + &do_not_record, NULL, false); +} + +/* Return the hash value for an address -> temp slot mapping. */ +static hashval_t +temp_slot_address_hash (const void *p) +{ + const struct temp_slot_address_entry *t; + t = (const struct temp_slot_address_entry *) p; + return t->hash; +} + +/* Compare two address -> temp slot mapping entries. */ +static int +temp_slot_address_eq (const void *p1, const void *p2) +{ + const struct temp_slot_address_entry *t1, *t2; + t1 = (const struct temp_slot_address_entry *) p1; + t2 = (const struct temp_slot_address_entry *) p2; + return exp_equiv_p (t1->address, t2->address, 0, true); +} + +/* Add ADDRESS as an alias of TEMP_SLOT to the addess -> temp slot mapping. */ +static void +insert_temp_slot_address (rtx address, struct temp_slot *temp_slot) +{ + void **slot; + struct temp_slot_address_entry *t = GGC_NEW (struct temp_slot_address_entry); + t->address = address; + t->temp_slot = temp_slot; + t->hash = temp_slot_address_compute_hash (t); + slot = htab_find_slot_with_hash (temp_slot_address_table, t, t->hash, INSERT); + *slot = t; +} + +/* Remove an address -> temp slot mapping entry if the temp slot is + not in use anymore. Callback for remove_unused_temp_slot_addresses. 
*/ +static int +remove_unused_temp_slot_addresses_1 (void **slot, void *data ATTRIBUTE_UNUSED) +{ + const struct temp_slot_address_entry *t; + t = (const struct temp_slot_address_entry *) *slot; + if (! t->temp_slot->in_use) + *slot = NULL; + return 1; +} + +/* Remove all mappings of addresses to unused temp slots. */ +static void +remove_unused_temp_slot_addresses (void) +{ + htab_traverse (temp_slot_address_table, + remove_unused_temp_slot_addresses_1, + NULL); +} + +/* Find the temp slot corresponding to the object at address X. */ + +static struct temp_slot * +find_temp_slot_from_address (rtx x) +{ + struct temp_slot *p; + struct temp_slot_address_entry tmp, *t; + + /* First try the easy way: + See if X exists in the address -> temp slot mapping. */ + tmp.address = x; + tmp.temp_slot = NULL; + tmp.hash = temp_slot_address_compute_hash (&tmp); + t = (struct temp_slot_address_entry *) + htab_find_with_hash (temp_slot_address_table, &tmp, tmp.hash); + if (t) + return t->temp_slot; + + /* If we have a sum involving a register, see if it points to a temp + slot. */ + if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0)) + && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0) + return p; + else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1)) + && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0) + return p; + + /* Last resort: Address is a virtual stack var address. */ + if (GET_CODE (x) == PLUS + && XEXP (x, 0) == virtual_stack_vars_rtx + && CONST_INT_P (XEXP (x, 1))) + { + int i; + for (i = max_slot_level (); i >= 0; i--) + for (p = *temp_slots_at_level (i); p; p = p->next) + { + if (INTVAL (XEXP (x, 1)) >= p->base_offset + && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size) + return p; + } + } + + return NULL; +} /* Allocate a temporary stack slot and record it for possible later reuse. @@ -592,7 +793,7 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, /* Try to find an available, already-allocated temporary of the proper mode which meets the size and alignment requirements. Choose the smallest one with the closest alignment. - + If assign_stack_temp is called outside of the tree->rtl expansion, we cannot reuse the stack slots (that may still refer to VIRTUAL_STACK_VARS_REGNUM). */ @@ -641,7 +842,6 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, p->full_size = best_p->full_size - rounded_size; p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size); p->align = best_p->align; - p->address = 0; p->type = best_p->type; insert_slot_to_list (p, &avail_temp_slots); @@ -700,7 +900,6 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, p->base_offset = frame_offset_old; p->full_size = frame_offset - frame_offset_old; } - p->address = 0; selected = p; } @@ -714,6 +913,7 @@ assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size, pp = temp_slots_at_level (p->level); insert_slot_to_list (p, pp); + insert_temp_slot_address (XEXP (p->slot, 0), p); /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */ slot = gen_rtx_MEM (mode, XEXP (p->slot, 0)); @@ -809,7 +1009,7 @@ assign_temp (tree type_or_decl, int keep, int memory_required, #ifdef PROMOTE_MODE if (! dont_promote) - mode = promote_mode (type, mode, &unsignedp, 0); + mode = promote_mode (type, mode, &unsignedp); #endif return gen_reg_rtx (mode); @@ -882,45 +1082,6 @@ combine_temp_slots (void) } } -/* Find the temp slot corresponding to the object at address X. 
*/ - -static struct temp_slot * -find_temp_slot_from_address (rtx x) -{ - struct temp_slot *p; - rtx next; - int i; - - for (i = max_slot_level (); i >= 0; i--) - for (p = *temp_slots_at_level (i); p; p = p->next) - { - if (XEXP (p->slot, 0) == x - || p->address == x - || (GET_CODE (x) == PLUS - && XEXP (x, 0) == virtual_stack_vars_rtx - && GET_CODE (XEXP (x, 1)) == CONST_INT - && INTVAL (XEXP (x, 1)) >= p->base_offset - && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size)) - return p; - - else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST) - for (next = p->address; next; next = XEXP (next, 1)) - if (XEXP (next, 0) == x) - return p; - } - - /* If we have a sum involving a register, see if it points to a temp - slot. */ - if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0)) - && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0) - return p; - else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1)) - && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0) - return p; - - return 0; -} - /* Indicate that NEW_RTX is an alternate way of referring to the temp slot that previously was known by OLD_RTX. */ @@ -967,15 +1128,7 @@ update_temp_slot_address (rtx old_rtx, rtx new_rtx) } /* Otherwise add an alias for the temp's address. */ - else if (p->address == 0) - p->address = new_rtx; - else - { - if (GET_CODE (p->address) != EXPR_LIST) - p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX); - - p->address = gen_rtx_EXPR_LIST (VOIDmode, new_rtx, p->address); - } + insert_temp_slot_address (new_rtx, p); } /* If X could be a reference to a temporary slot, mark the fact that its @@ -1094,16 +1247,24 @@ void free_temp_slots (void) { struct temp_slot *p, *next; + bool some_available = false; for (p = *temp_slots_at_level (temp_slot_level); p; p = next) { next = p->next; if (!p->keep) - make_slot_available (p); + { + make_slot_available (p); + some_available = true; + } } - combine_temp_slots (); + if (some_available) + { + remove_unused_temp_slot_addresses (); + combine_temp_slots (); + } } /* Push deeper into the nesting level for stack temporaries. */ @@ -1121,14 +1282,20 @@ void pop_temp_slots (void) { struct temp_slot *p, *next; + bool some_available = false; for (p = *temp_slots_at_level (temp_slot_level); p; p = next) { next = p->next; make_slot_available (p); + some_available = true; } - combine_temp_slots (); + if (some_available) + { + remove_unused_temp_slot_addresses (); + combine_temp_slots (); + } temp_slot_level--; } @@ -1142,6 +1309,15 @@ init_temp_slots (void) avail_temp_slots = 0; used_temp_slots = 0; temp_slot_level = 0; + + /* Set up the table to map addresses to temp slots. */ + if (! 
temp_slot_address_table) + temp_slot_address_table = htab_create_ggc (32, + temp_slot_address_hash, + temp_slot_address_eq, + NULL); + else + htab_empty (temp_slot_address_table); } /* These routines are responsible for converting virtual register references @@ -1381,7 +1557,7 @@ instantiate_virtual_regs_in_insn (rtx insn) && recog_data.n_operands >= 3 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0) && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1) - && GET_CODE (recog_data.operand[2]) == CONST_INT + && CONST_INT_P (recog_data.operand[2]) && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset))) { offset += INTVAL (recog_data.operand[2]); @@ -1517,7 +1693,13 @@ instantiate_virtual_regs_in_insn (rtx insn) if (!safe_insn_predicate (insn_code, i, x)) { start_sequence (); - x = force_reg (insn_data[insn_code].operand[i].mode, x); + if (REG_P (x)) + { + gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER); + x = copy_to_reg (x); + } + else + x = force_reg (insn_data[insn_code].operand[i].mode, x); seq = get_insns (); end_sequence (); if (seq) @@ -1698,8 +1880,11 @@ instantiate_virtual_regs (void) || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC || GET_CODE (PATTERN (insn)) == ASM_INPUT) continue; - - instantiate_virtual_regs_in_insn (insn); + else if (DEBUG_INSN_P (insn)) + for_each_rtx (&INSN_VAR_LOCATION (insn), + instantiate_virtual_regs_in_rtx, NULL); + else + instantiate_virtual_regs_in_insn (insn); if (INSN_DELETED_P (insn)) continue; @@ -1707,7 +1892,7 @@ instantiate_virtual_regs (void) for_each_rtx (®_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL); /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */ - if (GET_CODE (insn) == CALL_INSN) + if (CALL_P (insn)) for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn), instantiate_virtual_regs_in_rtx, NULL); } @@ -1733,7 +1918,7 @@ struct rtl_opt_pass pass_instantiate_virtual_regs = NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ - 0, /* tv_id */ + TV_NONE, /* tv_id */ 0, /* properties_required */ 0, /* properties_provided */ 0, /* properties_destroyed */ @@ -1751,65 +1936,64 @@ struct rtl_opt_pass pass_instantiate_virtual_regs = int aggregate_value_p (const_tree exp, const_tree fntype) { + const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); int i, regno, nregs; rtx reg; - const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp); - - /* DECL node associated with FNTYPE when relevant, which we might need to - check for by-invisible-reference returns, typically for CALL_EXPR input - EXPressions. */ - const_tree fndecl = NULL_TREE; - if (fntype) switch (TREE_CODE (fntype)) { case CALL_EXPR: - fndecl = get_callee_fndecl (fntype); - fntype = fndecl ? TREE_TYPE (fndecl) : 0; + { + tree fndecl = get_callee_fndecl (fntype); + fntype = (fndecl + ? TREE_TYPE (fndecl) + : TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)))); + } break; case FUNCTION_DECL: - fndecl = fntype; - fntype = TREE_TYPE (fndecl); + fntype = TREE_TYPE (fntype); break; case FUNCTION_TYPE: case METHOD_TYPE: break; case IDENTIFIER_NODE: - fntype = 0; + fntype = NULL_TREE; break; default: - /* We don't expect other rtl types here. */ + /* We don't expect other tree types here. */ gcc_unreachable (); } - if (TREE_CODE (type) == VOID_TYPE) + if (VOID_TYPE_P (type)) return 0; + /* If a record should be passed the same as its first (and only) member + don't pass it as an aggregate. 
*/ + if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) + return aggregate_value_p (first_field (type), fntype); + /* If the front end has decided that this needs to be passed by reference, do so. */ if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL) && DECL_BY_REFERENCE (exp)) return 1; - /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the - called function RESULT_DECL, meaning the function returns in memory by - invisible reference. This check lets front-ends not set TREE_ADDRESSABLE - on the function type, which used to be the way to request such a return - mechanism but might now be causing troubles at gimplification time if - temporaries with the function type need to be created. */ - if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl) - && DECL_BY_REFERENCE (DECL_RESULT (fndecl))) - return 1; - - if (targetm.calls.return_in_memory (type, fntype)) + /* Function types that are TREE_ADDRESSABLE force return in memory. */ + if (fntype && TREE_ADDRESSABLE (fntype)) return 1; + /* Types that are TREE_ADDRESSABLE must be constructed in memory, and thus can't be returned in registers. */ if (TREE_ADDRESSABLE (type)) return 1; + if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type)) return 1; + + if (targetm.calls.return_in_memory (type, fntype)) + return 1; + /* Make sure we have suitable call-clobbered regs to return the value in; if not, we must return it in memory. */ reg = hard_function_value (type, 0, fntype, 0); @@ -1824,6 +2008,7 @@ aggregate_value_p (const_tree exp, const_tree fntype) for (i = 0; i < nregs; i++) if (! call_used_regs[regno + i]) return 1; + return 0; } @@ -1835,7 +2020,7 @@ use_register_for_decl (const_tree decl) { if (!targetm.calls.allocate_stack_slots_for_args()) return true; - + /* Honor volatile. */ if (TREE_SIDE_EFFECTS (decl)) return false; @@ -1860,7 +2045,28 @@ use_register_for_decl (const_tree decl) if (DECL_IGNORED_P (decl)) return true; - return (optimize || DECL_REGISTER (decl)); + if (optimize) + return true; + + if (!DECL_REGISTER (decl)) + return false; + + switch (TREE_CODE (TREE_TYPE (decl))) + { + case RECORD_TYPE: + case UNION_TYPE: + case QUAL_UNION_TYPE: + /* When not optimizing, disregard register keyword for variables with + types containing methods, otherwise the methods won't be callable + from the debugger. */ + if (TYPE_METHODS (TREE_TYPE (decl))) + return false; + break; + default: + break; + } + + return true; } /* Return true if TYPE should be passed by invisible reference. */ @@ -1879,6 +2085,14 @@ pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode, /* GCC post 3.4 passes *all* variable sized types by reference. */ if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST) return true; + + /* If a record type should be passed the same as its first (and only) + member, use the type and mode of that member. */ + if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type)) + { + type = TREE_TYPE (first_field (type)); + mode = TYPE_MODE (type); + } } return targetm.calls.pass_by_reference (ca, mode, type, named_arg); @@ -1935,7 +2149,7 @@ struct assign_parm_data_one static void assign_parms_initialize_all (struct assign_parm_data_all *all) { - tree fntype; + tree fntype ATTRIBUTE_UNUSED; memset (all, 0, sizeof (*all)); @@ -1957,25 +2171,13 @@ assign_parms_initialize_all (struct assign_parm_data_all *all) entries of the component type. Return a new list of substitutions are needed, else the old list. 
*/ -static tree -split_complex_args (tree args) +static void +split_complex_args (VEC(tree, heap) **args) { + unsigned i; tree p; - /* Before allocating memory, check for the common case of no complex. */ - for (p = args; p; p = TREE_CHAIN (p)) - { - tree type = TREE_TYPE (p); - if (TREE_CODE (type) == COMPLEX_TYPE - && targetm.calls.split_complex_arg (type)) - goto found; - } - return args; - - found: - args = copy_list (args); - - for (p = args; p; p = TREE_CHAIN (p)) + for (i = 0; VEC_iterate (tree, *args, i, p); ++i) { tree type = TREE_TYPE (p); if (TREE_CODE (type) == COMPLEX_TYPE @@ -1986,6 +2188,7 @@ split_complex_args (tree args) bool addressable = TREE_ADDRESSABLE (p); /* Rewrite the PARM_DECL's type with its component. */ + p = copy_node (p); TREE_TYPE (p) = subtype; DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p)); DECL_MODE (p) = VOIDmode; @@ -1999,34 +2202,36 @@ split_complex_args (tree args) DECL_IGNORED_P (p) = addressable; TREE_ADDRESSABLE (p) = 0; layout_decl (p, 0); + VEC_replace (tree, *args, i, p); /* Build a second synthetic decl. */ - decl = build_decl (PARM_DECL, NULL_TREE, subtype); + decl = build_decl (EXPR_LOCATION (p), + PARM_DECL, NULL_TREE, subtype); DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p); DECL_ARTIFICIAL (decl) = addressable; DECL_IGNORED_P (decl) = addressable; layout_decl (decl, 0); - - /* Splice it in; skip the new decl. */ - TREE_CHAIN (decl) = TREE_CHAIN (p); - TREE_CHAIN (p) = decl; - p = decl; + VEC_safe_insert (tree, heap, *args, ++i, decl); } } - - return args; } /* A subroutine of assign_parms. Adjust the parameter list to incorporate the hidden struct return argument, and (abi willing) complex args. Return the new parameter list. */ -static tree +static VEC(tree, heap) * assign_parms_augmented_arg_list (struct assign_parm_data_all *all) { tree fndecl = current_function_decl; tree fntype = TREE_TYPE (fndecl); - tree fnargs = DECL_ARGUMENTS (fndecl); + VEC(tree, heap) *fnargs = NULL; + tree arg; + + for (arg = DECL_ARGUMENTS (fndecl); arg; arg = TREE_CHAIN (arg)) + VEC_safe_push (tree, heap, fnargs, arg); + + all->orig_fnargs = DECL_ARGUMENTS (fndecl); /* If struct value address is treated as the first argument, make it so. */ if (aggregate_value_p (DECL_RESULT (fndecl), fndecl) @@ -2036,21 +2241,22 @@ assign_parms_augmented_arg_list (struct assign_parm_data_all *all) tree type = build_pointer_type (TREE_TYPE (fntype)); tree decl; - decl = build_decl (PARM_DECL, NULL_TREE, type); + decl = build_decl (DECL_SOURCE_LOCATION (fndecl), + PARM_DECL, NULL_TREE, type); DECL_ARG_TYPE (decl) = type; DECL_ARTIFICIAL (decl) = 1; DECL_IGNORED_P (decl) = 1; - TREE_CHAIN (decl) = fnargs; - fnargs = decl; + TREE_CHAIN (decl) = all->orig_fnargs; + all->orig_fnargs = decl; + VEC_safe_insert (tree, heap, fnargs, 0, decl); + all->function_result_decl = decl; } - all->orig_fnargs = fnargs; - /* If the target wants to split complex arguments into scalars, do so. 
*/ if (targetm.calls.split_complex_arg) - fnargs = split_complex_args (fnargs); + split_complex_args (&fnargs); return fnargs; } @@ -2065,6 +2271,7 @@ assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, { tree nominal_type, passed_type; enum machine_mode nominal_mode, passed_mode, promoted_mode; + int unsignedp; memset (data, 0, sizeof (*data)); @@ -2100,12 +2307,13 @@ assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, passed_mode = TYPE_MODE (passed_type); nominal_mode = TYPE_MODE (nominal_type); - /* If the parm is to be passed as a transparent union, use the type of - the first field for the tests below. We have already verified that - the modes are the same. */ - if (TREE_CODE (passed_type) == UNION_TYPE - && TYPE_TRANSPARENT_UNION (passed_type)) - passed_type = TREE_TYPE (TYPE_FIELDS (passed_type)); + /* If the parm is to be passed as a transparent union or record, use the + type of the first field for the tests below. We have already verified + that the modes are the same. */ + if ((TREE_CODE (passed_type) == UNION_TYPE + || TREE_CODE (passed_type) == RECORD_TYPE) + && TYPE_TRANSPARENT_AGGR (passed_type)) + passed_type = TREE_TYPE (first_field (passed_type)); /* See if this arg was passed by invisible reference. */ if (pass_by_reference (&all->args_so_far, passed_mode, @@ -2117,13 +2325,9 @@ assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm, } /* Find mode as it is passed by the ABI. */ - promoted_mode = passed_mode; - if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl))) - { - int unsignedp = TYPE_UNSIGNED (passed_type); - promoted_mode = promote_mode (passed_type, promoted_mode, - &unsignedp, 1); - } + unsignedp = TYPE_UNSIGNED (passed_type); + promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp, + TREE_TYPE (current_function_decl), 0); egress: data->nominal_type = nominal_type; @@ -2331,20 +2535,25 @@ assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data) stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx); stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm); - set_mem_attributes (stack_parm, parm, 1); - /* set_mem_attributes could set MEM_SIZE to the passed mode's size, - while promoted mode's size is needed. */ - if (data->promoted_mode != BLKmode - && data->promoted_mode != DECL_MODE (parm)) + if (!data->passed_pointer) { - set_mem_size (stack_parm, GEN_INT (GET_MODE_SIZE (data->promoted_mode))); - if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm)) + set_mem_attributes (stack_parm, parm, 1); + /* set_mem_attributes could set MEM_SIZE to the passed mode's size, + while promoted mode's size is needed. */ + if (data->promoted_mode != BLKmode + && data->promoted_mode != DECL_MODE (parm)) { - int offset = subreg_lowpart_offset (DECL_MODE (parm), - data->promoted_mode); - if (offset) - set_mem_offset (stack_parm, - plus_constant (MEM_OFFSET (stack_parm), -offset)); + set_mem_size (stack_parm, + GEN_INT (GET_MODE_SIZE (data->promoted_mode))); + if (MEM_EXPR (stack_parm) && MEM_OFFSET (stack_parm)) + { + int offset = subreg_lowpart_offset (DECL_MODE (parm), + data->promoted_mode); + if (offset) + set_mem_offset (stack_parm, + plus_constant (MEM_OFFSET (stack_parm), + -offset)); + } } } @@ -2357,7 +2566,7 @@ assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data) up with a guess at the alignment based on OFFSET_RTX. 
*/ if (data->locate.where_pad != downward || data->entry_parm) align = boundary; - else if (GET_CODE (offset_rtx) == CONST_INT) + else if (CONST_INT_P (offset_rtx)) { align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary; align = align & -align; @@ -2390,7 +2599,7 @@ assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data) locations. The Irix 6 ABI has examples of this. */ if (GET_CODE (entry_parm) == PARALLEL) emit_group_store (validize_mem (stack_parm), entry_parm, - data->passed_type, + data->passed_type, int_size_in_bytes (data->passed_type)); else { @@ -2515,7 +2724,7 @@ assign_parm_setup_block_p (struct assign_parm_data_one *data) return false; } -/* A subroutine of assign_parms. Arrange for the parameter to be +/* A subroutine of assign_parms. Arrange for the parameter to be present and valid in DATA->STACK_RTL. */ static void @@ -2671,12 +2880,11 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, bool did_conversion = false; /* Store the parm in a pseudoregister during the function, but we may - need to do it in a wider mode. */ - - /* This is not really promoting for a call. However we need to be - consistent with assign_parm_find_data_types and expand_expr_real_1. */ + need to do it in a wider mode. Using 2 here makes the result + consistent with promote_decl_mode and thus expand_expr_real_1. */ promoted_nominal_mode - = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1); + = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp, + TREE_TYPE (current_function_decl), 2); parmreg = gen_reg_rtx (promoted_nominal_mode); @@ -2696,7 +2904,8 @@ assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm, assign_parm_remove_parallels (data); - /* Copy the value into the register. */ + /* Copy the value into the register, thus bridging between + assign_parm_find_data_types and expand_expr_real_1. */ if (data->nominal_mode != data->passed_mode || promoted_nominal_mode != data->promoted_mode) { @@ -2874,9 +3083,17 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, TYPE_UNSIGNED (TREE_TYPE (parm))); if (data->stack_parm) - /* ??? This may need a big-endian conversion on sparc64. */ - data->stack_parm - = adjust_address (data->stack_parm, data->nominal_mode, 0); + { + int offset = subreg_lowpart_offset (data->nominal_mode, + GET_MODE (data->stack_parm)); + /* ??? This may need a big-endian conversion on sparc64. */ + data->stack_parm + = adjust_address (data->stack_parm, data->nominal_mode, 0); + if (offset && MEM_OFFSET (data->stack_parm)) + set_mem_offset (data->stack_parm, + plus_constant (MEM_OFFSET (data->stack_parm), + offset)); + } } if (data->entry_parm != data->stack_parm) @@ -2885,10 +3102,13 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, if (data->stack_parm == 0) { + int align = STACK_SLOT_ALIGNMENT (data->passed_type, + GET_MODE (data->entry_parm), + TYPE_ALIGN (data->passed_type)); data->stack_parm = assign_stack_local (GET_MODE (data->entry_parm), GET_MODE_SIZE (GET_MODE (data->entry_parm)), - TYPE_ALIGN (data->passed_type)); + align); set_mem_attributes (data->stack_parm, parm, 1); } @@ -2925,12 +3145,14 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm, undo the frobbing that we did in assign_parms_augmented_arg_list. 
*/ static void -assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs) +assign_parms_unsplit_complex (struct assign_parm_data_all *all, + VEC(tree, heap) *fnargs) { tree parm; tree orig_fnargs = all->orig_fnargs; + unsigned i = 0; - for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm)) + for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i) { if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE && targetm.calls.split_complex_arg (TREE_TYPE (parm))) @@ -2938,8 +3160,8 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs) rtx tmp, real, imag; enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm)); - real = DECL_RTL (fnargs); - imag = DECL_RTL (TREE_CHAIN (fnargs)); + real = DECL_RTL (VEC_index (tree, fnargs, i)); + imag = DECL_RTL (VEC_index (tree, fnargs, i + 1)); if (inner != GET_MODE (real)) { real = gen_lowpart_SUBREG (inner, real); @@ -2950,11 +3172,13 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs) { rtx rmem, imem; HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm)); + int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm), + DECL_MODE (parm), + TYPE_ALIGN (TREE_TYPE (parm))); /* split_complex_arg put the real and imag parts in pseudos. Move them to memory. */ - tmp = assign_stack_local (DECL_MODE (parm), size, - TYPE_ALIGN (TREE_TYPE (parm))); + tmp = assign_stack_local (DECL_MODE (parm), size, align); set_mem_attributes (tmp, parm, 1); rmem = adjust_address_nv (tmp, inner, 0); imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner)); @@ -2970,8 +3194,8 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs) tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); SET_DECL_RTL (parm, tmp); - real = DECL_INCOMING_RTL (fnargs); - imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs)); + real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i)); + imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1)); if (inner != GET_MODE (real)) { real = gen_lowpart_SUBREG (inner, real); @@ -2979,20 +3203,8 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs) } tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag); set_decl_incoming_rtl (parm, tmp, false); - fnargs = TREE_CHAIN (fnargs); - } - else - { - SET_DECL_RTL (parm, DECL_RTL (fnargs)); - set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs), false); - - /* Set MEM_EXPR to the original decl, i.e. to PARM, - instead of the copy of decl, i.e. FNARGS. 
*/ - if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm))) - set_mem_expr (DECL_INCOMING_RTL (parm), parm); + i++; } - - fnargs = TREE_CHAIN (fnargs); } } @@ -3003,7 +3215,9 @@ static void assign_parms (tree fndecl) { struct assign_parm_data_all all; - tree fnargs, parm; + tree parm; + VEC(tree, heap) *fnargs; + unsigned i; crtl->args.internal_arg_pointer = targetm.calls.internal_arg_pointer (); @@ -3011,7 +3225,7 @@ assign_parms (tree fndecl) assign_parms_initialize_all (&all); fnargs = assign_parms_augmented_arg_list (&all); - for (parm = fnargs; parm; parm = TREE_CHAIN (parm)) + for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i) { struct assign_parm_data_one data; @@ -3031,15 +3245,19 @@ assign_parms (tree fndecl) { unsigned int align = FUNCTION_ARG_BOUNDARY (data.promoted_mode, data.passed_type); + align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode, + align); if (TYPE_ALIGN (data.nominal_type) > align) - align = TYPE_ALIGN (data.passed_type); + align = MINIMUM_ALIGNMENT (data.nominal_type, + TYPE_MODE (data.nominal_type), + TYPE_ALIGN (data.nominal_type)); if (crtl->stack_alignment_estimated < align) { gcc_assert (!crtl->stack_realign_processed); crtl->stack_alignment_estimated = align; } } - + if (cfun->stdarg && !TREE_CHAIN (parm)) assign_parms_setup_varargs (&all, &data, false); @@ -3070,9 +3288,11 @@ assign_parms (tree fndecl) assign_parm_setup_stack (&all, parm, &data); } - if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs) + if (targetm.calls.split_complex_arg) assign_parms_unsplit_complex (&all, fnargs); + VEC_free (tree, heap, fnargs); + /* Output all parameter conversion instructions (possibly including calls) now that all parameters have been copied out of hard registers. */ emit_insn (all.first_conversion_insn); @@ -3096,7 +3316,7 @@ assign_parms (tree fndecl) crtl->stack_alignment_estimated = align; } } - } + } } /* If we are receiving a struct value address as the first argument, set up @@ -3141,7 +3361,7 @@ assign_parms (tree fndecl) = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant) : expand_expr (size_diffop (all.stack_args_size.var, size_int (-all.stack_args_size.constant)), - NULL_RTX, VOIDmode, 0)); + NULL_RTX, VOIDmode, EXPAND_NORMAL)); #else crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size); #endif @@ -3224,13 +3444,15 @@ gimple_seq gimplify_parameters (void) { struct assign_parm_data_all all; - tree fnargs, parm; + tree parm; gimple_seq stmts = NULL; + VEC(tree, heap) *fnargs; + unsigned i; assign_parms_initialize_all (&all); fnargs = assign_parms_augmented_arg_list (&all); - for (parm = fnargs; parm; parm = TREE_CHAIN (parm)) + for (i = 0; VEC_iterate (tree, fnargs, i, parm); ++i) { struct assign_parm_data_one data; @@ -3308,6 +3530,8 @@ gimplify_parameters (void) } } + VEC_free (tree, heap, fnargs); + return stmts; } @@ -3416,8 +3640,6 @@ locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs, calling function side. 
*/ if (crtl->stack_alignment_needed < boundary) crtl->stack_alignment_needed = boundary; - if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed) - crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed; if (crtl->preferred_stack_boundary < boundary) crtl->preferred_stack_boundary = boundary; @@ -3621,7 +3843,7 @@ setjmp_vars_warning (bitmap setjmp_crosses, tree block) && DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)) && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) - warning (OPT_Wclobbered, "variable %q+D might be clobbered by" + warning (OPT_Wclobbered, "variable %q+D might be clobbered by" " % or %", decl); } @@ -3641,14 +3863,14 @@ setjmp_args_warning (bitmap setjmp_crosses) if (DECL_RTL (decl) != 0 && REG_P (DECL_RTL (decl)) && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl)))) - warning (OPT_Wclobbered, + warning (OPT_Wclobbered, "argument %q+D might be clobbered by % or %", decl); } /* Generate warning messages for variables live across setjmp. */ -void +void generate_setjmp_warnings (void) { bitmap setjmp_crosses = regstat_get_setjmp_crosses (); @@ -3950,7 +4172,7 @@ pop_cfun (void) /* Return value of funcdef and increase it. */ int -get_next_funcdef_no (void) +get_next_funcdef_no (void) { return funcdef_no++; } @@ -3976,8 +4198,6 @@ allocate_struct_function (tree fndecl, bool abstract_p) cfun = GGC_CNEW (struct function); - cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL; - init_eh_for_function (); if (init_machine_status) @@ -4009,7 +4229,7 @@ allocate_struct_function (tree fndecl, bool abstract_p) && TYPE_ARG_TYPES (fntype) != 0 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) != void_type_node)); - + /* Assume all registers in stdarg functions need to be saved. */ cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE; cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE; @@ -4033,6 +4253,7 @@ static void prepare_function_start (void) { gcc_assert (!crtl->emit.x_last_insn); + init_temp_slots (); init_emit (); init_varasm_status (); init_expr (); @@ -4089,18 +4310,11 @@ init_function_start (tree subr) warning (OPT_Waggregate_return, "function returns an aggregate"); } -/* Make sure all values used by the optimization passes have sane - defaults. */ +/* Make sure all values used by the optimization passes have sane defaults. */ unsigned int init_function_for_compilation (void) { reg_renumber = 0; - - /* No prologue/epilogue insns yet. Make sure that these vectors are - empty. */ - gcc_assert (VEC_length (int, prologue) == 0); - gcc_assert (VEC_length (int, epilogue) == 0); - gcc_assert (VEC_length (int, sibcall_epilogue) == 0); return 0; } @@ -4108,13 +4322,13 @@ struct rtl_opt_pass pass_init_function = { { RTL_PASS, - NULL, /* name */ - NULL, /* gate */ - init_function_for_compilation, /* execute */ + "*init_function", /* name */ + NULL, /* gate */ + init_function_for_compilation, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ - 0, /* tv_id */ + TV_NONE, /* tv_id */ 0, /* properties_required */ 0, /* properties_provided */ 0, /* properties_destroyed */ @@ -4149,12 +4363,8 @@ stack_protect_prologue (void) tree guard_decl = targetm.stack_protect_guard (); rtx x, y; - /* Avoid expand_expr here, because we don't want guard_decl pulled - into registers unless absolutely necessary. And we know that - crtl->stack_protect_guard is a local stack slot, so this skips - all the fluff. 
*/ - x = validize_mem (DECL_RTL (crtl->stack_protect_guard)); - y = validize_mem (DECL_RTL (guard_decl)); + x = expand_normal (crtl->stack_protect_guard); + y = expand_normal (guard_decl); /* Allow the target to copy from Y to X without leaking Y into a register. */ @@ -4187,12 +4397,8 @@ stack_protect_epilogue (void) rtx label = gen_label_rtx (); rtx x, y, tmp; - /* Avoid expand_expr here, because we don't want guard_decl pulled - into registers unless absolutely necessary. And we know that - crtl->stack_protect_guard is a local stack slot, so this skips - all the fluff. */ - x = validize_mem (DECL_RTL (crtl->stack_protect_guard)); - y = validize_mem (DECL_RTL (guard_decl)); + x = expand_normal (crtl->stack_protect_guard); + y = expand_normal (guard_decl); /* Allow the target to compare Y with X without leaking either into a register. */ @@ -4337,13 +4543,21 @@ expand_function_start (tree subr) if (cfun->static_chain_decl) { tree parm = cfun->static_chain_decl; - rtx local = gen_reg_rtx (Pmode); + rtx local, chain, insn; + + local = gen_reg_rtx (Pmode); + chain = targetm.calls.static_chain (current_function_decl, true); - set_decl_incoming_rtl (parm, static_chain_incoming_rtx, false); + set_decl_incoming_rtl (parm, chain, false); SET_DECL_RTL (parm, local); mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm)))); - emit_move_insn (local, static_chain_incoming_rtx); + insn = emit_move_insn (local, chain); + + /* Mark the register as eliminable, similar to parameters. */ + if (MEM_P (chain) + && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0))) + set_unique_reg_note (insn, REG_EQUIV, chain); } /* If the function receives a non-local goto, then store the @@ -4514,9 +4728,12 @@ expand_function_end (void) for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) if (CALL_P (insn)) { + rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE); start_sequence (); - probe_stack_range (STACK_OLD_CHECK_PROTECT, - GEN_INT (STACK_CHECK_MAX_FRAME_SIZE)); + if (STACK_CHECK_MOVING_SP) + anti_adjust_stack_and_probe (max_frame_size, true); + else + probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size); seq = get_insns (); end_sequence (); emit_insn_before (seq, stack_check_probe_note); @@ -4611,10 +4828,9 @@ expand_function_end (void) else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl)) { int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result)); - - if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl))) - promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl), - &unsignedp, 1); + promote_function_mode (TREE_TYPE (decl_result), + GET_MODE (decl_rtl), &unsignedp, + TREE_TYPE (current_function_decl), 1); convert_move (real_decl_rtl, decl_rtl, unsignedp); } @@ -4695,7 +4911,6 @@ expand_function_end (void) start_sequence (); clobber_return_register (); - expand_naked_return (); seq = get_insns (); end_sequence (); @@ -4703,7 +4918,8 @@ expand_function_end (void) } /* Output the label for the naked return from the function. */ - emit_label (naked_return_label); + if (naked_return_label) + emit_label (naked_return_label); /* @@@ This is a kludge. We want to ensure that instructions that may trap are not moved into the epilogue by scheduling, because @@ -4766,16 +4982,42 @@ get_arg_pointer_save_area (void) return ret; } -/* Extend a vector that records the INSN_UIDs of INSNS - (a list of one or more insns). */ +/* Add a list of INSNS to the hash HASHP, possibly allocating HASHP + for the first time. 
*/ static void -record_insns (rtx insns, VEC(int,heap) **vecp) +record_insns (rtx insns, rtx end, htab_t *hashp) { rtx tmp; + htab_t hash = *hashp; - for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp)) - VEC_safe_push (int, heap, *vecp, INSN_UID (tmp)); + if (hash == NULL) + *hashp = hash + = htab_create_ggc (17, htab_hash_pointer, htab_eq_pointer, NULL); + + for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp)) + { + void **slot = htab_find_slot (hash, tmp, INSERT); + gcc_assert (*slot == NULL); + *slot = tmp; + } +} + +/* INSN has been duplicated as COPY, as part of duping a basic block. + If INSN is an epilogue insn, then record COPY as epilogue as well. */ + +void +maybe_copy_epilogue_insn (rtx insn, rtx copy) +{ + void **slot; + + if (epilogue_insn_hash == NULL + || htab_find (epilogue_insn_hash, insn) == NULL) + return; + + slot = htab_find_slot (epilogue_insn_hash, copy, INSERT); + gcc_assert (*slot == NULL); + *slot = copy; } /* Set the locator of the insn chain starting at INSN to LOC. */ @@ -4790,52 +5032,37 @@ set_insn_locators (rtx insn, int loc) } } -/* Determine how many INSN_UIDs in VEC are part of INSN. Because we can - be running after reorg, SEQUENCE rtl is possible. */ +/* Determine if any INSNs in HASH are, or are part of, INSN. Because + we can be running after reorg, SEQUENCE rtl is possible. */ -static int -contains (const_rtx insn, VEC(int,heap) **vec) +static bool +contains (const_rtx insn, htab_t hash) { - int i, j; + if (hash == NULL) + return false; - if (NONJUMP_INSN_P (insn) - && GET_CODE (PATTERN (insn)) == SEQUENCE) + if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) { - int count = 0; + int i; for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--) - for (j = VEC_length (int, *vec) - 1; j >= 0; --j) - if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) - == VEC_index (int, *vec, j)) - count++; - return count; + if (htab_find (hash, XVECEXP (PATTERN (insn), 0, i))) + return true; + return false; } - else - { - for (j = VEC_length (int, *vec) - 1; j >= 0; --j) - if (INSN_UID (insn) == VEC_index (int, *vec, j)) - return 1; - } - return 0; + + return htab_find (hash, insn) != NULL; } int prologue_epilogue_contains (const_rtx insn) { - if (contains (insn, &prologue)) + if (contains (insn, prologue_insn_hash)) return 1; - if (contains (insn, &epilogue)) + if (contains (insn, epilogue_insn_hash)) return 1; return 0; } -int -sibcall_epilogue_contains (const_rtx insn) -{ - if (sibcall_epilogue) - return contains (insn, &sibcall_epilogue); - return 0; -} - #ifdef HAVE_return /* Insert gen_return at the end of block BB. This also means updating block_for_insn appropriately. */ @@ -4872,15 +5099,15 @@ thread_prologue_and_epilogue_insns (void) seq = gen_prologue (); emit_insn (seq); - /* Insert an explicit USE for the frame pointer + /* Insert an explicit USE for the frame pointer if the profiling is on and the frame pointer is required. */ if (crtl->profile && frame_pointer_needed) emit_use (hard_frame_pointer_rtx); /* Retain a map of the prologue insns. */ - record_insns (seq, &prologue); + record_insns (seq, NULL, &prologue_insn_hash); emit_note (NOTE_INSN_PROLOGUE_END); - + #ifndef PROFILE_BEFORE_PROLOGUE /* Ensure that instructions are not moved into the prologue when profiling is on. 
The call to the profiling routine can be @@ -5010,6 +5237,38 @@ thread_prologue_and_epilogue_insns (void) } } #endif + + /* A small fib -- epilogue is not yet completed, but we wish to re-use + this marker for the splits of EH_RETURN patterns, and nothing else + uses the flag in the meantime. */ + epilogue_completed = 1; + +#ifdef HAVE_eh_return + /* Find non-fallthru edges that end with EH_RETURN instructions. On + some targets, these get split to a special version of the epilogue + code. In order to be able to properly annotate these with unwind + info, try to split them now. If we get a valid split, drop an + EPILOGUE_BEG note and mark the insns as epilogue insns. */ + FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds) + { + rtx prev, last, trial; + + if (e->flags & EDGE_FALLTHRU) + continue; + last = BB_END (e->src); + if (!eh_returnjump_p (last)) + continue; + + prev = PREV_INSN (last); + trial = try_split (PATTERN (last), last, 1); + if (trial == last) + continue; + + record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash); + emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev); + } +#endif + /* Find the edge that falls through to EXIT. Other edges may exist due to RETURN instructions, but those don't need epilogues. There really shouldn't be a mixture -- either all should have @@ -5030,7 +5289,7 @@ thread_prologue_and_epilogue_insns (void) emit_jump_insn (seq); /* Retain a map of the epilogue insns. */ - record_insns (seq, &epilogue); + record_insns (seq, NULL, &epilogue_insn_hash); set_insn_locators (seq, epilogue_locator); seq = get_insns (); @@ -5092,6 +5351,7 @@ epilogue_done: } start_sequence (); + emit_note (NOTE_INSN_EPILOGUE_BEG); emit_insn (gen_sibcall_epilogue ()); seq = get_insns (); end_sequence (); @@ -5099,7 +5359,7 @@ epilogue_done: /* Retain a map of the epilogue insns. Used in life analysis to avoid getting rid of sibcall epilogue insns. Do this before we actually emit the sequence. */ - record_insns (seq, &sibcall_epilogue); + record_insns (seq, NULL, &epilogue_insn_hash); set_insn_locators (seq, epilogue_locator); emit_insn_before (seq, insn); @@ -5120,7 +5380,7 @@ epilogue_done: for (insn = epilogue_end; insn; insn = next) { next = NEXT_INSN (insn); - if (NOTE_P (insn) + if (NOTE_P (insn) && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)) reorder_insns (insn, insn, PREV_INSN (epilogue_end)); } @@ -5133,23 +5393,26 @@ epilogue_done: df_update_entry_exit_and_calls (); } -/* Reposition the prologue-end and epilogue-begin notes after instruction - scheduling and delayed branch scheduling. */ +/* Reposition the prologue-end and epilogue-begin notes after + instruction scheduling. */ void reposition_prologue_and_epilogue_notes (void) { -#if defined (HAVE_prologue) || defined (HAVE_epilogue) - rtx insn, last, note; - int len; - - if ((len = VEC_length (int, prologue)) > 0) +#if defined (HAVE_prologue) || defined (HAVE_epilogue) \ + || defined (HAVE_sibcall_epilogue) + /* Since the hash table is created on demand, the fact that it is + non-null is a signal that it is non-empty. */ + if (prologue_insn_hash != NULL) { - last = 0, note = 0; - - /* Scan from the beginning until we reach the last prologue insn. - We apparently can't depend on basic_block_{head,end} after - reorg has run. */ + size_t len = htab_elements (prologue_insn_hash); + rtx insn, last = NULL, note = NULL; + + /* Scan from the beginning until we reach the last prologue insn. */ + /* ??? 
@@ -5133,23 +5393,26 @@ epilogue_done:
   df_update_entry_exit_and_calls ();
 }
 
-/* Reposition the prologue-end and epilogue-begin notes after instruction
-   scheduling and delayed branch scheduling.  */
+/* Reposition the prologue-end and epilogue-begin notes after
+   instruction scheduling.  */
 
 void
 reposition_prologue_and_epilogue_notes (void)
 {
-#if defined (HAVE_prologue) || defined (HAVE_epilogue)
-  rtx insn, last, note;
-  int len;
-
-  if ((len = VEC_length (int, prologue)) > 0)
+#if defined (HAVE_prologue) || defined (HAVE_epilogue) \
+    || defined (HAVE_sibcall_epilogue)
+  /* Since the hash table is created on demand, the fact that it is
+     non-null is a signal that it is non-empty.  */
+  if (prologue_insn_hash != NULL)
     {
-      last = 0, note = 0;
-
-      /* Scan from the beginning until we reach the last prologue insn.
-	 We apparently can't depend on basic_block_{head,end} after
-	 reorg has run.  */
+      size_t len = htab_elements (prologue_insn_hash);
+      rtx insn, last = NULL, note = NULL;
+
+      /* Scan from the beginning until we reach the last prologue insn.  */
+      /* ??? While we do have the CFG intact, there are two problems:
+	 (1) The prologue can contain loops (typically probing the stack),
+	     which means that the end of the prologue isn't in the first bb.
+	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
@@ -5157,7 +5420,7 @@ reposition_prologue_and_epilogue_notes (void)
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
-	  else if (contains (insn, &prologue))
+	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      if (--len == 0)
@@ -5167,14 +5430,17 @@ reposition_prologue_and_epilogue_notes (void)
 
       if (last)
	{
-	  /* Find the prologue-end note if we haven't already, and
-	     move it to just after the last prologue insn.  */
-	  if (note == 0)
+	  if (note == NULL)
	    {
-	      for (note = last; (note = NEXT_INSN (note));)
-		if (NOTE_P (note)
-		    && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
-		  break;
+	      /* Scan forward looking for the PROLOGUE_END note.  It should
+		 be right at the beginning of the block, possibly with other
+		 insn notes that got moved there.  */
+	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
+		{
+		  if (NOTE_P (note)
+		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
+		    break;
+		}
	    }
 
	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
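The epilogue-note handling in the next hunk switches from one backward scan of the whole insn stream to a per-block scan of every predecessor of the exit block.  A minimal sketch of that CFG idiom, assuming the GCC 4.5-era macros used in this file (the function name is invented for illustration):

/* Return the first EPILOGUE_BEG note found in any block that reaches
   the function exit, or NULL_RTX.  Illustrative only.  */
static rtx
find_epilogue_beg_note (void)
{
  edge e;
  edge_iterator ei;
  rtx insn;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    FOR_BB_INSNS (e->src, insn)
      if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
	return insn;
  return NULL_RTX;
}

Scanning per exit edge is what makes it possible to reposition one EPILOGUE_BEG note per epilogue copy (including the sibcall and EH_RETURN epilogues recorded above), instead of assuming a single epilogue at the end of the function.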
@@ -5184,42 +5450,51 @@ reposition_prologue_and_epilogue_notes (void)
	}
     }
 
-  if ((len = VEC_length (int, epilogue)) > 0)
+  if (epilogue_insn_hash != NULL)
     {
-      last = 0, note = 0;
+      edge_iterator ei;
+      edge e;
 
-      /* Scan from the end until we reach the first epilogue insn.
-	 We apparently can't depend on basic_block_{head,end} after
-	 reorg has run.  */
-      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
+      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
-	  if (NOTE_P (insn))
-	    {
-	      if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
-		note = insn;
-	    }
-	  else if (contains (insn, &epilogue))
+	  rtx insn, first = NULL, note = NULL;
+	  basic_block bb = e->src;
+
+	  /* Scan from the beginning until we reach the first epilogue insn. */
+	  FOR_BB_INSNS (bb, insn)
	    {
-	      last = insn;
-	      if (--len == 0)
-		break;
+	      if (NOTE_P (insn))
+		{
+		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
+		    {
+		      note = insn;
+		      if (first != NULL)
+			break;
+		    }
+		}
+	      else if (first == NULL && contains (insn, epilogue_insn_hash))
+		{
+		  first = insn;
+		  if (note != NULL)
+		    break;
+		}
	    }
-	}
 
-      if (last)
-	{
-	  /* Find the epilogue-begin note if we haven't already, and
-	     move it to just before the first epilogue insn.  */
-	  if (note == 0)
+	  if (note)
	    {
-	      for (note = insn; (note = PREV_INSN (note));)
-		if (NOTE_P (note)
-		    && NOTE_KIND (note) == NOTE_INSN_EPILOGUE_BEG)
-		  break;
+	      /* If the function has a single basic block, and no real
+		 epilogue insns (e.g. sibcall with no cleanup), the
+		 epilogue note can get scheduled before the prologue
+		 note.  If we have frame related prologue insns, having
+		 them scanned during the epilogue will result in a crash.
+		 In this case re-order the epilogue note to just before
+		 the last insn in the block.  */
+	      if (first == NULL)
+		first = BB_END (bb);
+
+	      if (PREV_INSN (first) != note)
+		reorder_insns (note, note, PREV_INSN (first));
	    }
-
-	  if (PREV_INSN (last) != note)
-	    reorder_insns (note, note, PREV_INSN (last));
	}
     }
 #endif /* HAVE_prologue or HAVE_epilogue */
@@ -5229,15 +5504,10 @@ reposition_prologue_and_epilogue_notes (void)
 const char *
 current_function_name (void)
 {
+  if (cfun == NULL)
+    return "";
   return lang_hooks.decl_printable_name (cfun->decl, 2);
 }
-
-/* Returns the raw (mangled) name of the current function.  */
-const char *
-current_function_assembler_name (void)
-{
-  return IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (cfun->decl));
-}
 
 
 static unsigned int
@@ -5251,6 +5521,7 @@ rest_of_handle_check_leaf_regs (void)
 }
 
 /* Insert a TYPE into the used types hash table of CFUN.  */
+
 static void
 used_types_insert_helper (tree type, struct function *func)
 {
@@ -5272,23 +5543,102 @@ used_types_insert_helper (tree type, struct function *func)
 void
 used_types_insert (tree t)
 {
   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
-    t = TREE_TYPE (t);
-  t = TYPE_MAIN_VARIANT (t);
+    if (TYPE_NAME (t))
+      break;
+    else
+      t = TREE_TYPE (t);
+  if (TYPE_NAME (t) == NULL_TREE
+      || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
+    t = TYPE_MAIN_VARIANT (t);
   if (debug_info_level > DINFO_LEVEL_NONE)
-    used_types_insert_helper (t, cfun);
+    {
+      if (cfun)
+	used_types_insert_helper (t, cfun);
+      else
+	/* So this might be a type referenced by a global variable.
+	   Record that type so that we can later decide to emit its debug
+	   information.  */
+	types_used_by_cur_var_decl =
+	  tree_cons (t, NULL, types_used_by_cur_var_decl);
+
+    }
+}
+
+/* Helper to Hash a struct types_used_by_vars_entry.  */
+
+static hashval_t
+hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
+{
+  gcc_assert (entry && entry->var_decl && entry->type);
+
+  return iterative_hash_object (entry->type,
+				iterative_hash_object (entry->var_decl, 0));
+}
+
+/* Hash function of the types_used_by_vars_entry hash table.  */
+
+hashval_t
+types_used_by_vars_do_hash (const void *x)
+{
+  const struct types_used_by_vars_entry *entry =
+    (const struct types_used_by_vars_entry *) x;
+
+  return hash_types_used_by_vars_entry (entry);
+}
+
+/*Equality function of the types_used_by_vars_entry hash table.  */
+
+int
+types_used_by_vars_eq (const void *x1, const void *x2)
+{
+  const struct types_used_by_vars_entry *e1 =
+    (const struct types_used_by_vars_entry *) x1;
+  const struct types_used_by_vars_entry *e2 =
+    (const struct types_used_by_vars_entry *)x2;
+
+  return (e1->var_decl == e2->var_decl && e1->type == e2->type);
+}
+
+/* Inserts an entry into the types_used_by_vars_hash hash table.  */
+
+void
+types_used_by_var_decl_insert (tree type, tree var_decl)
+{
+  if (type != NULL && var_decl != NULL)
+    {
+      void **slot;
+      struct types_used_by_vars_entry e;
+      e.var_decl = var_decl;
+      e.type = type;
+      if (types_used_by_vars_hash == NULL)
+	types_used_by_vars_hash =
+	  htab_create_ggc (37, types_used_by_vars_do_hash,
+			   types_used_by_vars_eq, NULL);
+      slot = htab_find_slot_with_hash (types_used_by_vars_hash, &e,
+				       hash_types_used_by_vars_entry (&e), INSERT);
+      if (*slot == NULL)
+	{
+	  struct types_used_by_vars_entry *entry;
+	  entry = (struct types_used_by_vars_entry*) ggc_alloc
+		    (sizeof (struct types_used_by_vars_entry));
+	  entry->type = type;
+	  entry->var_decl = var_decl;
+	  *slot = entry;
+	}
    }
 }
 
 struct rtl_opt_pass pass_leaf_regs =
 {
 {
  RTL_PASS,
-  NULL,                                 /* name */
+  "*leaf_regs",                         /* name */
  NULL,                                 /* gate */
  rest_of_handle_check_leaf_regs,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
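The hunk above also introduces the types_used_by_vars_hash API: used_types_insert queues types referenced by a global variable in types_used_by_cur_var_decl while cfun is NULL, and a front end is expected to transfer them to the per-variable table once the VAR_DECL is finished.  A hypothetical sketch of such a caller (the function name is invented; the TREE_PURPOSE access matches the tree_cons call above):

/* Associate every type queued for the global VAR_DECL with it, so debug
   info for those types can be emitted later.  Illustrative only.  */
static void
note_types_used_by_global_var (tree var_decl)
{
  tree t;

  for (t = types_used_by_cur_var_decl; t; t = TREE_CHAIN (t))
    types_used_by_var_decl_insert (TREE_PURPOSE (t), var_decl);
  types_used_by_cur_var_decl = NULL_TREE;
}

types_used_by_var_decl_insert itself ignores NULL arguments and only allocates types_used_by_vars_hash on first use, so the caller does not need to guard against an empty queue.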
@@ -5335,7 +5685,7 @@ struct rtl_opt_pass pass_thread_prologue_and_epilogue =
 
 /* This mini-pass fixes fall-out from SSA in asm statements that have
-   in-out constraints.  Say you start with 
+   in-out constraints.  Say you start with
 
      orig = inout;
     asm ("": "+mr" (inout));
@@ -5394,6 +5744,9 @@ match_asm_constraints_1 (rtx insn, rtx *p_sets, int noutputs)
       char *end;
       int match, j;
 
+      if (*constraint == '%')
+	constraint++;
+
       match = strtoul (constraint, &end, 10);
       if (end == constraint)
	continue;
@@ -5520,7 +5873,7 @@ struct rtl_opt_pass pass_match_asm_constraints =
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
+  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */