X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fstmt.c;h=93d643a7bf0a8bdfa53fc627cad0c4c58ffc266a;hb=fe10f73e500e04b2006cdcfd1349e93dfdcebd0e;hp=d2583ca5458c50ef68541268bbacfa23b84589d3;hpb=82c7907c61991be035c7cd5bbc8227b80ea98b22;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/stmt.c b/gcc/stmt.c
index d2583ca5458..93d643a7bf0 100644
--- a/gcc/stmt.c
+++ b/gcc/stmt.c
@@ -1,7 +1,7 @@
 /* Expands front end tree to back end RTL for GCC
    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
-   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
-   Free Software Foundation, Inc.
+   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
+   2010, 2011, 2012 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -41,7 +41,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "libfuncs.h"
 #include "recog.h"
 #include "machmode.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
 #include "output.h"
 #include "ggc.h"
 #include "langhooks.h"
@@ -52,6 +52,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "regs.h"
 #include "alloc-pool.h"
 #include "pretty-print.h"
+#include "bitmap.h"
+#include "params.h"
+
 
 /* Functions and data structures for expanding case statements.  */
 
@@ -684,13 +687,14 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
   for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
     {
       const char *regname;
+      int nregs;
 
       if (TREE_VALUE (tail) == error_mark_node)
         return;
       regname = TREE_STRING_POINTER (TREE_VALUE (tail));
 
-      i = decode_reg_name (regname);
-      if (i >= 0 || i == -4)
+      i = decode_reg_name_and_count (regname, &nregs);
+      if (i == -4)
         ++nclobbers;
       else if (i == -2)
         error ("unknown register name %qs in %<asm%>", regname);
@@ -698,14 +702,21 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
       /* Mark clobbered registers.  */
       if (i >= 0)
         {
-          /* Clobbering the PIC register is an error.  */
-          if (i == (int) PIC_OFFSET_TABLE_REGNUM)
+          int reg;
+
+          for (reg = i; reg < i + nregs; reg++)
             {
-              error ("PIC register %qs clobbered in %<asm%>", regname);
-              return;
-            }
+              ++nclobbers;
+
+              /* Clobbering the PIC register is an error.  */
+              if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
+                {
+                  error ("PIC register clobbered by %qs in %<asm%>", regname);
+                  return;
+                }
 
-          SET_HARD_REG_BIT (clobbered_regs, i);
+              SET_HARD_REG_BIT (clobbered_regs, reg);
+            }
         }
     }
 
@@ -773,6 +784,10 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
 
   /* Second pass evaluates arguments.  */
 
+  /* Make sure stack is consistent for asm goto.  */
+  if (nlabels > 0)
+    do_pending_stack_adjust ();
+
   ninout = 0;
   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
     {
@@ -937,7 +952,7 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
       ASM_OPERANDS_INPUT (body, i) = op;
 
       ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
-        = gen_rtx_ASM_INPUT (TYPE_MODE (type), 
+        = gen_rtx_ASM_INPUT (TYPE_MODE (type),
                              ggc_strdup (constraints[i + noutputs]));
 
       if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
@@ -1026,7 +1041,8 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
   for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
     {
       const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
-      int j = decode_reg_name (regname);
+      int reg, nregs;
+      int j = decode_reg_name_and_count (regname, &nregs);
       rtx clobbered_reg;
 
       if (j < 0)
@@ -1048,30 +1064,39 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
           continue;
         }
 
-      /* Use QImode since that's guaranteed to clobber just one reg.  */
-      clobbered_reg = gen_rtx_REG (QImode, j);
-
-      /* Do sanity check for overlap between clobbers and respectively
-         input and outputs that hasn't been handled.  Such overlap
-         should have been detected and reported above.  */
-      if (!clobber_conflict_found)
+      for (reg = j; reg < j + nregs; reg++)
         {
-          int opno;
-
-          /* We test the old body (obody) contents to avoid tripping
-             over the under-construction body.  */
-          for (opno = 0; opno < noutputs; opno++)
-            if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
-              internal_error ("asm clobber conflict with output operand");
-
-          for (opno = 0; opno < ninputs - ninout; opno++)
-            if (reg_overlap_mentioned_p (clobbered_reg,
-                                         ASM_OPERANDS_INPUT (obody, opno)))
-              internal_error ("asm clobber conflict with input operand");
-        }
+          /* Use QImode since that's guaranteed to clobber just
+           * one reg. */
+          clobbered_reg = gen_rtx_REG (QImode, reg);
+
+          /* Do sanity check for overlap between clobbers and
+             respectively input and outputs that hasn't been
+             handled.  Such overlap should have been detected and
+             reported above.  */
+          if (!clobber_conflict_found)
+            {
+              int opno;
+
+              /* We test the old body (obody) contents to avoid
+                 tripping over the under-construction body.  */
+              for (opno = 0; opno < noutputs; opno++)
+                if (reg_overlap_mentioned_p (clobbered_reg,
+                                             output_rtx[opno]))
+                  internal_error
+                    ("asm clobber conflict with output operand");
+
+              for (opno = 0; opno < ninputs - ninout; opno++)
+                if (reg_overlap_mentioned_p (clobbered_reg,
+                                             ASM_OPERANDS_INPUT (obody,
+                                                                 opno)))
+                  internal_error
+                    ("asm clobber conflict with input operand");
+            }
 
-      XVECEXP (body, 0, i++)
-        = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
+          XVECEXP (body, 0, i++)
+            = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
+        }
     }
 
   if (nlabels > 0)
@@ -1099,6 +1124,7 @@ expand_asm_stmt (gimple stmt)
   size_t i, n;
   const char *s;
   tree str, out, in, cl, labels;
+  location_t locus = gimple_location (stmt);
 
   /* Meh... convert the gimple asm operands into real tree lists.
      Eventually we should make all routines work on the vectors instead
@@ -1144,7 +1170,7 @@ expand_asm_stmt (gimple stmt)
 
   if (gimple_asm_input_p (stmt))
     {
-      expand_asm_loc (str, gimple_asm_volatile_p (stmt), input_location);
+      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
       return;
     }
 
@@ -1160,7 +1186,7 @@ expand_asm_stmt (gimple stmt)
   /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
      OUTPUTS some trees for where the values were actually stored.  */
   expand_asm_operands (str, outputs, in, cl, labels,
-                       gimple_asm_volatile_p (stmt), input_location);
+                       gimple_asm_volatile_p (stmt), locus);
 
   /* Copy all the intermediate outputs into the specified outputs.  */
   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
@@ -1227,11 +1253,11 @@ check_operand_nalternatives (tree outputs, tree inputs)
 static bool
 check_unique_operand_names (tree outputs, tree inputs, tree labels)
 {
-  tree i, j;
+  tree i, j, i_name = NULL_TREE;
 
   for (i = outputs; i ; i = TREE_CHAIN (i))
     {
-      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
+      i_name = TREE_PURPOSE (TREE_PURPOSE (i));
       if (! i_name)
         continue;
 
@@ -1242,7 +1268,7 @@ check_unique_operand_names (tree outputs, tree inputs, tree labels)
 
   for (i = inputs; i ; i = TREE_CHAIN (i))
     {
-      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
+      i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
         continue;
 
@@ -1256,7 +1282,7 @@ check_unique_operand_names (tree outputs, tree inputs, tree labels)
 
   for (i = labels; i ; i = TREE_CHAIN (i))
     {
-      tree i_name = TREE_PURPOSE (i);
+      i_name = TREE_PURPOSE (i);
       if (! i_name)
         continue;
 
@@ -1271,8 +1297,7 @@ check_unique_operand_names (tree outputs, tree inputs, tree labels)
   return true;
 
  failure:
-  error ("duplicate asm operand name %qs",
-         TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
+  error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
   return false;
 }
 
@@ -1316,7 +1341,7 @@ resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
             break;
           else
             {
-              c += 1;
+              c += 1 + (c[1] == '%');
              continue;
             }
         }
@@ -1338,7 +1363,7 @@ resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
             p += 2;
           else
             {
-              p += 1;
+              p += 1 + (p[1] == '%');
               continue;
             }
 
@@ -1430,7 +1455,7 @@ expand_expr_stmt (tree exp)
       if (TYPE_MODE (type) == VOIDmode)
         ;
       else if (TYPE_MODE (type) != BLKmode)
-        value = copy_to_reg (value);
+        copy_to_reg (value);
       else
         {
           rtx lab = gen_label_rtx ();
@@ -1591,8 +1616,11 @@ expand_value_return (rtx val)
       tree type = TREE_TYPE (decl);
       int unsignedp = TYPE_UNSIGNED (type);
       enum machine_mode old_mode = DECL_MODE (decl);
-      enum machine_mode mode = promote_function_mode (type, old_mode,
-                                                      &unsignedp, funtype, 1);
+      enum machine_mode mode;
+      if (DECL_BY_REFERENCE (decl))
+        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
+      else
+        mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
 
       if (mode != old_mode)
         val = convert_modes (mode, old_mode, val, unsignedp);
@@ -1656,119 +1684,21 @@ expand_return (tree retval)
     expand_value_return (result_rtl);
 
   /* If the result is an aggregate that is being returned in one (or more)
-     registers, load the registers here.  The compiler currently can't handle
-     copying a BLKmode value into registers.  We could put this code in a
-     more general area (for use by everyone instead of just function
-     call/return), but until this feature is generally usable it is kept here
-     (and in expand_call).  */
+     registers, load the registers here.  */
 
   else if (retval_rhs != 0
            && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
     {
-      int i;
-      unsigned HOST_WIDE_INT bitpos, xbitpos;
-      unsigned HOST_WIDE_INT padding_correction = 0;
-      unsigned HOST_WIDE_INT bytes
-        = int_size_in_bytes (TREE_TYPE (retval_rhs));
-      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
-      unsigned int bitsize
-        = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
-      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
-      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
-      rtx result_val = expand_normal (retval_rhs);
-      enum machine_mode tmpmode, result_reg_mode;
-
-      if (bytes == 0)
+      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
+      if (val)
         {
-          expand_null_return ();
-          return;
-        }
-
-      /* If the structure doesn't take up a whole number of words, see
-         whether the register value should be padded on the left or on
-         the right.  Set PADDING_CORRECTION to the number of padding
-         bits needed on the left side.
-
-         In most ABIs, the structure will be returned at the least end of
-         the register, which translates to right padding on little-endian
-         targets and left padding on big-endian targets.  The opposite
-         holds if the structure is returned at the most significant
-         end of the register.  */
-      if (bytes % UNITS_PER_WORD != 0
-          && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
-              ? !BYTES_BIG_ENDIAN
-              : BYTES_BIG_ENDIAN))
-        padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
-                                               * BITS_PER_UNIT));
-
-      /* Copy the structure BITSIZE bits at a time.  */
-      for (bitpos = 0, xbitpos = padding_correction;
-           bitpos < bytes * BITS_PER_UNIT;
-           bitpos += bitsize, xbitpos += bitsize)
-        {
-          /* We need a new destination pseudo each time xbitpos is
-             on a word boundary and when xbitpos == padding_correction
-             (the first time through).  */
-          if (xbitpos % BITS_PER_WORD == 0
-              || xbitpos == padding_correction)
-            {
-              /* Generate an appropriate register.  */
-              dst = gen_reg_rtx (word_mode);
-              result_pseudos[xbitpos / BITS_PER_WORD] = dst;
-
-              /* Clear the destination before we move anything into it.  */
-              emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
-            }
-
-          /* We need a new source operand each time bitpos is on a word
-             boundary.  */
-          if (bitpos % BITS_PER_WORD == 0)
-            src = operand_subword_force (result_val,
-                                         bitpos / BITS_PER_WORD,
-                                         BLKmode);
-
-          /* Use bitpos for the source extraction (left justified) and
-             xbitpos for the destination store (right justified).  */
-          store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
-                           extract_bit_field (src, bitsize,
-                                              bitpos % BITS_PER_WORD, 1,
-                                              NULL_RTX, word_mode, word_mode));
+          /* Use the mode of the result value on the return register.  */
+          PUT_MODE (result_rtl, GET_MODE (val));
+          expand_value_return (val);
         }
-
-      tmpmode = GET_MODE (result_rtl);
-      if (tmpmode == BLKmode)
-        {
-          /* Find the smallest integer mode large enough to hold the
-             entire structure and use that mode instead of BLKmode
-             on the USE insn for the return register.  */
-          for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
-               tmpmode != VOIDmode;
-               tmpmode = GET_MODE_WIDER_MODE (tmpmode))
-            /* Have we found a large enough mode?  */
-            if (GET_MODE_SIZE (tmpmode) >= bytes)
-              break;
-
-          /* A suitable mode should have been found.  */
-          gcc_assert (tmpmode != VOIDmode);
-
-          PUT_MODE (result_rtl, tmpmode);
-        }
-
-      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
-        result_reg_mode = word_mode;
       else
-        result_reg_mode = tmpmode;
-      result_reg = gen_reg_rtx (result_reg_mode);
-
-      for (i = 0; i < n_regs; i++)
-        emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
-                        result_pseudos[i]);
-
-      if (tmpmode != result_reg_mode)
-        result_reg = gen_lowpart (tmpmode, result_reg);
-
-      expand_value_return (result_reg);
+        expand_null_return ();
     }
   else if (retval_rhs != 0
            && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
@@ -1827,7 +1757,7 @@ expand_nl_goto_receiver (void)
      decrementing fp by STARTING_FRAME_OFFSET.  */
   emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
 
-#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+#if !HARD_FRAME_POINTER_IS_ARG_POINTER
   if (fixed_regs[ARG_POINTER_REGNUM])
     {
 #ifdef ELIMINABLE_REGS
@@ -1978,7 +1908,7 @@ expand_stack_save (void)
   rtx ret = NULL_RTX;
 
   do_pending_stack_adjust ();
-  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
+  emit_stack_save (SAVE_BLOCK, &ret);
   return ret;
 }
 
@@ -1986,10 +1916,13 @@ expand_stack_save (void)
 void
 expand_stack_restore (tree var)
 {
-  rtx sa = expand_normal (var);
+  rtx prev, sa = expand_normal (var);
 
   sa = convert_memory_address (Pmode, sa);
-  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
+
+  prev = get_last_insn ();
+  emit_stack_restore (SAVE_BLOCK, sa);
+  fixup_args_size_notes (prev, get_last_insn (), 0);
 }
 
 /* Do the insertion of a case label into case_list.  The labels are
@@ -2068,7 +2001,7 @@ add_case_node (struct case_node *head, tree type, tree low, tree high,
 
 /* By default, enable case bit tests on targets with ashlsi3.  */
 #ifndef CASE_USE_BIT_TESTS
-#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode)->insn_code \
+#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode) \
                              != CODE_FOR_nothing)
 #endif
 
@@ -2093,19 +2026,21 @@ struct case_bit_test
 static bool
 lshift_cheap_p (void)
 {
-  static bool init = false;
-  static bool cheap = true;
+  static bool init[2] = {false, false};
+  static bool cheap[2] = {true, true};
 
-  if (!init)
+  bool speed_p = optimize_insn_for_speed_p ();
+
+  if (!init[speed_p])
     {
       rtx reg = gen_rtx_REG (word_mode, 10000);
-      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
-                           optimize_insn_for_speed_p ());
-      cheap = cost < COSTS_N_INSNS (3);
-      init = true;
+      int cost = set_src_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg),
+                               speed_p);
+      cheap[speed_p] = cost < COSTS_N_INSNS (3);
+      init[speed_p] = true;
     }
 
-  return cheap;
+  return cheap[speed_p];
 }
 
 /* Comparison function for qsort to order bit tests by decreasing
@@ -2221,6 +2156,39 @@ emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
 #define HAVE_tablejump 0
 #endif
 
+/* Return true if a switch should be expanded as a bit test.
+   INDEX_EXPR is the index expression, RANGE is the difference between
+   highest and lowest case, UNIQ is number of unique case node targets
+   not counting the default case and COUNT is the number of comparisons
+   needed, not counting the default case.  */
+bool
+expand_switch_using_bit_tests_p (tree index_expr, tree range,
+                                 unsigned int uniq, unsigned int count)
+{
+  return (CASE_USE_BIT_TESTS
+          && ! TREE_CONSTANT (index_expr)
+          && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
+          && compare_tree_int (range, 0) > 0
+          && lshift_cheap_p ()
+          && ((uniq == 1 && count >= 3)
+              || (uniq == 2 && count >= 5)
+              || (uniq == 3 && count >= 6)));
+}
+
+/* Return the smallest number of different values for which it is best to use a
+   jump-table instead of a tree of conditional branches.  */
+
+static unsigned int
+case_values_threshold (void)
+{
+  unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);
+
+  if (threshold == 0)
+    threshold = targetm.case_values_threshold ();
+
+  return threshold;
+}
+
 /* Terminate a case (Pascal/Ada) or switch (C) statement
    in which ORIG_INDEX is the expression to be tested.
    If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
@@ -2331,11 +2299,8 @@ expand_case (gimple stmt)
           /* If we have not seen this label yet, then increase the
              number of unique case node targets seen.  */
           lab = label_rtx (n->code_label);
-          if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
-            {
-              bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
-              uniq++;
-            }
+          if (bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)))
+            uniq++;
         }
 
       BITMAP_FREE (label_bitmap);
@@ -2358,14 +2323,7 @@ expand_case (gimple stmt)
       /* Try implementing this switch statement by a short sequence of
          bit-wise comparisons.  However, we let the binary-tree case
         below handle constant index expressions.  */
-      if (CASE_USE_BIT_TESTS
-          && ! TREE_CONSTANT (index_expr)
-          && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
-          && compare_tree_int (range, 0) > 0
-          && lshift_cheap_p ()
-          && ((uniq == 1 && count >= 3)
-              || (uniq == 2 && count >= 5)
-              || (uniq == 3 && count >= 6)))
+      if (expand_switch_using_bit_tests_p (index_expr, range, uniq, count))
         {
           /* Optimize the case where all the case values fit in a word
              without having to subtract MINVAL.  In this case, we can
@@ -2385,7 +2343,7 @@ expand_case (gimple stmt)
 
          If the switch-index is a constant, do it this way because we can
          optimize it.  */
-      else if (count < targetm.case_values_threshold ()
+      else if (count < case_values_threshold ()
               || compare_tree_int (range,
                                    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
               /* RANGE may be signed, and really large ranges will show up
@@ -2534,7 +2492,7 @@ do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
                   int unsignedp)
 {
   do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
-                           NULL_RTX, NULL_RTX, label);
+                           NULL_RTX, NULL_RTX, label, -1);
 }
 
 /* Not all case values are encountered equally.  This function