* config/cris/cris.c (cris_function_value_regno_p): Make static.
[pf3gnuchains/gcc-fork.git]
diff --git a/gcc/calls.c b/gcc/calls.c
index 26c3200..382de7f 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -274,6 +274,7 @@ emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNU
   if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
     {
       tree t = fndecl;
+
       /* Although a built-in FUNCTION_DECL and its non-__builtin
         counterpart compare equal and get a shared mem_attrs, they
         produce different dump output in compare-debug compilations,
@@ -281,10 +282,14 @@ emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNU
         adds a different (but equivalent) entry, while the other
         doesn't run the garbage collector at the same spot and then
         shares the mem_attr with the equivalent entry. */
-      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
-         && built_in_decls[DECL_FUNCTION_CODE (t)])
-       t = built_in_decls[DECL_FUNCTION_CODE (t)];
-      set_mem_expr (funmem, t);
+      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
+       {
+         tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
+         if (t2)
+           t = t2;
+       }
+
+      set_mem_expr (funmem, t);
     }
   else if (fntree)
     set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
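
The two hunks above stop indexing built_in_decls[] directly and instead go
through builtin_decl_explicit (), which returns NULL_TREE when no explicit
__builtin_* decl has been registered, so the fallback to the original decl is
now spelled out.  The same canonicalization, pulled out into a standalone
helper as a minimal sketch (the helper name is invented for illustration and
is not part of the patch):

/* Illustrative only: map a normal builtin FUNCTION_DECL to its explicit
   __builtin_* decl, if any, so that foo and __builtin_foo end up sharing
   the same mem_expr.  */

static tree
canonical_builtin_decl (tree fndecl)
{
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (fndecl));
      if (t2)
        return t2;
    }
  return fndecl;
}
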
@@ -606,6 +611,69 @@ alloca_call_p (const_tree exp)
   return false;
 }
 
+/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
+   function.  Return FALSE otherwise.  */
+
+static bool
+is_tm_builtin (const_tree fndecl)
+{
+  if (fndecl == NULL)
+    return false;
+
+  if (decl_is_tm_clone (fndecl))
+    return true;
+
+  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+    {
+      switch (DECL_FUNCTION_CODE (fndecl))
+       {
+       case BUILT_IN_TM_COMMIT:
+       case BUILT_IN_TM_COMMIT_EH:
+       case BUILT_IN_TM_ABORT:
+       case BUILT_IN_TM_IRREVOCABLE:
+       case BUILT_IN_TM_GETTMCLONE_IRR:
+       case BUILT_IN_TM_MEMCPY:
+       case BUILT_IN_TM_MEMMOVE:
+       case BUILT_IN_TM_MEMSET:
+       CASE_BUILT_IN_TM_STORE (1):
+       CASE_BUILT_IN_TM_STORE (2):
+       CASE_BUILT_IN_TM_STORE (4):
+       CASE_BUILT_IN_TM_STORE (8):
+       CASE_BUILT_IN_TM_STORE (FLOAT):
+       CASE_BUILT_IN_TM_STORE (DOUBLE):
+       CASE_BUILT_IN_TM_STORE (LDOUBLE):
+       CASE_BUILT_IN_TM_STORE (M64):
+       CASE_BUILT_IN_TM_STORE (M128):
+       CASE_BUILT_IN_TM_STORE (M256):
+       CASE_BUILT_IN_TM_LOAD (1):
+       CASE_BUILT_IN_TM_LOAD (2):
+       CASE_BUILT_IN_TM_LOAD (4):
+       CASE_BUILT_IN_TM_LOAD (8):
+       CASE_BUILT_IN_TM_LOAD (FLOAT):
+       CASE_BUILT_IN_TM_LOAD (DOUBLE):
+       CASE_BUILT_IN_TM_LOAD (LDOUBLE):
+       CASE_BUILT_IN_TM_LOAD (M64):
+       CASE_BUILT_IN_TM_LOAD (M128):
+       CASE_BUILT_IN_TM_LOAD (M256):
+       case BUILT_IN_TM_LOG:
+       case BUILT_IN_TM_LOG_1:
+       case BUILT_IN_TM_LOG_2:
+       case BUILT_IN_TM_LOG_4:
+       case BUILT_IN_TM_LOG_8:
+       case BUILT_IN_TM_LOG_FLOAT:
+       case BUILT_IN_TM_LOG_DOUBLE:
+       case BUILT_IN_TM_LOG_LDOUBLE:
+       case BUILT_IN_TM_LOG_M64:
+       case BUILT_IN_TM_LOG_M128:
+       case BUILT_IN_TM_LOG_M256:
+         return true;
+       default:
+         break;
+       }
+    }
+  return false;
+}
+
 /* Detect flags (function attributes) from the function decl or type node.  */
 
 int
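
For context on what the new is_tm_builtin predicate matches: with -fgnu-tm,
user-level transaction statements are lowered into calls to the TM builtins
listed above (loads, stores, logging, commit, abort, and so on), and TM
clones are the instrumented copies of transaction-safe functions.  A small
user-level example, for illustration only, whose compilation exercises
several of those builtins:

/* Compile with e.g.: gcc -fgnu-tm -O2 -S example.c
   The transaction below is lowered into TM builtin calls (TM loads and
   stores of "counter" plus a commit), which is_tm_builtin now
   recognizes.  */

int counter;

void
bump (void)
{
  __transaction_atomic { counter++; }
}
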
@@ -639,10 +707,28 @@ flags_from_decl_or_type (const_tree exp)
       if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;
 
+      if (flag_tm)
+       {
+         if (is_tm_builtin (exp))
+           flags |= ECF_TM_BUILTIN;
+         else if ((flags & ECF_CONST) != 0
+                  || lookup_attribute ("transaction_pure",
+                                       TYPE_ATTRIBUTES (TREE_TYPE (exp))))
+           flags |= ECF_TM_PURE;
+       }
+
       flags = special_function_p (exp, flags);
     }
-  else if (TYPE_P (exp) && TYPE_READONLY (exp))
-    flags |= ECF_CONST;
+  else if (TYPE_P (exp))
+    {
+      if (TYPE_READONLY (exp))
+       flags |= ECF_CONST;
+
+      if (flag_tm
+         && ((flags & ECF_CONST) != 0
+             || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
+       flags |= ECF_TM_PURE;
+    }
 
   if (TREE_THIS_VOLATILE (exp))
     {
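
With this hunk, flags_from_decl_or_type tags TM builtins with ECF_TM_BUILTIN
and const or transaction_pure functions with ECF_TM_PURE, but only when
flag_tm (-fgnu-tm) is in effect.  On the source-language side the ECF_TM_PURE
path corresponds to declarations carrying __attribute__ ((transaction_pure)).
A rough sketch of how a caller might consume the new bits (the helper name is
hypothetical and not part of the patch):

/* Hypothetical helper, for illustration only: a call that is either a TM
   builtin or transaction-pure needs no TM instrumentation of its own.  */

static bool
call_needs_no_tm_instrumentation_p (tree fndecl)
{
  int flags = flags_from_decl_or_type (fndecl);
  return (flags & (ECF_TM_BUILTIN | ECF_TM_PURE)) != 0;
}
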
@@ -742,7 +828,7 @@ precompute_register_parameters (int num_actuals, struct arg_data *args,
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
-                && rtx_cost (args[i].value, SET, optimize_insn_for_speed_p ())
+                && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
                    > COSTS_N_INSNS (1)
                 && ((*reg_parm_seen
                      && targetm.small_register_classes_for_mode_p (args[i].mode))
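
The set_src_cost call replaces the open-coded rtx_cost here because rtx_cost
gained an operand-number parameter around this time; the wrapper asks for the
cost of the value appearing as the source operand of a SET.  A rough sketch of
what the wrapper stands for (the real definition lives in rtl.h and may differ
in detail):

/* Sketch only; see rtl.h for the real definition.  Operand 1 of a SET is
   its source expression.  */

static inline int
set_src_cost (rtx x, bool speed_p)
{
  return rtx_cost (x, SET, /*opno=*/1, speed_p);
}
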
@@ -1756,7 +1842,8 @@ load_register_parameters (struct arg_data *args, int num_actuals,
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
-           use_reg (call_fusage, reg);
+           use_reg_mode (call_fusage, reg,
+                         TYPE_MODE (TREE_TYPE (args[i].tree_value)));
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
@@ -2815,10 +2902,10 @@ expand_call (tree exp, rtx target, int ignore)
              }
 
          if (args[i].stack)
-           call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
-                                            gen_rtx_USE (VOIDmode,
-                                                         args[i].stack),
-                                            call_fusage);
+           call_fusage
+             = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
+                                  gen_rtx_USE (VOIDmode, args[i].stack),
+                                  call_fusage);
        }
 
       /* If we have a parm that is passed in registers but not in memory
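
This hunk and the use_reg_mode change above share one purpose: the
CALL_INSN_FUNCTION_USAGE list now records the mode in which each parameter is
actually passed (its TYPE_MODE) instead of VOIDmode or the register's own
mode.  A rough sketch of how the plain use_reg entry point can sit on top of
the new use_reg_mode (the real declarations live in expr.h / emit-rtl.c and
may differ in detail):

/* Sketch only: with a mode-taking use_reg_mode available, use_reg becomes
   the "no specific mode" case.  */

static inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}
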
@@ -3576,20 +3663,29 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
       argvec[count].partial
        = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
 
-      locate_and_pad_parm (mode, NULL_TREE,
+      if (argvec[count].reg == 0
+         || argvec[count].partial != 0
+         || reg_parm_stack_space > 0)
+       {
+         locate_and_pad_parm (mode, NULL_TREE,
 #ifdef STACK_PARMS_IN_REG_PARM_AREA
-                          1,
+                              1,
 #else
-                          argvec[count].reg != 0,
+                              argvec[count].reg != 0,
+#endif
+                              argvec[count].partial,
+                              NULL_TREE, &args_size, &argvec[count].locate);
+         args_size.constant += argvec[count].locate.size.constant;
+         gcc_assert (!argvec[count].locate.size.var);
+       }
+#ifdef BLOCK_REG_PADDING
+      else
+       /* The argument is passed entirely in registers.  See at which
+          end it should be padded.  */
+       argvec[count].locate.where_pad =
+         BLOCK_REG_PADDING (mode, NULL_TREE,
+                            GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
 #endif
-                          argvec[count].partial,
-                          NULL_TREE, &args_size, &argvec[count].locate);
-
-      gcc_assert (!argvec[count].locate.size.var);
-
-      if (argvec[count].reg == 0 || argvec[count].partial != 0
-         || reg_parm_stack_space > 0)
-       args_size.constant += argvec[count].locate.size.constant;
 
       targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
     }
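
The final hunk restructures the libcall argument loop so that
locate_and_pad_parm runs only when some part of the argument actually lands on
the stack; arguments passed entirely in registers now just record their
padding direction on BLOCK_REG_PADDING targets.  The guarding condition,
restated as a tiny predicate for illustration only (the helper is not part of
the patch):

/* Illustrative only: a libcall argument needs a stack slot computed when it
   is not passed in a register at all, is only partially in registers, or
   when the target reserves stack space even for register parameters.  */

static bool
libcall_arg_needs_stack_slot_p (rtx reg, int partial, int reg_parm_stack_space)
{
  return reg == 0 || partial != 0 || reg_parm_stack_space > 0;
}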