+Fri Sep 29 13:37:59 MET DST 2000 Jan Hubicka <jh@suse.cz>
+
+ * final.c (final_scan_insn): Remove extra extract_insn call;
+ Use caching for constrain_operands.
+ (cleanup_subreg_operands): Use caching for extract_insn.
+ * recog.c (constrain_operands_cached): New.
+ * recog.h (constrain_operands_cached): Declare.
+ * i386.c (ix86_attr_length_immediate_default,
+ ix86_attr_length_address_default, ix86_agi_dependant): Cache
+ extract_insn call.
+
+ * recog.c (asm_noperands): Tweak.
+	(extract_insn): Do not call asm_noperands for non-asm instructions.
+
+Fri Sep 29 13:20:42 MET DST 2000 Jan Hubicka <jh@suse.cz>
+
+ * recog.c (recog_memoized): Rename to recog_memoized_1.
+ * recog.h (recog_memoized): Rename to recog_memoized_1.
+ (recog_memoized): New macro.
+ * rtl.h (single_set): Rename to single_set_1
+ (single_set): New macro.
+ * rtlanal.c (single_set): Rename to single_set_1; expect clobbers
+ to be last.
+
+	* i386.md (strmovsi_1, strmovhi_1, strmovqi_1):
+ Do not use match_dup of input operands at outputs.
+ Use register_operand for memory expression.
+ (rep_movsi): Put use last, canonicalize.
+ Use register_operand for memory expression.
+ (rep_movqi): Put use last.
+ Use register_operand for memory expression.
+	(strsetsi_1, strsethi_1, strsetqi_1): Do not use match_dup
+ of input operands at outputs. Use register_operand for memory
+ expression.
+ (rep_stossi): Put use last; canonicalize; fix match_dup in
+	the address expression.
+ (rep_stosqi): Likewise.
+ (memcmp expander): Update calls.
+ (cmpstrsi_nz_1, cmpstrsi_1, strlensi_1): Avoid match_dups in
+ the clobbers.
+
+	* i386.md (fp_jcc_3, fp_jcc_4, fp_jcc_5): if_then_else operand is
+ VOIDmode.
+ (fp_jcc_4, fp_jcc_3): Refuse unordered comparisons.
+
2000-09-28 David O'Brien <obrien@FreeBSD.org>
* config/i386/freebsd-aout.h: New, FreeBSD a.out config file.
asm_noperands (body)
rtx body;
{
- if (GET_CODE (body) == ASM_OPERANDS)
- /* No output operands: return number of input operands. */
- return ASM_OPERANDS_INPUT_LENGTH (body);
- if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
- /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
- return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == SET
- && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ switch (GET_CODE (body))
{
- /* Multiple output operands, or 1 output plus some clobbers:
- body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
- int i;
- int n_sets;
-
- /* Count backwards through CLOBBERs to determine number of SETs. */
- for (i = XVECLEN (body, 0); i > 0; i--)
+ case ASM_OPERANDS:
+ /* No output operands: return number of input operands. */
+ return ASM_OPERANDS_INPUT_LENGTH (body);
+ case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
+ return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
+ else
+ return -1;
+ case PARALLEL:
+ if (GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
{
- if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
- break;
- if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
- return -1;
- }
+ /* Multiple output operands, or 1 output plus some clobbers:
+ body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
+ int i;
+ int n_sets;
- /* N_SETS is now number of output operands. */
- n_sets = i;
+ /* Count backwards through CLOBBERs to determine number of SETs. */
+ for (i = XVECLEN (body, 0); i > 0; i--)
+ {
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
+ break;
+ if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
+ return -1;
+ }
- /* Verify that all the SETs we have
- came from a single original asm_operands insn
- (so that invalid combinations are blocked). */
- for (i = 0; i < n_sets; i++)
- {
- rtx elt = XVECEXP (body, 0, i);
- if (GET_CODE (elt) != SET)
- return -1;
- if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
- return -1;
- /* If these ASM_OPERANDS rtx's came from different original insns
- then they aren't allowed together. */
- if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
- != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
- return -1;
+ /* N_SETS is now number of output operands. */
+ n_sets = i;
+
+ /* Verify that all the SETs we have
+ came from a single original asm_operands insn
+ (so that invalid combinations are blocked). */
+ for (i = 0; i < n_sets; i++)
+ {
+ rtx elt = XVECEXP (body, 0, i);
+ if (GET_CODE (elt) != SET)
+ return -1;
+ if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
+ return -1;
+ /* If these ASM_OPERANDS rtx's came from different original insns
+ then they aren't allowed together. */
+ if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
+ != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
+ return -1;
+ }
+ return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
+ + n_sets);
}
- return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
- + n_sets);
- }
- else if (GET_CODE (body) == PARALLEL
- && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
- {
- /* 0 outputs, but some clobbers:
- body is [(asm_operands ...) (clobber (reg ...))...]. */
- int i;
+ else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ {
+ /* 0 outputs, but some clobbers:
+ body is [(asm_operands ...) (clobber (reg ...))...]. */
+ int i;
- /* Make sure all the other parallel things really are clobbers. */
- for (i = XVECLEN (body, 0) - 1; i > 0; i--)
- if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
- return -1;
+ /* Make sure all the other parallel things really are clobbers. */
+ for (i = XVECLEN (body, 0) - 1; i > 0; i--)
+ if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
+ return -1;
- return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
+ }
+ else
+ return -1;
+ default:
+ return -1;
}
- else
- return -1;
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
&& !constrain_operands (reload_completed))
fatal_insn_not_found (insn);
}
+/* Do cached constrain_operands and complain about failures.  */
+int
+constrain_operands_cached (strict)
+ int strict;
+{
+ if (which_alternative == -1)
+ return constrain_operands (strict);
+ else
+ return 1;
+}
\f
/* Analyze INSN and fill in recog_data. */
return;
case SET:
+ if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case PARALLEL:
+ if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
+ && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
+ || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
+ goto asm_insn;
+ else
+ goto normal_insn;
case ASM_OPERANDS:
+ asm_insn:
recog_data.n_operands = noperands = asm_noperands (body);
if (noperands >= 0)
{
}
break;
}
-
- /* FALLTHROUGH */
+ fatal_insn_not_found (insn);
default:
+ normal_insn:
/* Ordinary insn: recognize it, get the operands via insn_extract
and get the constraints. */
/* Random number that should be large enough for all purposes. */
#define MAX_RECOG_ALTERNATIVES 30
+#define recog_memoized(I) (INSN_CODE (I) >= 0 \
+ ? INSN_CODE (I) : recog_memoized_1 (I))
/* Types of operands. */
enum op_type {
extern void init_recog PARAMS ((void));
extern void init_recog_no_volatile PARAMS ((void));
-extern int recog_memoized PARAMS ((rtx));
+extern int recog_memoized_1 PARAMS ((rtx));
extern int check_asm_operands PARAMS ((rtx));
extern int asm_operand_ok PARAMS ((rtx, const char *));
extern int validate_change PARAMS ((rtx, rtx *, rtx, int));
extern int num_validated_changes PARAMS ((void));
extern void cancel_changes PARAMS ((int));
extern int constrain_operands PARAMS ((int));
+extern int constrain_operands_cached PARAMS ((int));
extern int memory_address_p PARAMS ((enum machine_mode, rtx));
extern int strict_memory_address_p PARAMS ((enum machine_mode, rtx));
extern int validate_replace_rtx_subexp PARAMS ((rtx, rtx, rtx, rtx *));