1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "tm-constrs.h"
47 #include "target-def.h"
/* Forward declarations for the static functions that implement the
   target hooks installed in the gcc_target vector below.  NOTE(review):
   this listing is a sampled excerpt; the vax_output_mi_thunk prototype
   continues on an elided line (its trailing parameters are not shown).  */
49 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
50 static void vax_file_start (void);
51 static void vax_init_libfuncs (void);
52 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
54 static int vax_address_cost_1 (rtx);
55 static int vax_address_cost (rtx, bool);
56 static bool vax_rtx_costs (rtx, int, int, int *, bool);
57 static rtx vax_struct_value_rtx (tree, int);
58 static rtx vax_builtin_setjmp_frame_value (void);
60 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below overrides one default target hook with
   the VAX-specific implementation declared above; TARGET_INITIALIZER then
   collects them into the targetm vector at the end of this block.  */
61 #undef TARGET_ASM_ALIGNED_HI_OP
62 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
64 #undef TARGET_ASM_FUNCTION_PROLOGUE
65 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
67 #undef TARGET_ASM_FILE_START
68 #define TARGET_ASM_FILE_START vax_file_start
69 #undef TARGET_ASM_FILE_START_APP_OFF
70 #define TARGET_ASM_FILE_START_APP_OFF true
72 #undef TARGET_INIT_LIBFUNCS
73 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
75 #undef TARGET_ASM_OUTPUT_MI_THUNK
76 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
77 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
78 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
80 #undef TARGET_DEFAULT_TARGET_FLAGS
81 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
83 #undef TARGET_RTX_COSTS
84 #define TARGET_RTX_COSTS vax_rtx_costs
85 #undef TARGET_ADDRESS_COST
86 #define TARGET_ADDRESS_COST vax_address_cost
88 #undef TARGET_PROMOTE_PROTOTYPES
89 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
91 #undef TARGET_STRUCT_VALUE_RTX
92 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
94 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
95 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
/* The one and only instance of the target hook vector.  */
97 struct gcc_target targetm = TARGET_INITIALIZER;
99 /* Set global variables as needed for the options enabled. */
102 override_options (void)
/* Selects the VAX G_float format for DFmode doubles.  NOTE(review):
   declaration/brace lines are elided here; presumably this assignment
   sits under a TARGET_G_FLOAT-style condition -- confirm against the
   full source.  */
104 /* We're VAX floating point, not IEEE floating point. */
106 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
109 /* Generate the assembly code for function entry. FILE is a stdio
110 stream to output the code to. SIZE is an int: how many units of
111 temporary storage to allocate.
113 Refer to the array `regs_ever_live' to determine which registers to
114 save; `regs_ever_live[I]' is nonzero if register number I is ever
115 used in the function. This function is responsible for knowing
116 which registers should not be saved even if used. */
119 vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
/* Build the procedure entry mask: one bit per call-saved register that
   is live in this function (mask accumulation lines are elided).  */
124 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
125 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
/* The entry mask is the first word of every VAX procedure; CALLS
   instructions read it to know which registers to save.  */
128 fprintf (file, "\t.word 0x%x\n", mask);
/* Emit matching DWARF2 call-frame information for the registers the
   CALLS instruction pushes: saved registers, then PC, FP and AP.  */
130 if (dwarf2out_do_frame ())
132 const char *label = dwarf2out_cfi_label ();
135 for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
136 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
137 dwarf2out_reg_save (label, regno, offset -= 4);
139 dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
140 dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
141 dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
142 dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
/* Allocate the local frame.  Two code paths are visible (the selecting
   condition is elided): movab with a negative displacement, or an
   explicit subl2 from the stack pointer.  */
145 size -= STARTING_FRAME_OFFSET;
147 asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
149 asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
152 /* When debugging with stabs, we want to output an extra dummy label
153 so that gas can distinguish between D_float and G_float prior to
154 processing the .stabs directive identifying type double. */
156 vax_file_start (void)
158 default_file_start ();
/* Only under stabs: emit a marker label encoding whether doubles are
   D_float or G_float (see the comment above this function).  */
160 if (write_symbols == DBX_DEBUG)
161 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
164 /* We can use the BSD C library routines for the libgcc calls that are
165 still generated, since that's what they boil down to anyways. When
166 ELF, avoid the user's namespace. */
169 vax_init_libfuncs (void)
/* Route unsigned SImode division/modulus to the BSD C library routines;
   the leading '*' suppresses the usual user-label prefix, and the
   double-underscore names keep out of the user namespace on ELF.  */
171 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
172 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
175 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split each of the N DImode OPERANDS into two SImode halves: LOW[i]
   receives the low word and OPERANDS[i] is replaced by the high word.
   CODE is the operation the caller is emitting (used for the MINUS
   special case below).  NOTE(review): brace/declaration lines are
   elided in this excerpt.  */
178 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
183 for (i = 0; i < n; i++)
186 for (i = 0; i < n; i++)
/* Autodecrement/autoincrement memory operands: both halves reuse the
   same side-effect address in SImode.  */
188 if (MEM_P (operands[i])
189 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
190 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
192 rtx addr = XEXP (operands[i], 0);
193 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
/* Size optimization: if the operand is (mem (reg)) and the base register
   dies in this insn, address the low word as (reg)+ so the high word can
   then be plain (reg) -- saving a displacement byte.  Skipped for a
   negation (MINUS with const0 source), where operand order matters.  */
195 else if (optimize_size && MEM_P (operands[i])
196 && REG_P (XEXP (operands[i], 0))
197 && (code != MINUS || operands[1] != const0_rtx)
198 && find_regno_note (insn, REG_DEAD,
199 REGNO (XEXP (operands[i], 0))))
201 low[i] = gen_rtx_MEM (SImode,
202 gen_rtx_POST_INC (Pmode,
203 XEXP (operands[i], 0)));
204 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
/* Default: take word 0 (low) and word 1 (high) of the DImode operand.  */
208 low[i] = operand_subword (operands[i], 0, 0, DImode);
209 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Print ADDR to FILE in VAX assembler address syntax: (reg), -(reg),
   (reg)+, or the general form offset(breg)[ireg].  NOTE(review): the
   switch case labels (REG, PRE_DEC, POST_INC, PLUS, ...) and several
   brace lines are elided in this excerpt.  */
215 print_operand_address (FILE * file, rtx addr)
218 rtx reg1, breg, ireg;
222 switch (GET_CODE (addr))
226 addr = XEXP (addr, 0);
/* Register indirect: (reg).  */
230 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
/* Autodecrement: -(reg).  */
234 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
/* Autoincrement: (reg)+.  */
238 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
242 /* There can be either two or three things added here. One must be a
243 REG. One can be either a REG or a MULT of a REG and an appropriate
244 constant, and the third can only be a constant or a MEM.
246 We get these two or three things and put the constant or MEM in
247 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
248 a register and can't tell yet if it is a base or index register,
251 reg1 = 0; ireg = 0; breg = 0; offset = 0;
/* First pass: peel one term (constant/MEM, MULT, or REG) off the PLUS.  */
253 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
254 || MEM_P (XEXP (addr, 0)))
256 offset = XEXP (addr, 0);
257 addr = XEXP (addr, 1);
259 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
260 || MEM_P (XEXP (addr, 1)))
262 offset = XEXP (addr, 1);
263 addr = XEXP (addr, 0);
265 else if (GET_CODE (XEXP (addr, 1)) == MULT)
267 ireg = XEXP (addr, 1);
268 addr = XEXP (addr, 0);
270 else if (GET_CODE (XEXP (addr, 0)) == MULT)
272 ireg = XEXP (addr, 0);
273 addr = XEXP (addr, 1);
275 else if (REG_P (XEXP (addr, 1)))
277 reg1 = XEXP (addr, 1);
278 addr = XEXP (addr, 0);
280 else if (REG_P (XEXP (addr, 0)))
282 reg1 = XEXP (addr, 0);
283 addr = XEXP (addr, 1);
/* Second pass: ADDR may itself still be a PLUS of two remaining terms
   (three-term address); classify them the same way.  */
295 else if (GET_CODE (addr) == MULT)
299 gcc_assert (GET_CODE (addr) == PLUS);
300 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
301 || MEM_P (XEXP (addr, 0)))
/* Fold a second constant into the offset already collected.  */
305 if (CONST_INT_P (offset))
306 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
309 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
310 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
313 offset = XEXP (addr, 0);
315 else if (REG_P (XEXP (addr, 0)))
/* Two plain registers: the earlier one becomes the index register.  */
318 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
320 reg1 = XEXP (addr, 0);
324 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
326 ireg = XEXP (addr, 0);
329 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
330 || MEM_P (XEXP (addr, 1)))
334 if (CONST_INT_P (offset))
335 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
338 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
339 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
342 offset = XEXP (addr, 1);
344 else if (REG_P (XEXP (addr, 1)))
347 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
349 reg1 = XEXP (addr, 1);
353 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
355 ireg = XEXP (addr, 1);
359 /* If REG1 is nonzero, figure out if it is a base or index register. */
363 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
366 || (flag_pic && symbolic_operand (offset, SImode)))))
/* PIC sanity checks before printing.  */
377 if (flag_pic && symbolic_operand (offset, SImode))
382 output_operand_lossage ("symbol used with both base and indexed registers");
385 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
386 if (flag_pic > 1 && GET_CODE (offset) == CONST
387 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
388 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
391 output_operand_lossage ("symbol with offset used in PIC mode")
395 /* symbol(reg) isn't PIC, but symbol[reg] is. */
/* Finally print the general form: offset, then (base), then [index].  */
404 output_address (offset);
408 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
412 if (GET_CODE (ireg) == MULT)
413 ireg = XEXP (ireg, 0);
414 gcc_assert (REG_P (ireg));
415 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
/* Fallback: a bare constant address.  */
420 output_addr_const (file, addr);
/* Print operand X to FILE, honoring the VAX punctuation/letter CODEs:
   '#' double suffix, '|' register prefix, 'c'/'C' (reversed) condition
   names, and the CONST_INT transforms D/P/N/R/H/h/B/b/M documented line
   by line below.  NOTE(review): some guard lines and the surrounding
   braces are elided in this excerpt.  */
425 print_operand (FILE *file, rtx x, int code)
428 fputc (ASM_DOUBLE_CHAR, file);
429 else if (code == '|')
430 fputs (REGISTER_PREFIX, file);
431 else if (code == 'c')
432 fputs (cond_name (x), file);
433 else if (code == 'C')
434 fputs (rev_cond_name (x), file);
435 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
436 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
437 else if (code == 'P' && CONST_INT_P (x))
438 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
439 else if (code == 'N' && CONST_INT_P (x))
440 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
441 /* rotl instruction cannot deal with negative arguments. */
442 else if (code == 'R' && CONST_INT_P (x))
443 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
444 else if (code == 'H' && CONST_INT_P (x))
445 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
446 else if (code == 'h' && CONST_INT_P (x))
447 fprintf (file, "$%d", (short) - INTVAL (x));
448 else if (code == 'B' && CONST_INT_P (x))
449 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
450 else if (code == 'b' && CONST_INT_P (x))
451 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
452 else if (code == 'M' && CONST_INT_P (x))
453 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
/* No letter code: print the operand itself -- register name, memory
   address, float literal, or symbolic/integer constant.  */
455 fprintf (file, "%s", reg_names[REGNO (x)]);
457 output_address (XEXP (x, 0));
458 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
461 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
462 sizeof (dstr), 0, 1);
463 fprintf (file, "$0f%s", dstr);
465 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
468 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
469 sizeof (dstr), 0, 1);
470 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
/* Symbolic immediates are not representable under full PIC.  */
474 if (flag_pic > 1 && symbolic_operand (x, SImode))
477 output_operand_lossage ("symbol used as immediate operand");
480 output_addr_const (file, x);
/* NOTE(review): the line below is the body of cond_name (maps a
   comparison rtx code to its VAX condition-suffix string); its
   declaration and case labels are elided in this excerpt.  */
487 switch (GET_CODE (op))
/* rev_cond_name: like cond_name, but returns the suffix for the
   reversed condition (body's case labels elided here).  */
516 rev_cond_name (rtx op)
518 switch (GET_CODE (op))
/* True if C is a floating-point constant the VAX can encode cheaply:
   one of the standard tiny constants, or (per the loop below) a value
   equal to a small integer or its exact reciprocal -- presumably the
   powers of two encodable as short literals; confirm against the full
   source, since the value of x per iteration is elided here.  */
547 vax_float_literal (rtx c)
549 enum machine_mode mode;
550 REAL_VALUE_TYPE r, s;
553 if (GET_CODE (c) != CONST_DOUBLE)
/* 0.0, 1.0 and -1.0 (const_tiny_rtx slots) are always cheap.  */
558 if (c == const_tiny_rtx[(int) mode][0]
559 || c == const_tiny_rtx[(int) mode][1]
560 || c == const_tiny_rtx[(int) mode][2])
563 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
565 for (i = 0; i < 7; i++)
569 REAL_VALUE_FROM_INT (s, x, 0, mode);
571 if (REAL_VALUES_EQUAL (r, s))
/* Also accept the exact reciprocal when it exists.  */
573 ok = exact_real_inverse (mode, &s);
575 if (REAL_VALUES_EQUAL (r, s))
582 /* Return the cost in cycles of a memory address, relative to register
585 Each of the following adds the indicated number of cycles:
589 1 - indexing and/or offset(register)
/* Core of the address-cost hook: accumulate cycle penalties for the
   addressing-mode features ADDR uses (indexing, displacement size,
   indirection, predecrement).  Case labels and some accumulation lines
   are elided in this excerpt.  */
594 vax_address_cost_1 (rtx addr)
596 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
597 rtx plus_op0 = 0, plus_op1 = 0;
599 switch (GET_CODE (addr))
609 indexed = 1; /* 2 on VAX 2 */
612 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
/* Displacements outside [-128, 127] need a word/long form and cost 1.  */
614 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
618 offset = 1; /* 2 on VAX 2 */
620 case LABEL_REF: /* this is probably a byte offset from the pc */
626 plus_op1 = XEXP (addr, 0);
628 plus_op0 = XEXP (addr, 0);
629 addr = XEXP (addr, 1);
632 indir = 2; /* 3 on VAX 2 */
633 addr = XEXP (addr, 0);
639 /* Up to 3 things can be added in an address. They are stored in
640 plus_op0, plus_op1, and addr. */
654 /* Indexing and register+offset can both be used (except on a VAX 2)
655 without increasing execution time over either one alone. */
656 if (reg && indexed && offset)
657 return reg + indir + offset + predec;
658 return reg + indexed + indir + offset + predec;
/* TARGET_ADDRESS_COST hook: 1 for a plain register, otherwise 1 plus
   the feature-based penalty computed by vax_address_cost_1.  */
662 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
664 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
667 /* Cost of an expression on a VAX. This version has costs tuned for the
668 CVAX chip (found in the VAX 3 series) with comments for variations on
671 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
672 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
673 costs on a per cpu basis. */
/* TARGET_RTX_COSTS hook.  Sets *TOTAL to an estimated cycle count for
   expression X (CODE is its rtx code, OUTER_CODE the containing code),
   tuned for the CVAX with comments noting other models.  NOTE(review):
   the switch's case labels and several default/else lines are elided
   in this excerpt; only the cost assignments are visible.  */
676 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
677 bool speed ATTRIBUTE_UNUSED)
679 enum machine_mode mode = GET_MODE (x);
680 int i = 0; /* may be modified in switch */
681 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
685 /* On a VAX, constants from 0..63 are cheap because they can use the
686 1 byte literal constant format. Compare to -1 should be made cheap
687 so that decrement-and-branch insns can be formed more easily (if
688 the value -1 is copied to a register some decrement-and-branch
689 patterns will not match). */
/* CONST_INT: cheap if it (or its complement/negation, depending on the
   context operation) fits the 6-bit short-literal form (<= 077).  */
696 if (outer_code == AND)
698 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
701 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
702 || (outer_code == COMPARE
704 || ((outer_code == PLUS || outer_code == MINUS)
705 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
/* CONST_DOUBLE: cheap float literals, or 64-bit integers whose value
   (or negation, under PLUS) fits the short-literal range.  */
719 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
720 *total = vax_float_literal (x) ? 5 : 8;
722 *total = ((CONST_DOUBLE_HIGH (x) == 0
723 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
724 || (outer_code == PLUS
725 && CONST_DOUBLE_HIGH (x) == -1
726 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
732 return true; /* Implies register operand. */
736 return true; /* Implies register operand. */
/* Per-operation base costs (multiply, divide, modulus, ...).  */
742 *total = 16; /* 4 on VAX 9000 */
745 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
748 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
753 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
756 *total = MAX_COST; /* Mode is not supported. */
764 *total = MAX_COST; /* Mode is not supported. */
772 *total = 30; /* Highly variable. */
773 else if (mode == DFmode)
774 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
777 *total = 11; /* 25 on VAX 2 */
787 *total = MAX_COST; /* Mode is not supported. */
794 *total = (6 /* 4 on VAX 9000 */
795 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
799 *total = 7; /* 17 on VAX 2 */
808 *total = 10; /* 6 on VAX 9000 */
813 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
814 if (CONST_INT_P (XEXP (x, 1)))
815 fmt = "e"; /* all constant rotate counts are short */
820 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
821 /* Small integer operands can use subl2 and addl2. */
822 if ((CONST_INT_P (XEXP (x, 1)))
823 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
833 /* AND is special because the first operand is complemented. */
835 if (CONST_INT_P (XEXP (x, 0)))
837 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
847 else if (mode == SFmode)
849 else if (mode == DImode)
/* Memory accesses: wider modes cost more, plus the address cost for
   anything other than plain register or post-increment addressing.  */
865 if (mode == DImode || mode == DFmode)
866 *total = 5; /* 7 on VAX 2 */
868 *total = 3; /* 4 on VAX 2 */
870 if (!REG_P (x) && GET_CODE (x) != POST_INC)
871 *total += vax_address_cost_1 (x);
877 *total = 3; /* FIXME: Costs need to be checked */
884 /* Now look inside the expression. Operands which are not registers or
885 short constants add to the cost.
887 FMT and I may have been adjusted in the switch above for instructions
888 which require special handling. */
890 while (*fmt++ == 'e')
892 rtx op = XEXP (x, i);
895 code = GET_CODE (op);
897 /* A NOT is likely to be found as the first operand of an AND
898 (in which case the relevant cost is of the operand inside
899 the not) and not likely to be found anywhere else. */
901 op = XEXP (op, 0), code = GET_CODE (op);
906 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
907 && GET_MODE (x) != QImode)
908 *total += 1; /* 2 on VAX 2 */
913 *total += 1; /* 2 on VAX 2 */
916 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
918 /* Registers are faster than floating point constants -- even
919 those constants which can be encoded in a single byte. */
920 if (vax_float_literal (op))
923 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
927 if (CONST_DOUBLE_HIGH (op) != 0
928 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
933 *total += 1; /* 2 on VAX 2 */
934 if (!REG_P (XEXP (op, 0)))
935 *total += vax_address_cost_1 (XEXP (op, 0));
948 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
949 Used for C++ multiple inheritance.
950 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
951 addl2 $DELTA, 4(ap) #adjust first argument
952 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
/* TARGET_ASM_OUTPUT_MI_THUNK hook; see the template in the comment
   above.  DELTA is added to the first (this) argument at 4(ap), then
   control jumps past FUNCTION's two-byte entry mask.  VCALL_OFFSET is
   unused (TARGET_ASM_CAN_OUTPUT_MI_THUNK is the no-vcall default).  */
956 vax_output_mi_thunk (FILE * file,
957 tree thunk ATTRIBUTE_UNUSED,
959 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
/* 0x0ffc = conservative entry mask saving r2-r11.  */
962 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
963 asm_fprintf (file, ",4(%Rap)\n");
964 fprintf (file, "\tjmp ");
965 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
/* +2 skips the target function's own entry mask word.  */
966 fprintf (file, "+2\n");
/* TARGET_STRUCT_VALUE_RTX hook: aggregate return values are passed via
   the fixed register VAX_STRUCT_VALUE_REGNUM, for both directions.  */
970 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
971 int incoming ATTRIBUTE_UNUSED)
973 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
/* TARGET_BUILTIN_SETJMP_FRAME_VALUE hook: builtin setjmp records the
   hard frame pointer rather than the virtual one.  */
977 vax_builtin_setjmp_frame_value (void)
979 return hard_frame_pointer_rtx;
982 /* Worker function for NOTICE_UPDATE_CC. */
/* Worker for NOTICE_UPDATE_CC: record in cc_status what the just-emitted
   insn EXP leaves in the condition codes, so redundant compares can be
   deleted.  NOTE(review): several case labels, CC_STATUS_INIT calls and
   brace lines are elided in this excerpt.  */
985 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
987 if (GET_CODE (exp) == SET)
/* Calls clobber the condition codes (handling elided).  */
989 if (GET_CODE (SET_SRC (exp)) == CALL)
991 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
992 && GET_CODE (SET_DEST (exp)) != PC)
995 /* The integer operations below don't set carry or
996 set it in an incompatible way. That's ok though
997 as the Z bit is all we need when doing unsigned
998 comparisons on the result of these insns (since
999 they're always with 0). Set CC_NO_OVERFLOW to
1000 generate the correct unsigned branches. */
1001 switch (GET_CODE (SET_SRC (exp)))
1004 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1012 cc_status.flags = CC_NO_OVERFLOW;
/* Remember what the insn compared: destination against source.  */
1017 cc_status.value1 = SET_DEST (exp);
1018 cc_status.value2 = SET_SRC (exp);
1021 else if (GET_CODE (exp) == PARALLEL
1022 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1024 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1026 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1028 cc_status.flags = 0;
1029 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1030 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1033 /* PARALLELs whose first element sets the PC are aob,
1034 sob insns. They do change the cc's. */
/* Invalidate value2 if the destination overlaps it -- the recorded
   source no longer holds the compared value.  */
1039 if (cc_status.value1 && REG_P (cc_status.value1)
1041 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1042 cc_status.value2 = 0;
/* Conservatively assume any two MEMs may alias.  */
1043 if (cc_status.value1 && MEM_P (cc_status.value1)
1045 && MEM_P (cc_status.value2))
1046 cc_status.value2 = 0;
1047 /* Actual condition, one line up, should be that value2's address
1048 depends on value1, but that is too much of a pain. */
1051 /* Output integer move instructions. */
/* Return the assembler template for an integer move of MODE from
   operands[1] to operands[0], picking the shortest VAX encoding
   (movzbl/cvtbl/mcoml/ashl/movq/...).  May modify operands[1]/[2] and
   emit preliminary insns via output_asm_insn.  NOTE(review): case
   labels, braces and several guard lines are elided in this excerpt.  */
1054 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1055 enum machine_mode mode)
1058 const char *pattern_hi, *pattern_lo;
/* --- DImode --- */
1063 if (operands[1] == const0_rtx)
1065 if (TARGET_QMATH && optimize_size
1066 && (CONST_INT_P (operands[1])
1067 || GET_CODE (operands[1]) == CONST_DOUBLE)
1069 unsigned HOST_WIDE_INT hval, lval;
/* Collect the constant as 32-bit low/high halves.  */
1072 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1074 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1076 /* Make sure only the low 32 bits are valid. */
1077 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1078 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1082 lval = INTVAL (operands[1]);
1086 /* Here we see if we are trying to see if the 64bit value is really
1087 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1088 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1089 8 bytes - 1 shift byte - 1 short literal byte. */
1091 && (n = exact_log2 (lval & (- lval))) != -1
1092 && (lval >> n) < 64)
1096 #if HOST_BITS_PER_WIDE_INT == 32
1097 /* On 32bit platforms, if the 6bits didn't overflow into the
1098 upper 32bit value that value better be 0. If we have
1099 overflowed, make sure it wasn't too much. */
1102 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1103 n = 0; /* failure */
1105 lval |= hval << (32 - n);
1108 /* If n is 0, then ashq is not the best way to emit this. */
1111 operands[1] = GEN_INT (lval);
1112 operands[2] = GEN_INT (n);
1113 return "ashq %2,%1,%0";
1115 #if HOST_BITS_PER_WIDE_INT == 32
1117 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1118 upper 32bit value. */
1120 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1121 && (hval >> n) < 64)
1123 operands[1] = GEN_INT (hval >> n);
1124 operands[2] = GEN_INT (n + 32);
1125 return "ashq %2,%1,%0";
/* General DImode constant: split into two SImode moves and recurse.  */
1131 && (!MEM_P (operands[0])
1132 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1133 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1134 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1135 && ((CONST_INT_P (operands[1])
1136 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1137 || GET_CODE (operands[1]) == CONST_DOUBLE))
1139 hi[0] = operands[0];
1140 hi[1] = operands[1];
1142 split_quadword_operands (insn, SET, hi, lo, 2);
1144 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1145 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1147 /* The patterns are just movl/movl or pushl/pushl then a movq will
1148 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1149 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1151 if ((!strncmp (pattern_lo, "movl", 4)
1152 && !strncmp (pattern_hi, "movl", 4))
1153 || (!strncmp (pattern_lo, "pushl", 5)
1154 && !strncmp (pattern_hi, "pushl", 5)))
1155 return "movq %1,%0";
/* For -(sp) destinations the high half must be emitted first; otherwise
   low first, then return the remaining half's pattern.  */
1157 if (MEM_P (operands[0])
1158 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1160 output_asm_insn (pattern_hi, hi);
1161 operands[0] = lo[0];
1162 operands[1] = lo[1];
1163 operands[2] = lo[2];
1168 output_asm_insn (pattern_lo, lo);
1169 operands[0] = hi[0];
1170 operands[1] = hi[1];
1171 operands[2] = hi[2];
1175 return "movq %1,%0";
/* --- SImode --- */
1178 if (symbolic_operand (operands[1], SImode))
1180 if (push_operand (operands[0], SImode))
1181 return "pushab %a1";
1182 return "movab %a1,%0";
1185 if (operands[1] == const0_rtx)
1187 if (push_operand (operands[1], SImode))
/* Constants >= 64 don't fit the short-literal form; try the cheaper
   complement / zero-extend / sign-extend / shift encodings.  */
1192 if (CONST_INT_P (operands[1])
1193 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1195 HOST_WIDE_INT i = INTVAL (operands[1]);
1197 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1198 return "mcoml %N1,%0";
1199 if ((unsigned HOST_WIDE_INT)i < 0x100)
1200 return "movzbl %1,%0";
1201 if (i >= -0x80 && i < 0)
1202 return "cvtbl %1,%0";
1204 && (n = exact_log2 (i & (-i))) != -1
1205 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1207 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1208 operands[2] = GEN_INT (n);
1209 return "ashl %2,%1,%0";
1211 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1212 return "movzwl %1,%0";
1213 if (i >= -0x8000 && i < 0)
1214 return "cvtwl %1,%0";
1216 if (push_operand (operands[0], SImode))
1218 return "movl %1,%0";
/* --- HImode --- */
1221 if (CONST_INT_P (operands[1]))
1223 HOST_WIDE_INT i = INTVAL (operands[1]);
1226 else if ((unsigned HOST_WIDE_INT)i < 64)
1227 return "movw %1,%0";
1228 else if ((unsigned HOST_WIDE_INT)~i < 64)
1229 return "mcomw %H1,%0";
1230 else if ((unsigned HOST_WIDE_INT)i < 256)
1231 return "movzbw %1,%0";
1232 else if (i >= -0x80 && i < 0)
1233 return "cvtbw %1,%0";
1235 return "movw %1,%0";
/* --- QImode --- */
1238 if (CONST_INT_P (operands[1]))
1240 HOST_WIDE_INT i = INTVAL (operands[1]);
1243 else if ((unsigned HOST_WIDE_INT)~i < 64)
1244 return "mcomb %B1,%0";
1246 return "movb %1,%0";
1253 /* Output integer add instructions.
1255 The space-time-opcode tradeoffs for addition vary by model of VAX.
1257 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1258 but it not faster on other models.
1260 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1261 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1262 a register is used in an address too soon after it is set.
1263 Compromise by using movab only when it is shorter than the add
1264 or the base register in the address is one of sp, ap, and fp,
1265 which are not modified very often. */
/* Return the assembler template for an integer add in MODE, choosing
   among incl/decl, addX2/addX3, subX2/subX3 (for negative constants),
   movab/pushab address arithmetic, and the DImode add-with-carry
   sequences.  See the space/time tradeoff comment above.  NOTE(review):
   case labels, braces and several guard/else lines are elided in this
   excerpt.  */
1268 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1275 const char *pattern;
/* --- DImode --- */
/* NOTE(review): "&& 0" makes this branch dead code as written; it looks
   like a deliberately disabled path -- confirm before relying on it.  */
1279 if (TARGET_QMATH && 0)
1282 split_quadword_operands (insn, PLUS, operands, low, 3);
1286 gcc_assert (rtx_equal_p (operands[0], operands[1]));
/* NOTE(review): macro name below is misspelled (extra trailing 'S'
   vs. NO_EXTERNAL_INDIRECT_ADDRESS used elsewhere in this file), so
   these two asserts are never compiled -- confirm and fix upstream.  */
1287 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1288 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1289 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1292 /* No reason to add a 0 to the low part and thus no carry, so just
1293 emit the appropriate add/sub instruction. */
1294 if (low[2] == const0_rtx)
1295 return vax_output_int_add (NULL, operands, SImode);
1297 /* Are we doing addition or subtraction? */
1298 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1300 /* We can't use vax_output_int_add since some the patterns don't
1301 modify the carry bit. */
1304 if (low[2] == constm1_rtx)
1305 pattern = "decl %0";
1307 pattern = "subl2 $%n2,%0";
1311 if (low[2] == const1_rtx)
1312 pattern = "incl %0";
1314 pattern = "addl2 %2,%0";
1316 output_asm_insn (pattern, low);
1318 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1319 two 32bit parts, we complement each and then add one to
1320 low part. We know that the low part can't overflow since
1321 it's value can never be 0. */
1323 return "sbwc %N2,%0";
1324 return "adwc %2,%0";
1327 /* Add low parts. */
1328 if (rtx_equal_p (operands[0], operands[1]))
1330 if (low[2] == const0_rtx)
1331 /* Should examine operand, punt if not POST_INC. */
1332 pattern = "tstl %0", carry = 0;
1333 else if (low[2] == const1_rtx)
1334 pattern = "incl %0";
1336 pattern = "addl2 %2,%0";
1340 if (low[2] == const0_rtx)
1341 pattern = "movl %1,%0", carry = 0;
1343 pattern = "addl3 %2,%1,%0";
1346 output_asm_insn (pattern, low);
1348 /* If CARRY is 0, we don't have any carry value to worry about. */
1349 return get_insn_template (CODE_FOR_addsi3, insn);
1350 /* %0 = C + %1 + %2 */
1351 if (!rtx_equal_p (operands[0], operands[1]))
1352 output_asm_insn ((operands[1] == const0_rtx
1354 : "movl %1,%0"), operands);
1355 return "adwc %2,%0";
/* --- SImode --- */
1359 if (rtx_equal_p (operands[0], operands[1]))
1361 if (operands[2] == const1_rtx)
1363 if (operands[2] == constm1_rtx)
/* A negative constant in short-literal range: subtract its negation.  */
1365 if (CONST_INT_P (operands[2])
1366 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1367 return "subl2 $%n2,%0";
/* Large constant + register: movab with a displacement.  The REGNO > 11
   test allows sp/ap/fp (see the tradeoff comment above the function).  */
1368 if (CONST_INT_P (operands[2])
1369 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1370 && REG_P (operands[1])
1371 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1372 || REGNO (operands[1]) > 11))
1373 return "movab %c2(%1),%0";
1374 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1375 return "movab %a2[%0],%0";
1376 return "addl2 %2,%0";
1379 if (rtx_equal_p (operands[0], operands[2]))
1381 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1382 return "movab %a1[%0],%0";
1383 return "addl2 %1,%0";
/* Three-operand forms.  */
1386 if (CONST_INT_P (operands[2])
1387 && INTVAL (operands[2]) < 32767
1388 && INTVAL (operands[2]) > -32768
1389 && REG_P (operands[1])
1390 && push_operand (operands[0], SImode))
1391 return "pushab %c2(%1)";
1393 if (CONST_INT_P (operands[2])
1394 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1395 return "subl3 $%n2,%1,%0";
1397 if (CONST_INT_P (operands[2])
1398 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1399 && REG_P (operands[1])
1400 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1401 || REGNO (operands[1]) > 11))
1402 return "movab %c2(%1),%0";
1404 /* Add this if using gcc on a VAX 3xxx:
1405 if (REG_P (operands[1]) && REG_P (operands[2]))
1406 return "movab (%1)[%2],%0";
1409 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1411 if (push_operand (operands[0], SImode))
1412 return "pushab %a2[%1]";
1413 return "movab %a2[%1],%0";
1416 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1418 if (push_operand (operands[0], SImode))
1419 return "pushab %a1[%2]";
1420 return "movab %a1[%2],%0";
1423 if (flag_pic && REG_P (operands[0])
1424 && symbolic_operand (operands[2], SImode))
1425 return "movab %a2,%0;addl2 %1,%0";
/* NOTE(review): both sides of this || test operands[1]; the second side
   was presumably meant to be operands[2] -- confirm against upstream,
   where this was later corrected.  */
1428 && (symbolic_operand (operands[1], SImode)
1429 || symbolic_operand (operands[1], SImode)))
1432 return "addl3 %1,%2,%0";
/* --- HImode --- */
1435 if (rtx_equal_p (operands[0], operands[1]))
1437 if (operands[2] == const1_rtx)
1439 if (operands[2] == constm1_rtx)
1441 if (CONST_INT_P (operands[2])
1442 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1443 return "subw2 $%n2,%0";
1444 return "addw2 %2,%0";
1446 if (rtx_equal_p (operands[0], operands[2]))
1447 return "addw2 %1,%0";
1448 if (CONST_INT_P (operands[2])
1449 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1450 return "subw3 $%n2,%1,%0";
1451 return "addw3 %1,%2,%0";
/* --- QImode --- */
1454 if (rtx_equal_p (operands[0], operands[1]))
1456 if (operands[2] == const1_rtx)
1458 if (operands[2] == constm1_rtx)
1460 if (CONST_INT_P (operands[2])
1461 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1462 return "subb2 $%n2,%0";
1463 return "addb2 %2,%0";
1465 if (rtx_equal_p (operands[0], operands[2]))
1466 return "addb2 %1,%0";
1467 if (CONST_INT_P (operands[2])
1468 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1469 return "subb3 $%n2,%1,%0";
1470 return "addb3 %1,%2,%0";
/* Return the assembler template for an integer subtract in MODE.  Only
   the DImode handling is visible in this excerpt (SImode/HImode/QImode
   cases, case labels and braces are elided).  For DImode: negate with
   mnegl/mnegl/sbwc, or subtract the low words then sbwc the high.  */
1478 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1485 const char *pattern;
/* NOTE(review): "&& 0" makes this branch dead code as written; it looks
   like a deliberately disabled path -- confirm before relying on it.  */
1488 if (TARGET_QMATH && 0)
1491 split_quadword_operands (insn, MINUS, operands, low, 3);
/* Full 64-bit negation: 0 - operand.  */
1495 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1497 /* Negation is tricky. It's basically complement and increment.
1498 Negate hi, then lo, and subtract the carry back. */
1499 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1500 || (MEM_P (operands[0])
1501 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1502 fatal_insn ("illegal operand detected", insn);
1503 output_asm_insn ("mnegl %2,%0", operands);
1504 output_asm_insn ("mnegl %2,%0", low);
1505 return "sbwc $0,%0";
1507 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1508 gcc_assert (rtx_equal_p (low[0], low[1]));
1509 if (low[2] == const1_rtx)
1510 output_asm_insn ("decl %0", low);
1512 output_asm_insn ("subl2 %2,%0", low);
1513 return "sbwc %2,%0";
1516 /* Subtract low parts. */
1517 if (rtx_equal_p (operands[0], operands[1]))
/* Subtracting 0 from the low word generates no carry: skip the insn.  */
1519 if (low[2] == const0_rtx)
1520 pattern = 0, carry = 0;
1521 else if (low[2] == constm1_rtx)
1522 pattern = "decl %0";
1524 pattern = "subl2 %2,%0";
1528 if (low[2] == constm1_rtx)
1529 pattern = "decl %0";
1530 else if (low[2] == const0_rtx)
1531 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1533 pattern = "subl3 %2,%1,%0";
/* NOTE(review): pattern may be null on the low[2]==const0_rtx path
   above; presumably an "if (pattern)" guard is on an elided line --
   confirm against the full source.  */
1536 output_asm_insn (pattern, low);
/* Subtract high parts with borrow.  */
1539 if (!rtx_equal_p (operands[0], operands[1]))
1540 return "movl %1,%0;sbwc %2,%0";
1541 return "sbwc %2,%0";
1542 /* %0 = %2 - %1 - C */
/* CARRY == 0 case: a plain SImode subtract suffices.  */
1544 return get_insn_template (CODE_FOR_subsi3, insn);
1552 /* True if X is an rtx for a constant that is a valid address.  */
/* LABEL_REF, SYMBOL_REF, CONST_INT and HIGH are accepted directly;
   anything that is not a CONST is then rejected.  For a CONST, when
   NO_EXTERNAL_INDIRECT_ADDRESS is defined, a (const (plus (symbol_ref)
   ...)) naming a non-local symbol is additionally rejected under PIC
   (the symbol may live in a sharable image).
   NOTE(review): elided listing — the returns between the tests are not
   visible here; confirm against full vax.c.  */
1555 legitimate_constant_address_p (rtx x)
1557   if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1558 	  || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1560   if (GET_CODE (x) != CONST)
1562 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* Reject (const (plus (symbol_ref EXTERNAL) ...)) — external symbols
   cannot be used as constant addresses in this configuration.  */
1564       && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1565       && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1571 /* True if the constant value X is a legitimate general operand.
1572    It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE.  */
/* X is unused — presumably every constant is legitimate on the VAX and
   the body simply returns true; the body is not visible in this elided
   listing, so confirm against full vax.c.  */
1575 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
1580 /* The other macros defined here are used only in legitimate_address_p ().  */
1582 /* Nonzero if X is a hard reg that can be used as an index
1583    or, if not strict, if it is a pseudo reg.  */
/* STRICT selects between strict RTL checking (hard-reg class test via
   REGNO_OK_FOR_INDEX_P) and the lenient pre-reload form (any REG).  */
1584 #define INDEX_REGISTER_P(X, STRICT) \
1585 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1587 /* Nonzero if X is a hard reg that can be used as a base reg
1588    or, if not strict, if it is a pseudo reg.  */
/* Same strict/lenient split as INDEX_REGISTER_P, but for base registers.  */
1589 #define BASE_REGISTER_P(X, STRICT) \
1590 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1592 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1594 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1595    are no SYMBOL_REFs for external symbols present.  */
/* INDIRECT says whether the address will be used indirectly; under PIC a
   non-local SYMBOL_REF is only acceptable when not indirected.  */
1598 indirectable_constant_address_p (rtx x, bool indirect)
1600   if (GET_CODE (x) == SYMBOL_REF)
1601     return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
/* For (const (plus (symbol_ref) ...)): acceptable unless it names a
   non-local symbol.  NOTE(review): elided listing — the lines between
   the CONST test and this disjunction are not shown.  */
1603   if (GET_CODE (x) == CONST)
1605 	   || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1606 	   || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1608   return CONSTANT_ADDRESS_P (x);
1611 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
/* Without the external-indirect restriction the generic test suffices;
   INDIRECT is irrelevant.  */
1614 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1616   return CONSTANT_ADDRESS_P (x);
1619 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1621 /* True if X is an address which can be indirected.  External symbols
1622    could be in a sharable image library, so we disallow those.  */
/* Accepts: an indirectable constant address, a base register, or
   (plus base-reg constant); under PIC the displacement must be a
   CONST_INT.  STRICT/INDIRECT are threaded through to the helpers.  */
1625 indirectable_address_p (rtx x, bool strict, bool indirect)
1627   if (indirectable_constant_address_p (x, indirect)
1628       || BASE_REGISTER_P (x, strict))
/* Only reg+constant sums remain acceptable past this point.  */
1630   if (GET_CODE (x) != PLUS
1631       || !BASE_REGISTER_P (XEXP (x, 0), strict)
1632       || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1634   return indirectable_constant_address_p (XEXP (x, 1), indirect);
1637 /* Return true if x is a valid address not using indexing.
1638    (This much is the easy part.)  */
/* Covers registers (including reload pseudos via reg_equiv_mem),
   constant addresses, directly indirectable addresses, memory indirection
   (MEM of an indirectable address), and predecrement/postincrement of a
   base register.  NOTE(review): elided listing — the surrounding REG_P
   test for the reg_equiv_mem branch is not visible here.  */
1640 nonindexed_address_p (rtx x, bool strict)
1645       extern rtx *reg_equiv_mem;
/* During reload a pseudo may stand for a memory equivalence; accept it
   if that equivalence is itself an indirectable address.  */
1646       if (! reload_in_progress
1647 	  || reg_equiv_mem[REGNO (x)] == 0
1648 	  || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
1651   if (indirectable_constant_address_p (x, false))
1653   if (indirectable_address_p (x, strict, false))
1655   xfoo0 = XEXP (x, 0);
/* @addr: memory indirection — the inner address is being indirected,
   hence the TRUE third argument.  */
1656   if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
/* -(reg) and (reg)+ auto-modify modes.  */
1658   if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1659       && BASE_REGISTER_P (xfoo0, strict))
1664 /* True if PROD is either a reg times size of mode MODE and MODE is less
1665    than or equal 8 bytes, or just a reg if MODE is one byte.  */
/* The VAX indexed addressing mode scales the index register by the
   operand size, so a MULT by exactly GET_MODE_SIZE (mode) collapses into
   plain indexing; either MULT operand order is accepted.  */
1668 index_term_p (rtx prod, enum machine_mode mode, bool strict)
/* Byte-sized operands need no scaling: a bare register suffices.  */
1672   if (GET_MODE_SIZE (mode) == 1)
1673     return BASE_REGISTER_P (prod, strict);
1675   if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1678   xfoo0 = XEXP (prod, 0);
1679   xfoo1 = XEXP (prod, 1);
/* (mult size reg) ...  */
1681   if (CONST_INT_P (xfoo0)
1682       && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1683       && INDEX_REGISTER_P (xfoo1, strict))
/* ... or (mult reg size).  */
1686   if (CONST_INT_P (xfoo1)
1687       && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1688       && INDEX_REGISTER_P (xfoo0, strict))
1694 /* Return true if X is the sum of a register
1695    and a valid index term for mode MODE.  */
/* PLUS is commutative, so both operand orders are checked.  */
1697 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1701   if (GET_CODE (x) != PLUS)
1704   xfoo0 = XEXP (x, 0);
1705   xfoo1 = XEXP (x, 1);
1707   if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1710   if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1716 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address.  */
/* XFOO0 is the constant (offset) part, XFOO1 the register/index part.
   Under PIC, symbolic offsets are restricted: a bare base register is
   only allowed for QImode, and a symbolic XFOO0 disqualifies the
   address entirely.  */
1718 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1720   if (!CONSTANT_ADDRESS_P (xfoo0))
1722   if (BASE_REGISTER_P (xfoo1, strict))
1723     return !flag_pic || mode == QImode;
1724   if (flag_pic && symbolic_operand (xfoo0, SImode))
1726   return reg_plus_index_p (xfoo1, mode, strict);
1729 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1730    that is a valid memory address for an instruction.
1731    The MODE argument is the machine mode for the MEM expression
1732    that wants to use this address.  */
/* Top-level address validator: first the non-indexed forms, then the two
   shapes an indexed address can take once combined/reassociated.  */
1734 legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1738   if (nonindexed_address_p (x, strict))
1741   if (GET_CODE (x) != PLUS)
1744   /* Handle <address>[index] represented with index-sum outermost */
1746   xfoo0 = XEXP (x, 0);
1747   xfoo1 = XEXP (x, 1);
/* (plus index-term address) in either operand order.  */
1749   if (index_term_p (xfoo0, mode, strict)
1750       && nonindexed_address_p (xfoo1, strict))
1753   if (index_term_p (xfoo1, mode, strict)
1754       && nonindexed_address_p (xfoo0, strict))
1757   /* Handle offset(reg)[index] with offset added outermost */
1759   if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1760       || indexable_address_p (xfoo1, xfoo0, mode, strict))
1766 /* Return true if x (a legitimate address expression) has an effect that
1767    depends on the machine mode it is used for.  On the VAX, the predecrement
1768    and postincrement address depend thus (the amount of decrement or
1769    increment being the length of the operand) and all indexed address depend
1770    thus (because the index scale factor is the length of the operand).  */
/* The listed reg+constant forms are the mode-INdependent ones; anything
   else reaching the end (i.e. indexed addresses) is mode-dependent.
   NOTE(review): elided listing — the return statements between these
   tests are not visible; confirm polarity against full vax.c.  */
1773 vax_mode_dependent_address_p (rtx x)
1777   /* Auto-increment cases are now dealt with generically in recog.c.  */
1778   if (GET_CODE (x) != PLUS)
1781   xfoo0 = XEXP (x, 0);
1782   xfoo1 = XEXP (x, 1);
/* Plain displacement(reg), either operand order.  */
1784   if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1786   if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
/* Symbolic displacement(reg) is only position-independent-safe without
   PIC.  */
1788   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1790   if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
/* Rewrite X when it is a DImode memory operand whose address is illegal
   for the add/sub-with-carry patterns: load the address into a fresh
   pseudo and re-wrap it as a MEM.  Under NO_EXTERNAL_INDIRECT_ADDRESS
   with PIC, a (const (plus sym offset)) address is split so only the
   symbol is loaded and the offset is re-added afterwards.  Returns the
   (possibly replaced) operand.  NOTE(review): elided listing — the
   enclosing braces and final return are not visible here.  */
1797 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1799   if (illegal_addsub_di_memory_operand (x, mode))
1801       rtx addr = XEXP (x, 0);
1802       rtx temp = gen_reg_rtx (Pmode);
1804 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* Split sym+offset so the move loads only the symbol part.  */
1805       if (GET_CODE (addr) == CONST && flag_pic)
1807 	  offset = XEXP (XEXP (addr, 0), 1);
1808 	  addr = XEXP (XEXP (addr, 0), 0);
1811       emit_move_insn (temp, addr);
/* Re-apply the split-off offset, if any, then rebuild the MEM around
   the legitimized address.  */
1813 	temp = gen_rtx_PLUS (Pmode, temp, offset);
1814       x = gen_rtx_MEM (DImode, temp);
/* Expand a DImode add (CODE == PLUS) or subtract (CODE == MINUS) into
   VAX insns.  Depending on the operands this becomes: the old quadword
   pattern (non-QMATH), a single SImode op on the high words when the low
   word of the addend is zero, a left shift by 1 when adding a value to
   itself, or the two-operand add/subtract-with-carry (adcdi3/sbcdi3)
   pattern after the operands have been fixed up and arranged.
   NOTE(review): elided listing — braces, else arms and early returns
   between these lines are not shown; confirm against full vax.c.  */
1820 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
/* True when the low word of the second source is zero, so only the
   high words need a real operation.  */
1822   int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1825   rtx (*gen_old_insn)(rtx, rtx, rtx);
1826   rtx (*gen_si_insn)(rtx, rtx, rtx);
1827   rtx (*gen_insn)(rtx, rtx, rtx);
/* Select the generator triple for PLUS ...  */
1831       gen_old_insn = gen_adddi3_old;
1832       gen_si_insn = gen_addsi3;
1833       gen_insn = gen_adcdi3;
1835   else if (code == MINUS)
/* ... or for MINUS.  */
1837       gen_old_insn = gen_subdi3_old;
1838       gen_si_insn = gen_subsi3;
1839       gen_insn = gen_sbcdi3;
1844   /* If this is addition (thus operands are commutative) and if there is one
1845      addend that duplicates the destination, we want that addend to be the
1848       && rtx_equal_p (operands[0], operands[2])
1849       && !rtx_equal_p (operands[1], operands[2]))
1852       operands[2] = operands[1];
/* Non-QMATH fallback: use the old three-operand quadword pattern.  */
1858       emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
/* hi_only path: copy a memory source into the destination register
   first so the SImode op can work in place.  */
1862       if (!rtx_equal_p (operands[0], operands[1])
1863 	  && (REG_P (operands[0]) && MEM_P (operands[1])))
1865 	  emit_move_insn (operands[0], operands[1]);
1866 	  operands[1] = operands[0];
1869       operands[0] = fixup_mathdi_operand (operands[0], DImode);
1870       operands[1] = fixup_mathdi_operand (operands[1], DImode);
1871       operands[2] = fixup_mathdi_operand (operands[2], DImode);
/* Low word of the addend is zero: move the low word over (if needed)
   and perform a single SImode op on the high words.  */
1873       if (!rtx_equal_p (operands[0], operands[1]))
1874 	emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1875 			operand_subword (operands[1], 0, 0, DImode));
1877       emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1878 				 operand_subword (operands[1], 1, 0, DImode),
1879 				 operand_subword (operands[2], 1, 0, DImode)));
1883   /* If we are adding the same value together, that's really a multiply by 2,
1884      and that's just a left shift of 1.  */
1885   if (rtx_equal_p (operands[1], operands[2]))
1887       gcc_assert (code != MINUS);
1888       emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1892   operands[0] = fixup_mathdi_operand (operands[0], DImode);
1894   /* If an operand is the same as operand[0], use the operand[0] rtx
1895      because fixup will return an equivalent rtx but not an equal one.  */
1897   if (rtx_equal_p (operands[0], operands[1]))
1898     operands[1] = operands[0];
1900   operands[1] = fixup_mathdi_operand (operands[1], DImode);
1902   if (rtx_equal_p (operands[0], operands[2]))
1903     operands[2] = operands[0];
1905   operands[2] = fixup_mathdi_operand (operands[2], DImode);
1907   /* If we are subtracting not from ourselves [d = a - b], and because the
1908      carry ops are two operand only, we would need to do a move prior to
1909      the subtract.  And if d == b, we would need a temp otherwise
1910      [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
1911      into d = -b, d += a.  Since -b can never overflow, even if b == d,
1914      If we are doing addition, since the carry ops are two operand, if
1915      we aren't adding to ourselves, move the first addend to the
1916      destination first.  */
1918   gcc_assert (operands[1] != const0_rtx || code == MINUS);
1919   if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1921       if (code == MINUS && CONSTANT_P (operands[1]))
1923 	  temp = gen_reg_rtx (DImode);
/* d = -b (sbcdi3 with a zero minuend), then fall through to d += a.  */
1924 	  emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1926 	  gen_insn = gen_adcdi3;
1927 	  operands[2] = operands[1];
1928 	  operands[1] = operands[0];
/* Addition (or plain subtract-in-place): seed the destination with the
   first operand.  */
1931 	emit_move_insn (operands[0], operands[1]);
1934   /* Subtracting a constant will have been rewritten to an addition of the
1935      negative of that constant before we get here.  */
1936   gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1937   emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
/* Return whether LO and HI are suitable as the low/high halves of a
   double-word operand of mode MODE: consecutive registers, small
   non-negative constants with a zero high part, or memory references
   whose addresses differ by exactly GET_MODE_SIZE (mode).
   NOTE(review): elided listing — case labels, returns and the closing
   brace are not visible here; confirm against full vax.c.  */
1942 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1944   HOST_WIDE_INT lo_offset;
1945   HOST_WIDE_INT hi_offset;
/* Mixed rtx kinds can never be adjacent halves.  */
1947   if (GET_CODE (lo) != GET_CODE (hi))
/* Registers: must be a consecutive SImode pair.  */
1951     return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
/* Constants: only a short literal (0..63) with zero high word.  */
1952   if (CONST_INT_P (lo))
1953     return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
/* NOTE(review): this second CONST_INT_P (lo) test is unreachable as
   shown — the previous identical test already returned.  Verify intent
   (perhaps one of them should test HI) against full vax.c.  */
1954   if (CONST_INT_P (lo))
1955     return mode != SImode;
/* Volatile memory must not be re-accessed as split halves.  */
1960   if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
/* (reg)+ auto-increment: the same address serves both halves.  */
1966   if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
1967     return rtx_equal_p (lo, hi);
/* Extract the constant displacement from each address.  */
1969   switch (GET_CODE (lo))
1979       if (!CONST_INT_P (XEXP (lo, 1)))
1981       lo_offset = INTVAL (XEXP (lo, 1));
1988   switch (GET_CODE (hi))
1998       if (!CONST_INT_P (XEXP (hi, 1)))
2000       hi_offset = INTVAL (XEXP (hi, 1));
2007   if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
/* Same base expression, displacements exactly one operand apart.  */
2010   return rtx_equal_p (lo, hi)
2011 	 && hi_offset - lo_offset == GET_MODE_SIZE (mode);