1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
35 #include "insn-attr.h"
41 #include "diagnostic-core.h"
43 #include "tm-constrs.h"
46 #include "target-def.h"
/* Forward declarations for the static implementations of the target
   hooks installed in the target-structure macro table below.  */
48 static void vax_option_override (void);
49 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
50 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
51 static void vax_file_start (void);
52 static void vax_init_libfuncs (void);
53 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55 static int vax_address_cost_1 (rtx);
56 static int vax_address_cost (rtx, bool);
57 static bool vax_rtx_costs (rtx, int, int, int *, bool);
58 static rtx vax_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
60 static void vax_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
62 static rtx vax_struct_value_rtx (tree, int);
63 static rtx vax_builtin_setjmp_frame_value (void);
64 static void vax_asm_trampoline_template (FILE *);
65 static void vax_trampoline_init (rtx, tree, rtx);
66 static int vax_return_pops_args (tree, tree, int);
68 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below replaces a default target hook with
   the VAX-specific implementation, all of which are collected into
   TARGET_INITIALIZER when `targetm' is defined at the end.  */
69 #undef TARGET_ASM_ALIGNED_HI_OP
70 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
72 #undef TARGET_ASM_FUNCTION_PROLOGUE
73 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
75 #undef TARGET_ASM_FILE_START
76 #define TARGET_ASM_FILE_START vax_file_start
77 #undef TARGET_ASM_FILE_START_APP_OFF
78 #define TARGET_ASM_FILE_START_APP_OFF true
80 #undef TARGET_INIT_LIBFUNCS
81 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
83 #undef TARGET_ASM_OUTPUT_MI_THUNK
84 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
85 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
86 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
88 #undef TARGET_DEFAULT_TARGET_FLAGS
89 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
91 #undef TARGET_RTX_COSTS
92 #define TARGET_RTX_COSTS vax_rtx_costs
93 #undef TARGET_ADDRESS_COST
94 #define TARGET_ADDRESS_COST vax_address_cost
96 #undef TARGET_PROMOTE_PROTOTYPES
97 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
99 #undef TARGET_FUNCTION_ARG
100 #define TARGET_FUNCTION_ARG vax_function_arg
101 #undef TARGET_FUNCTION_ARG_ADVANCE
102 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
104 #undef TARGET_STRUCT_VALUE_RTX
105 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
107 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
108 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
110 #undef TARGET_LEGITIMATE_ADDRESS_P
111 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
113 #undef TARGET_FRAME_POINTER_REQUIRED
/* The VAX always uses a frame pointer.  */
114 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
116 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
117 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
118 #undef TARGET_TRAMPOLINE_INIT
119 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
120 #undef TARGET_RETURN_POPS_ARGS
121 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
123 #undef TARGET_OPTION_OVERRIDE
124 #define TARGET_OPTION_OVERRIDE vax_option_override
126 struct gcc_target targetm = TARGET_INITIALIZER;
128 /* Set global variables as needed for the options enabled. */
131 vax_option_override (void)
133 /* We're VAX floating point, not IEEE floating point. */
/* DFmode is set to the VAX G_float layout here; the SFmode format is
   presumably set on an adjacent line not visible in this view.  */
135 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
/* Give the subtarget (e.g. VMS, NetBSD) a chance to adjust options.  */
137 #ifdef SUBTARGET_OVERRIDE_OPTIONS
138 SUBTARGET_OVERRIDE_OPTIONS;
142 /* Generate the assembly code for function entry. FILE is a stdio
143 stream to output the code to. SIZE is an int: how many units of
144 temporary storage to allocate.
146 Refer to the array `regs_ever_live' to determine which registers to
147 save; `regs_ever_live[I]' is nonzero if register number I is ever
148 used in the function. This function is responsible for knowing
149 which registers should not be saved even if used. */
152 vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
/* Build the VAX procedure entry mask: one bit per call-saved register
   that is ever live in this function.  */
157 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
158 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
161 fprintf (file, "\t.word 0x%x\n", mask);
/* Emit dwarf2 call-frame information for the registers the CALLS
   instruction saves, walking register numbers downward.  */
163 if (dwarf2out_do_frame ())
165 const char *label = dwarf2out_cfi_label (false);
168 for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
169 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
170 dwarf2out_reg_save (label, regno, offset -= 4);
172 dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
173 dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
174 dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
175 dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
178 size -= STARTING_FRAME_OFFSET;
/* Allocate the local frame; either a movab or a subl2 adjusts the
   stack pointer (the selecting condition is not visible here).  */
180 asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
182 asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
185 /* When debugging with stabs, we want to output an extra dummy label
186 so that gas can distinguish between D_float and G_float prior to
187 processing the .stabs directive identifying type double. */
189 vax_file_start (void)
191 default_file_start ();
/* Only stabs debugging needs the marker label.  */
193 if (write_symbols == DBX_DEBUG)
194 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
197 /* We can use the BSD C library routines for the libgcc calls that are
198 still generated, since that's what they boil down to anyways. When
199 ELF, avoid the user's namespace. */
202 vax_init_libfuncs (void)
204 if (TARGET_BSD_DIVMOD)
/* Unsigned SImode divide/remainder use the BSD library entry points;
   the leading '*' marks the names as already assembler-decorated.  */
206 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
207 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
211 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split the DImode OPERANDS of INSN into SImode halves: the low words
   are stored into LOW and the high words are left in OPERANDS.  CODE
   is the operation being performed; N is the operand count.  */
214 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
219 for (i = 0; i < n; i++)
222 for (i = 0; i < n; i++)
/* Autoincrement/autodecrement memory: reuse the same address for both
   halves, since the side effect supplies the stepping.  */
224 if (MEM_P (operands[i])
225 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
226 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
228 rtx addr = XEXP (operands[i], 0);
229 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
/* When optimizing for size and the base register dies in this insn,
   turn (reg) into (reg)+ for the low half to save an offset byte.  */
231 else if (optimize_size && MEM_P (operands[i])
232 && REG_P (XEXP (operands[i], 0))
233 && (code != MINUS || operands[1] != const0_rtx)
234 && find_regno_note (insn, REG_DEAD,
235 REGNO (XEXP (operands[i], 0))))
237 low[i] = gen_rtx_MEM (SImode,
238 gen_rtx_POST_INC (Pmode,
239 XEXP (operands[i], 0)));
240 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
/* General case: word 0 is the low half, word 1 the high half.  */
244 low[i] = operand_subword (operands[i], 0, 0, DImode);
245 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Print ADDR to FILE using VAX assembler address syntax: (reg),
   -(reg), (reg)+, offset(breg)[ireg], or an absolute/symbolic
   reference.  Diagnoses address forms that are invalid under PIC.  */
251 print_operand_address (FILE * file, rtx addr)
254 rtx reg1, breg, ireg;
258 switch (GET_CODE (addr))
262 addr = XEXP (addr, 0);
/* Register indirect.  */
266 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
/* Autodecrement.  */
270 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
/* Autoincrement.  */
274 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
278 /* There can be either two or three things added here. One must be a
279 REG. One can be either a REG or a MULT of a REG and an appropriate
280 constant, and the third can only be a constant or a MEM.
282 We get these two or three things and put the constant or MEM in
283 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
284 a register and can't tell yet if it is a base or index register,
287 reg1 = 0; ireg = 0; breg = 0; offset = 0;
/* First pass: peel one component off the PLUS.  */
289 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
290 || MEM_P (XEXP (addr, 0)))
292 offset = XEXP (addr, 0);
293 addr = XEXP (addr, 1);
295 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
296 || MEM_P (XEXP (addr, 1)))
298 offset = XEXP (addr, 1);
299 addr = XEXP (addr, 0);
301 else if (GET_CODE (XEXP (addr, 1)) == MULT)
303 ireg = XEXP (addr, 1);
304 addr = XEXP (addr, 0);
306 else if (GET_CODE (XEXP (addr, 0)) == MULT)
308 ireg = XEXP (addr, 0);
309 addr = XEXP (addr, 1);
311 else if (REG_P (XEXP (addr, 1)))
313 reg1 = XEXP (addr, 1);
314 addr = XEXP (addr, 0);
316 else if (REG_P (XEXP (addr, 0)))
318 reg1 = XEXP (addr, 0);
319 addr = XEXP (addr, 1);
331 else if (GET_CODE (addr) == MULT)
/* Second pass: what remains must be another PLUS; classify its
   two components the same way, folding constant offsets together.  */
335 gcc_assert (GET_CODE (addr) == PLUS);
336 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
337 || MEM_P (XEXP (addr, 0)))
341 if (CONST_INT_P (offset))
342 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
345 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
346 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
349 offset = XEXP (addr, 0);
351 else if (REG_P (XEXP (addr, 0)))
354 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
356 reg1 = XEXP (addr, 0);
360 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
362 ireg = XEXP (addr, 0);
365 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
366 || MEM_P (XEXP (addr, 1)))
370 if (CONST_INT_P (offset))
371 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
374 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
375 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
378 offset = XEXP (addr, 1);
380 else if (REG_P (XEXP (addr, 1)))
383 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
385 reg1 = XEXP (addr, 1);
389 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
391 ireg = XEXP (addr, 1);
395 /* If REG1 is nonzero, figure out if it is a base or index register. */
399 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
402 || (flag_pic && symbolic_operand (offset, SImode)))))
413 if (flag_pic && symbolic_operand (offset, SImode))
418 output_operand_lossage ("symbol used with both base and indexed registers");
421 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* With -fPIC, a symbol+offset reference to a non-local symbol cannot
   be expressed; report it rather than emitting wrong code.  */
422 if (flag_pic > 1 && GET_CODE (offset) == CONST
423 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
424 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
427 output_operand_lossage ("symbol with offset used in PIC mode");
431 /* symbol(reg) isn't PIC, but symbol[reg] is. */
440 output_address (offset);
444 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
/* Emit the index part; a MULT's scale factor is implied by the
   operand size in VAX [reg] syntax, so only the register is printed.  */
448 if (GET_CODE (ireg) == MULT)
449 ireg = XEXP (ireg, 0);
450 gcc_assert (REG_P (ireg));
451 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
/* Anything else: a plain constant address.  */
456 output_addr_const (file, addr);
/* Print operand X to FILE under the control of CODE, a letter from
   the %-spec in an assembler template.  The VAX-specific codes mostly
   rewrite integer constants into the immediate form a particular
   instruction expects (negated, complemented, masked, etc.).  */
461 print_operand (FILE *file, rtx x, int code)
464 fputc (ASM_DOUBLE_CHAR, file);
465 else if (code == '|')
466 fputs (REGISTER_PREFIX, file);
467 else if (code == 'c')
468 fputs (cond_name (x), file);
469 else if (code == 'C')
470 fputs (rev_cond_name (x), file);
471 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
472 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
473 else if (code == 'P' && CONST_INT_P (x))
474 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
475 else if (code == 'N' && CONST_INT_P (x))
476 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
477 /* rotl instruction cannot deal with negative arguments. */
478 else if (code == 'R' && CONST_INT_P (x))
479 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
480 else if (code == 'H' && CONST_INT_P (x))
481 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
482 else if (code == 'h' && CONST_INT_P (x))
483 fprintf (file, "$%d", (short) - INTVAL (x));
484 else if (code == 'B' && CONST_INT_P (x))
485 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
486 else if (code == 'b' && CONST_INT_P (x))
487 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
488 else if (code == 'M' && CONST_INT_P (x))
/* 'M': mask of INTVAL(x) low bits, complemented.  */
489 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
491 fprintf (file, "%s", reg_names[REGNO (x)]);
493 output_address (XEXP (x, 0));
/* Float constants are printed in VAX immediate notation: $0f... for
   F_float, $0d/$0g for double depending on ASM_DOUBLE_CHAR.  */
494 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
497 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
498 sizeof (dstr), 0, 1);
499 fprintf (file, "$0f%s", dstr);
501 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
504 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
505 sizeof (dstr), 0, 1);
506 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
510 if (flag_pic > 1 && symbolic_operand (x, SImode))
513 output_operand_lossage ("symbol used as immediate operand");
516 output_addr_const (file, x);
/* cond_name / rev_cond_name: map an RTL comparison code to the VAX
   condition-suffix string (rev_cond_name yields the reversed test);
   the case arms are not visible in this view.  */
523 switch (GET_CODE (op))
552 rev_cond_name (rtx op)
554 switch (GET_CODE (op))
/* Return true if C is a floating-point constant that can be used as
   a VAX short (6-bit) floating literal.  */
583 vax_float_literal (rtx c)
585 enum machine_mode mode;
586 REAL_VALUE_TYPE r, s;
589 if (GET_CODE (c) != CONST_DOUBLE)
/* The shared const_tiny_rtx constants are always encodable.  */
594 if (c == const_tiny_rtx[(int) mode][0]
595 || c == const_tiny_rtx[(int) mode][1]
596 || c == const_tiny_rtx[(int) mode][2])
599 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
/* NOTE(review): this loop appears to test small powers of two and
   their exact reciprocals against C — confirm against the elided
   loop body setting X.  */
601 for (i = 0; i < 7; i++)
605 REAL_VALUE_FROM_INT (s, x, 0, mode);
607 if (REAL_VALUES_EQUAL (r, s))
609 ok = exact_real_inverse (mode, &s);
611 if (REAL_VALUES_EQUAL (r, s))
618 /* Return the cost in cycles of a memory address, relative to register
621 Each of the following adds the indicated number of cycles:
625 1 - indexing and/or offset(register)
630 vax_address_cost_1 (rtx addr)
632 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
633 rtx plus_op0 = 0, plus_op1 = 0;
/* Classify the address one level at a time, accumulating component
   costs; the loop/goto structure is partially elided here.  */
635 switch (GET_CODE (addr))
645 indexed = 1; /* 2 on VAX 2 */
648 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
650 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
654 offset = 1; /* 2 on VAX 2 */
656 case LABEL_REF: /* this is probably a byte offset from the pc */
662 plus_op1 = XEXP (addr, 0);
664 plus_op0 = XEXP (addr, 0);
665 addr = XEXP (addr, 1);
668 indir = 2; /* 3 on VAX 2 */
669 addr = XEXP (addr, 0);
675 /* Up to 3 things can be added in an address. They are stored in
676 plus_op0, plus_op1, and addr. */
690 /* Indexing and register+offset can both be used (except on a VAX 2)
691 without increasing execution time over either one alone. */
692 if (reg && indexed && offset)
693 return reg + indir + offset + predec;
694 return reg + indexed + indir + offset + predec;
/* TARGET_ADDRESS_COST hook: a bare register costs 1; anything else
   adds the cycle estimate from vax_address_cost_1.  */
698 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
700 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
703 /* Cost of an expression on a VAX. This version has costs tuned for the
704 CVAX chip (found in the VAX 3 series) with comments for variations on
707 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
708 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
709 costs on a per cpu basis. */
712 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
713 bool speed ATTRIBUTE_UNUSED)
715 enum machine_mode mode = GET_MODE (x);
716 int i = 0; /* may be modified in switch */
717 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
721 /* On a VAX, constants from 0..63 are cheap because they can use the
722 1 byte literal constant format. Compare to -1 should be made cheap
723 so that decrement-and-branch insns can be formed more easily (if
724 the value -1 is copied to a register some decrement-and-branch
725 patterns will not match). */
/* CONST_INT: cost 1 when it fits the 6-bit literal format (0..63,
   or the negation/complement forms the outer operation can absorb).  */
732 if (outer_code == AND)
734 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
737 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
738 || (outer_code == COMPARE
740 || ((outer_code == PLUS || outer_code == MINUS)
741 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
755 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
756 *total = vax_float_literal (x) ? 5 : 8;
758 *total = ((CONST_DOUBLE_HIGH (x) == 0
759 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
760 || (outer_code == PLUS
761 && CONST_DOUBLE_HIGH (x) == -1
762 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
768 return true; /* Implies register operand. */
772 return true; /* Implies register operand. */
/* Operation costs below are in cycles; per-model variations are noted
   in the trailing comments.  */
778 *total = 16; /* 4 on VAX 9000 */
781 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
784 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
789 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
792 *total = MAX_COST; /* Mode is not supported. */
800 *total = MAX_COST; /* Mode is not supported. */
808 *total = 30; /* Highly variable. */
809 else if (mode == DFmode)
810 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
813 *total = 11; /* 25 on VAX 2 */
823 *total = MAX_COST; /* Mode is not supported. */
830 *total = (6 /* 4 on VAX 9000 */
831 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
835 *total = 7; /* 17 on VAX 2 */
844 *total = 10; /* 6 on VAX 9000 */
849 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
850 if (CONST_INT_P (XEXP (x, 1)))
851 fmt = "e"; /* all constant rotate counts are short */
856 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
857 /* Small integer operands can use subl2 and addl2. */
858 if ((CONST_INT_P (XEXP (x, 1)))
859 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
869 /* AND is special because the first operand is complemented. */
871 if (CONST_INT_P (XEXP (x, 0)))
873 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
883 else if (mode == SFmode)
885 else if (mode == DImode)
901 if (mode == DImode || mode == DFmode)
902 *total = 5; /* 7 on VAX 2 */
904 *total = 3; /* 4 on VAX 2 */
/* A move to/from memory also pays the addressing-mode cost.  */
906 if (!REG_P (x) && GET_CODE (x) != POST_INC)
907 *total += vax_address_cost_1 (x);
913 *total = 3; /* FIXME: Costs need to be checked */
920 /* Now look inside the expression. Operands which are not registers or
921 short constants add to the cost.
923 FMT and I may have been adjusted in the switch above for instructions
924 which require special handling. */
926 while (*fmt++ == 'e')
928 rtx op = XEXP (x, i);
931 code = GET_CODE (op);
933 /* A NOT is likely to be found as the first operand of an AND
934 (in which case the relevant cost is of the operand inside
935 the not) and not likely to be found anywhere else. */
937 op = XEXP (op, 0), code = GET_CODE (op);
942 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
943 && GET_MODE (x) != QImode)
944 *total += 1; /* 2 on VAX 2 */
949 *total += 1; /* 2 on VAX 2 */
952 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
954 /* Registers are faster than floating point constants -- even
955 those constants which can be encoded in a single byte. */
956 if (vax_float_literal (op))
959 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
963 if (CONST_DOUBLE_HIGH (op) != 0
964 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
969 *total += 1; /* 2 on VAX 2 */
970 if (!REG_P (XEXP (op, 0)))
971 *total += vax_address_cost_1 (XEXP (op, 0));
984 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
985 Used for C++ multiple inheritance.
986 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
987 addl2 $DELTA, 4(ap) #adjust first argument
988 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
992 vax_output_mi_thunk (FILE * file,
993 tree thunk ATTRIBUTE_UNUSED,
995 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
/* 0x0ffc is the conservative entry mask saving r2-r11; the first
   argument lives at 4(ap) in the VAX calling convention.  */
998 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
999 asm_fprintf (file, ",4(%Rap)\n");
1000 fprintf (file, "\tjmp ");
1001 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
/* +2 skips FUNCTION's own 2-byte entry mask word.  */
1002 fprintf (file, "+2\n");
/* TARGET_STRUCT_VALUE_RTX hook: aggregate return values are passed
   via the fixed register VAX_STRUCT_VALUE_REGNUM.  */
1006 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1007 int incoming ATTRIBUTE_UNUSED)
1009 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
/* TARGET_BUILTIN_SETJMP_FRAME_VALUE hook: __builtin_setjmp records
   the hard frame pointer rather than the virtual one.  */
1013 vax_builtin_setjmp_frame_value (void)
1015 return hard_frame_pointer_rtx;
1018 /* Worker function for NOTICE_UPDATE_CC. */
/* Record in cc_status how insn pattern EXP affects the condition
   codes, so later comparisons against 0 can be elided.  */
1021 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1023 if (GET_CODE (exp) == SET)
1025 if (GET_CODE (SET_SRC (exp)) == CALL)
1027 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1028 && GET_CODE (SET_DEST (exp)) != PC)
1030 cc_status.flags = 0;
1031 /* The integer operations below don't set carry or
1032 set it in an incompatible way. That's ok though
1033 as the Z bit is all we need when doing unsigned
1034 comparisons on the result of these insns (since
1035 they're always with 0). Set CC_NO_OVERFLOW to
1036 generate the correct unsigned branches. */
1037 switch (GET_CODE (SET_SRC (exp)))
1040 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1048 cc_status.flags = CC_NO_OVERFLOW;
/* Remember what was compared so a following compare against the same
   values can be deleted.  */
1053 cc_status.value1 = SET_DEST (exp);
1054 cc_status.value2 = SET_SRC (exp);
1057 else if (GET_CODE (exp) == PARALLEL
1058 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1060 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1062 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1064 cc_status.flags = 0;
1065 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1066 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1069 /* PARALLELs whose first element sets the PC are aob,
1070 sob insns. They do change the cc's. */
/* Invalidate value2 when it could be clobbered by the destination.  */
1075 if (cc_status.value1 && REG_P (cc_status.value1)
1077 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1078 cc_status.value2 = 0;
1079 if (cc_status.value1 && MEM_P (cc_status.value1)
1081 && MEM_P (cc_status.value2))
1082 cc_status.value2 = 0;
1083 /* Actual condition, one line up, should be that value2's address
1084 depends on value1, but that is too much of a pain. */
1087 /* Output integer move instructions. */
/* Return the assembler template for moving OPERANDS[1] into
   OPERANDS[0] in MODE (DImode/SImode/HImode/QImode), choosing the
   shortest encoding for constants (clear, mcom, movz, cvt, ash).
   May emit a first instruction itself and return the second when a
   quadword move is split.  */
1090 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1091 enum machine_mode mode)
1094 const char *pattern_hi, *pattern_lo;
1099 if (operands[1] == const0_rtx)
/* Size optimization: try to express a 64-bit constant as a 6-bit
   literal shifted left, so a single ashq can build it.  */
1101 if (TARGET_QMATH && optimize_size
1102 && (CONST_INT_P (operands[1])
1103 || GET_CODE (operands[1]) == CONST_DOUBLE))
1105 unsigned HOST_WIDE_INT hval, lval;
1108 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1110 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1112 /* Make sure only the low 32 bits are valid. */
1113 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1114 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1118 lval = INTVAL (operands[1]);
1122 /* Here we see if we are trying to see if the 64bit value is really
1123 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1124 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1125 8 bytes - 1 shift byte - 1 short literal byte. */
1127 && (n = exact_log2 (lval & (- lval))) != -1
1128 && (lval >> n) < 64)
1132 #if HOST_BITS_PER_WIDE_INT == 32
1133 /* On 32bit platforms, if the 6bits didn't overflow into the
1134 upper 32bit value that value better be 0. If we have
1135 overflowed, make sure it wasn't too much. */
1138 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1139 n = 0; /* failure */
1141 lval |= hval << (32 - n);
1144 /* If n is 0, then ashq is not the best way to emit this. */
1147 operands[1] = GEN_INT (lval);
1148 operands[2] = GEN_INT (n);
1149 return "ashq %2,%1,%0";
1151 #if HOST_BITS_PER_WIDE_INT == 32
1153 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1154 upper 32bit value. */
1156 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1157 && (hval >> n) < 64)
1159 operands[1] = GEN_INT (hval >> n);
1160 operands[2] = GEN_INT (n + 32);
1161 return "ashq %2,%1,%0";
/* General DImode constant: split into two SImode moves, unless both
   halves are plain movl/pushl, in which case one movq is shorter.  */
1167 && (!MEM_P (operands[0])
1168 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1169 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1170 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1171 && ((CONST_INT_P (operands[1])
1172 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1173 || GET_CODE (operands[1]) == CONST_DOUBLE))
1175 hi[0] = operands[0];
1176 hi[1] = operands[1];
1178 split_quadword_operands (insn, SET, hi, lo, 2);
1180 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1181 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1183 /* The patterns are just movl/movl or pushl/pushl then a movq will
1184 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1185 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1187 if ((!strncmp (pattern_lo, "movl", 4)
1188 && !strncmp (pattern_hi, "movl", 4))
1189 || (!strncmp (pattern_lo, "pushl", 5)
1190 && !strncmp (pattern_hi, "pushl", 5)))
1191 return "movq %1,%0";
/* For a push (predecrement destination) emit the high half first so
   memory ends up in the right order; otherwise low half first.  */
1193 if (MEM_P (operands[0])
1194 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1196 output_asm_insn (pattern_hi, hi);
1197 operands[0] = lo[0];
1198 operands[1] = lo[1];
1199 operands[2] = lo[2];
1204 output_asm_insn (pattern_lo, lo);
1205 operands[0] = hi[0];
1206 operands[1] = hi[1];
1207 operands[2] = hi[2];
1211 return "movq %1,%0";
1214 if (symbolic_operand (operands[1], SImode))
1216 if (push_operand (operands[0], SImode))
1217 return "pushab %a1";
1218 return "movab %a1,%0";
1221 if (operands[1] == const0_rtx)
1223 if (push_operand (operands[1], SImode))
/* SImode constants >= 64 don't fit the short literal; try the
   complemented, zero-extended, sign-extended, and shifted forms.  */
1228 if (CONST_INT_P (operands[1])
1229 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1231 HOST_WIDE_INT i = INTVAL (operands[1]);
1233 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1234 return "mcoml %N1,%0";
1235 if ((unsigned HOST_WIDE_INT)i < 0x100)
1236 return "movzbl %1,%0";
1237 if (i >= -0x80 && i < 0)
1238 return "cvtbl %1,%0";
1240 && (n = exact_log2 (i & (-i))) != -1
1241 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1243 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1244 operands[2] = GEN_INT (n);
1245 return "ashl %2,%1,%0";
1247 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1248 return "movzwl %1,%0";
1249 if (i >= -0x8000 && i < 0)
1250 return "cvtwl %1,%0";
1252 if (push_operand (operands[0], SImode))
1254 return "movl %1,%0";
1257 if (CONST_INT_P (operands[1]))
1259 HOST_WIDE_INT i = INTVAL (operands[1]);
1262 else if ((unsigned HOST_WIDE_INT)i < 64)
1263 return "movw %1,%0";
1264 else if ((unsigned HOST_WIDE_INT)~i < 64)
1265 return "mcomw %H1,%0";
1266 else if ((unsigned HOST_WIDE_INT)i < 256)
1267 return "movzbw %1,%0";
1268 else if (i >= -0x80 && i < 0)
1269 return "cvtbw %1,%0";
1271 return "movw %1,%0";
1274 if (CONST_INT_P (operands[1]))
1276 HOST_WIDE_INT i = INTVAL (operands[1]);
1279 else if ((unsigned HOST_WIDE_INT)~i < 64)
1280 return "mcomb %B1,%0";
1282 return "movb %1,%0";
1289 /* Output integer add instructions.
1291 The space-time-opcode tradeoffs for addition vary by model of VAX.
1293 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1294 but it not faster on other models.
1296 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1297 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1298 a register is used in an address too soon after it is set.
1299 Compromise by using movab only when it is shorter than the add
1300 or the base register in the address is one of sp, ap, and fp,
1301 which are not modified very often. */
/* Return the assembler template to add OPERANDS[2] to OPERANDS[1]
   giving OPERANDS[0] in MODE; DImode is handled via a split into two
   SImode operations with adwc/sbwc carry propagation.  */
1304 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1311 const char *pattern;
1315 if (TARGET_QMATH && 0)
1318 split_quadword_operands (insn, PLUS, operands, low, 3);
1322 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1323 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1324 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1325 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1328 /* No reason to add a 0 to the low part and thus no carry, so just
1329 emit the appropriate add/sub instruction. */
1330 if (low[2] == const0_rtx)
1331 return vax_output_int_add (NULL, operands, SImode);
1333 /* Are we doing addition or subtraction? */
1334 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1336 /* We can't use vax_output_int_add since some the patterns don't
1337 modify the carry bit. */
1340 if (low[2] == constm1_rtx)
1341 pattern = "decl %0";
1343 pattern = "subl2 $%n2,%0";
1347 if (low[2] == const1_rtx)
1348 pattern = "incl %0";
1350 pattern = "addl2 %2,%0";
1352 output_asm_insn (pattern, low);
1354 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1355 two 32bit parts, we complement each and then add one to
1356 low part. We know that the low part can't overflow since
1357 it's value can never be 0. */
1359 return "sbwc %N2,%0";
1360 return "adwc %2,%0";
1363 /* Add low parts. */
1364 if (rtx_equal_p (operands[0], operands[1]))
1366 if (low[2] == const0_rtx)
1367 /* Should examine operand, punt if not POST_INC. */
1368 pattern = "tstl %0", carry = 0;
1369 else if (low[2] == const1_rtx)
1370 pattern = "incl %0";
1372 pattern = "addl2 %2,%0";
1376 if (low[2] == const0_rtx)
1377 pattern = "movl %1,%0", carry = 0;
1379 pattern = "addl3 %2,%1,%0";
1382 output_asm_insn (pattern, low);
1384 /* If CARRY is 0, we don't have any carry value to worry about. */
1385 return get_insn_template (CODE_FOR_addsi3, insn);
1386 /* %0 = C + %1 + %2 */
1387 if (!rtx_equal_p (operands[0], operands[1]))
1388 output_asm_insn ((operands[1] == const0_rtx
1390 : "movl %1,%0"), operands);
1391 return "adwc %2,%0";
/* SImode: pick the shortest of incl/decl, subl2/subl3 with a negated
   short literal, movab address arithmetic, or plain addl2/addl3.  */
1395 if (rtx_equal_p (operands[0], operands[1]))
1397 if (operands[2] == const1_rtx)
1399 if (operands[2] == constm1_rtx)
1401 if (CONST_INT_P (operands[2])
1402 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1403 return "subl2 $%n2,%0";
1404 if (CONST_INT_P (operands[2])
1405 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1406 && REG_P (operands[1])
1407 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1408 || REGNO (operands[1]) > 11))
1409 return "movab %c2(%1),%0";
1410 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1411 return "movab %a2[%0],%0";
1412 return "addl2 %2,%0";
1415 if (rtx_equal_p (operands[0], operands[2]))
1417 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1418 return "movab %a1[%0],%0";
1419 return "addl2 %1,%0";
1422 if (CONST_INT_P (operands[2])
1423 && INTVAL (operands[2]) < 32767
1424 && INTVAL (operands[2]) > -32768
1425 && REG_P (operands[1])
1426 && push_operand (operands[0], SImode))
1427 return "pushab %c2(%1)";
1429 if (CONST_INT_P (operands[2])
1430 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1431 return "subl3 $%n2,%1,%0";
1433 if (CONST_INT_P (operands[2])
1434 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1435 && REG_P (operands[1])
1436 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1437 || REGNO (operands[1]) > 11))
1438 return "movab %c2(%1),%0";
1440 /* Add this if using gcc on a VAX 3xxx:
1441 if (REG_P (operands[1]) && REG_P (operands[2]))
1442 return "movab (%1)[%2],%0";
1445 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1447 if (push_operand (operands[0], SImode))
1448 return "pushab %a2[%1]";
1449 return "movab %a2[%1],%0";
1452 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1454 if (push_operand (operands[0], SImode))
1455 return "pushab %a1[%2]";
1456 return "movab %a1[%2],%0";
1459 if (flag_pic && REG_P (operands[0])
1460 && symbolic_operand (operands[2], SImode))
1461 return "movab %a2,%0;addl2 %1,%0";
1464 && (symbolic_operand (operands[1], SImode)
1465 || symbolic_operand (operands[1], SImode)))
1468 return "addl3 %1,%2,%0";
/* HImode and QImode mirror the SImode logic with word/byte opcodes.  */
1471 if (rtx_equal_p (operands[0], operands[1]))
1473 if (operands[2] == const1_rtx)
1475 if (operands[2] == constm1_rtx)
1477 if (CONST_INT_P (operands[2])
1478 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1479 return "subw2 $%n2,%0";
1480 return "addw2 %2,%0";
1482 if (rtx_equal_p (operands[0], operands[2]))
1483 return "addw2 %1,%0";
1484 if (CONST_INT_P (operands[2])
1485 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1486 return "subw3 $%n2,%1,%0";
1487 return "addw3 %1,%2,%0";
1490 if (rtx_equal_p (operands[0], operands[1]))
1492 if (operands[2] == const1_rtx)
1494 if (operands[2] == constm1_rtx)
1496 if (CONST_INT_P (operands[2])
1497 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1498 return "subb2 $%n2,%0";
1499 return "addb2 %2,%0";
1501 if (rtx_equal_p (operands[0], operands[2]))
1502 return "addb2 %1,%0";
1503 if (CONST_INT_P (operands[2])
1504 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1505 return "subb3 $%n2,%1,%0";
1506 return "addb3 %1,%2,%0";
/* Return the assembler template to subtract OPERANDS[2] from
   OPERANDS[1] giving OPERANDS[0] in MODE; DImode is split into two
   SImode subtractions joined by sbwc borrow propagation, with a
   special sequence for negation (0 - x).  */
1514 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1521 const char *pattern;
1524 if (TARGET_QMATH && 0)
1527 split_quadword_operands (insn, MINUS, operands, low, 3);
1531 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1533 /* Negation is tricky. It's basically complement and increment.
1534 Negate hi, then lo, and subtract the carry back. */
1535 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1536 || (MEM_P (operands[0])
1537 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1538 fatal_insn ("illegal operand detected", insn);
1539 output_asm_insn ("mnegl %2,%0", operands);
1540 output_asm_insn ("mnegl %2,%0", low);
1541 return "sbwc $0,%0";
1543 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1544 gcc_assert (rtx_equal_p (low[0], low[1]));
1545 if (low[2] == const1_rtx)
1546 output_asm_insn ("decl %0", low);
1548 output_asm_insn ("subl2 %2,%0", low);
1549 return "sbwc %2,%0";
1552 /* Subtract low parts. */
1553 if (rtx_equal_p (operands[0], operands[1]))
1555 if (low[2] == const0_rtx)
/* Subtracting 0 from the low half produces no borrow.  */
1556 pattern = 0, carry = 0;
1557 else if (low[2] == constm1_rtx)
1558 pattern = "decl %0";
1560 pattern = "subl2 %2,%0";
1564 if (low[2] == constm1_rtx)
1565 pattern = "decl %0";
1566 else if (low[2] == const0_rtx)
1567 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1569 pattern = "subl3 %2,%1,%0";
1572 output_asm_insn (pattern, low);
1575 if (!rtx_equal_p (operands[0], operands[1]))
1576 return "movl %1,%0;sbwc %2,%0";
1577 return "sbwc %2,%0";
1578 /* %0 = %2 - %1 - C */
1580 return get_insn_template (CODE_FOR_subsi3, insn);
1588 /* True if X is an rtx for a constant that is a valid address.  */
/* LABEL_REF, SYMBOL_REF, CONST_INT and HIGH are accepted outright; a
   CONST (symbol + offset) is additionally screened under
   NO_EXTERNAL_INDIRECT_ADDRESS: a non-local symbol may live in a
   sharable image, so its address is rejected there.  (The return
   statements for the accept/reject branches are on the elided lines.)  */
1591 legitimate_constant_address_p (rtx x)
1593 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1594 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1596 if (GET_CODE (x) != CONST)
1598 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1600 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1601 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1607 /* True if the constant value X is a legitimate general operand.
1608 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE.  */
/* On the VAX every constant is a legitimate operand, so X is unused
   (the unconditional return is on the elided line).  */
1611 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
1616 /* The other macros defined here are used only in legitimate_address_p ().  */
/* STRICT mirrors reload's strict checking: when set, only hard registers
   passing REGNO_OK_FOR_INDEX_P / REGNO_OK_FOR_BASE_P qualify; when
   clear, any (pseudo) register is acceptable.  */
1618 /* Nonzero if X is a hard reg that can be used as an index
1619 or, if not strict, if it is a pseudo reg.  */
1620 #define INDEX_REGISTER_P(X, STRICT) \
1621 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1623 /* Nonzero if X is a hard reg that can be used as a base reg
1624 or, if not strict, if it is a pseudo reg.  */
1625 #define BASE_REGISTER_P(X, STRICT) \
1626 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1628 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1630 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1631 are no SYMBOL_REFs for external symbols present.  */
/* INDIRECT says whether the address will be used through an extra level
   of indirection; under PIC a non-local symbol cannot be indirected.  */
1634 indirectable_constant_address_p (rtx x, bool indirect)
1636 if (GET_CODE (x) == SYMBOL_REF)
1637 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
/* CONST (sym + offset): acceptable unless it wraps a non-local symbol
   under PIC (the flag_pic test is on the elided line).  */
1639 if (GET_CODE (x) == CONST)
1641 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1642 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1644 return CONSTANT_ADDRESS_P (x);
1647 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
/* Without NO_EXTERNAL_INDIRECT_ADDRESS any constant address may be
   indirected, so this degenerates to plain CONSTANT_ADDRESS_P.  */
1650 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1652 return CONSTANT_ADDRESS_P (x);
1655 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1657 /* True if X is an address which can be indirected.  External symbols
1658 could be in a sharable image library, so we disallow those.  */
1661 indirectable_address_p (rtx x, bool strict, bool indirect)
/* Directly indirectable: an acceptable constant address or a base
   register.  */
1663 if (indirectable_constant_address_p (x, indirect)
1664 || BASE_REGISTER_P (x, strict))
/* Otherwise only (base-reg + constant); under PIC the displacement must
   be a plain integer, not a symbolic constant.  */
1666 if (GET_CODE (x) != PLUS
1667 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1668 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1670 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1673 /* Return true if x is a valid address not using indexing.
1674 (This much is the easy part.)  */
1676 nonindexed_address_p (rtx x, bool strict)
/* During reload a pseudo may stand for a stack slot: accept it unless
   its recorded memory equivalent is itself a bad address.  */
1681 extern rtx *reg_equiv_mem;
1682 if (! reload_in_progress
1683 || reg_equiv_mem[REGNO (x)] == 0
1684 || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
1687 if (indirectable_constant_address_p (x, false))
1689 if (indirectable_address_p (x, strict, false))
/* (mem addr) is VAX address indirection; PRE_DEC/POST_INC must wrap a
   base register.  */
1691 xfoo0 = XEXP (x, 0);
1692 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1694 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1695 && BASE_REGISTER_P (xfoo0, strict))
1700 /* True if PROD is either a reg times size of mode MODE and MODE is less
1701 than or equal 8 bytes, or just a reg if MODE is one byte.  */
1704 index_term_p (rtx prod, enum machine_mode mode, bool strict)
/* Byte accesses need no scaling, so a bare register is enough.  */
1708 if (GET_MODE_SIZE (mode) == 1)
1709 return BASE_REGISTER_P (prod, strict);
/* Otherwise require (mult size reg); VAX index scaling tops out at the
   8-byte operand size.  */
1711 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1714 xfoo0 = XEXP (prod, 0);
1715 xfoo1 = XEXP (prod, 1);
/* The size constant may appear on either side of the MULT.  */
1717 if (CONST_INT_P (xfoo0)
1718 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1719 && INDEX_REGISTER_P (xfoo1, strict))
1722 if (CONST_INT_P (xfoo1)
1723 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1724 && INDEX_REGISTER_P (xfoo0, strict))
1730 /* Return true if X is the sum of a register
1731 and a valid index term for mode MODE.  */
1733 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1737 if (GET_CODE (x) != PLUS)
1740 xfoo0 = XEXP (x, 0);
1741 xfoo1 = XEXP (x, 1);
/* PLUS is commutative: accept the base register and the index term in
   either operand order.  */
1743 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1746 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1752 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address.
   XFOO0 is the constant part, XFOO1 the register/index part.  */
1754 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1756 if (!CONSTANT_ADDRESS_P (xfoo0))
/* constant[base-reg]: under PIC only byte-sized modes (no scaling) are
   acceptable.  */
1758 if (BASE_REGISTER_P (xfoo1, strict))
1759 return !flag_pic || mode == QImode;
/* Under PIC a symbolic displacement cannot take an index at all.  */
1760 if (flag_pic && symbolic_operand (xfoo0, SImode))
1762 return reg_plus_index_p (xfoo1, mode, strict);
1765 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1766 that is a valid memory address for an instruction.
1767 The MODE argument is the machine mode for the MEM expression
1768 that wants to use this address.  */
/* Non-indexed addresses are accepted outright; anything else must be a
   PLUS combining an index term with a base address in one of the two
   forms checked below.  */
1770 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1774 if (nonindexed_address_p (x, strict))
1777 if (GET_CODE (x) != PLUS)
1780 /* Handle <address>[index] represented with index-sum outermost */
1782 xfoo0 = XEXP (x, 0);
1783 xfoo1 = XEXP (x, 1);
1785 if (index_term_p (xfoo0, mode, strict)
1786 && nonindexed_address_p (xfoo1, strict))
1789 if (index_term_p (xfoo1, mode, strict)
1790 && nonindexed_address_p (xfoo0, strict))
1793 /* Handle offset(reg)[index] with offset added outermost */
1795 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1796 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1802 /* Return true if x (a legitimate address expression) has an effect that
1803 depends on the machine mode it is used for.  On the VAX, the predecrement
1804 and postincrement address depend thus (the amount of decrement or
1805 increment being the length of the operand) and all indexed address depend
1806 thus (because the index scale factor is the length of the operand).  */
1809 vax_mode_dependent_address_p (rtx x)
1813 /* Auto-increment cases are now dealt with generically in recog.c.  */
/* Anything that is not a PLUS cannot be an indexed address here.  */
1814 if (GET_CODE (x) != PLUS)
1817 xfoo0 = XEXP (x, 0);
1818 xfoo1 = XEXP (x, 1);
/* reg + literal displacement is mode-independent ...  */
1820 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1822 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
/* ... as is reg + any constant address when not compiling PIC.  */
1824 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1826 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
/* If X is a memory operand whose address the double-word add/subtract
   carry patterns cannot use (per illegal_addsub_di_memory_operand),
   rebuild it as a MEM whose address lives in a fresh pseudo, and return
   the (possibly unchanged) operand.  */
1833 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1835 if (illegal_addsub_di_memory_operand (x, mode))
1837 rtx addr = XEXP (x, 0);
1838 rtx temp = gen_reg_rtx (Pmode);
1840 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* Under PIC, split (const (plus sym offset)): load just the symbol into
   the pseudo and re-apply the offset afterwards.  */
1841 if (GET_CODE (addr) == CONST && flag_pic)
1843 offset = XEXP (XEXP (addr, 0), 1);
1844 addr = XEXP (XEXP (addr, 0), 0);
1847 emit_move_insn (temp, addr);
1849 temp = gen_rtx_PLUS (Pmode, temp, offset);
1850 x = gen_rtx_MEM (DImode, temp);
/* Expand a DImode addition or subtraction (CODE is PLUS or MINUS) of
   operands[1] and operands[2] into operands[0], emitting SImode moves
   plus the carry-propagating adcdi3/sbcdi3 patterns (or the old-style
   DImode patterns when TARGET_QMATH is off).  */
1856 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
/* hi_only: the low word of the second source is zero, so only the high
   words need arithmetic.  */
1858 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1861 rtx (*gen_old_insn)(rtx, rtx, rtx);
1862 rtx (*gen_si_insn)(rtx, rtx, rtx);
1863 rtx (*gen_insn)(rtx, rtx, rtx);
/* Select the generator trio for the requested operation.  */
1867 gen_old_insn = gen_adddi3_old;
1868 gen_si_insn = gen_addsi3;
1869 gen_insn = gen_adcdi3;
1871 else if (code == MINUS)
1873 gen_old_insn = gen_subdi3_old;
1874 gen_si_insn = gen_subsi3;
1875 gen_insn = gen_sbcdi3;
1880 /* If this is addition (thus operands are commutative) and if there is one
1881 addend that duplicates the destination, we want that addend to be the
1884 && rtx_equal_p (operands[0], operands[2])
1885 && !rtx_equal_p (operands[1], operands[2]))
1888 operands[2] = operands[1];
/* Non-QMATH: emit the old-style single DImode pattern and stop.  */
1894 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
/* hi_only case: copy the whole source into a register destination if
   needed, then operate on the high words alone.  */
1898 if (!rtx_equal_p (operands[0], operands[1])
1899 && (REG_P (operands[0]) && MEM_P (operands[1])))
1901 emit_move_insn (operands[0], operands[1]);
1902 operands[1] = operands[0];
1905 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1906 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1907 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1909 if (!rtx_equal_p (operands[0], operands[1]))
1910 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1911 operand_subword (operands[1], 0, 0, DImode));
1913 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1914 operand_subword (operands[1], 1, 0, DImode),
1915 operand_subword (operands[2], 1, 0, DImode)));
1919 /* If we are adding the same value together, that's really a multiply by 2,
1920 and that's just a left shift of 1.  */
1921 if (rtx_equal_p (operands[1], operands[2]))
1923 gcc_assert (code != MINUS);
1924 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1928 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1930 /* If an operand is the same as operand[0], use the operand[0] rtx
1931 because fixup will return an equivalent rtx but not an equal one.  */
1933 if (rtx_equal_p (operands[0], operands[1]))
1934 operands[1] = operands[0];
1936 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1938 if (rtx_equal_p (operands[0], operands[2]))
1939 operands[2] = operands[0];
1941 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1943 /* If we are subtracting not from ourselves [d = a - b], and because the
1944 carry ops are two operand only, we would need to do a move prior to
1945 the subtract.  And if d == b, we would need a temp otherwise
1946 [d = a, d -= d] and we end up with 0.  Instead we rewrite d = a - b
1947 into d = -b, d += a.  Since -b can never overflow, even if b == d,
1950 If we are doing addition, since the carry ops are two operand, if
1951 we aren't adding to ourselves, move the first addend to the
1952 destination first.  */
1954 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1955 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1957 if (code == MINUS && CONSTANT_P (operands[1]))
/* NOTE(review): temp is allocated here but its use is on elided lines --
   confirm before assuming it is dead.  */
1959 temp = gen_reg_rtx (DImode);
/* d = -b: negate via 0 - b with the carry pattern, then add a.  */
1960 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1962 gen_insn = gen_adcdi3;
1963 operands[2] = operands[1];
1964 operands[1] = operands[0];
1967 emit_move_insn (operands[0], operands[1]);
1970 /* Subtracting a constant will have been rewritten to an addition of the
1971 negative of that constant before we get here.  */
1972 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1973 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
/* Return whether LO and HI (the two pieces of a double-word value in
   mode MODE) designate adjacent storage, so that the pair could be
   addressed as one wider operand.  */
1978 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1980 HOST_WIDE_INT lo_offset;
1981 HOST_WIDE_INT hi_offset;
/* The two pieces must at least be the same kind of rtx.  */
1983 if (GET_CODE (lo) != GET_CODE (hi))
/* Register pair: consecutive hard registers, SImode pieces only.  */
1987 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
/* Constant pair: small non-negative low word with a zero high word.  */
1988 if (CONST_INT_P (lo))
1989 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
/* NOTE(review): this second CONST_INT_P (lo) test is unreachable -- the
   branch above already returns for every CONST_INT.  */
1990 if (CONST_INT_P (lo))
1991 return mode != SImode;
/* Memory pieces: volatile accesses must not be combined.  */
1996 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
/* Identical auto-increment addresses touch adjacent words by nature.  */
2002 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2003 return rtx_equal_p (lo, hi);
/* Extract the constant displacement from each address.  */
2005 switch (GET_CODE (lo))
2015 if (!CONST_INT_P (XEXP (lo, 1)))
2017 lo_offset = INTVAL (XEXP (lo, 1));
2024 switch (GET_CODE (hi))
2034 if (!CONST_INT_P (XEXP (hi, 1)))
2036 hi_offset = INTVAL (XEXP (hi, 1));
2043 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
/* Adjacent iff same base and offsets differing by exactly one word.  */
2046 return rtx_equal_p (lo, hi)
2047 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2050 /* Output assembler code for a block containing the constant parts
2051 of a trampoline, leaving space for the variable parts.  */
2053 /* On the VAX, the trampoline contains an entry mask and two instructions:
2055 movl $STATIC,r0 (store the functions static chain)
2056 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2059 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
/* Entry-mask placeholder; vax_trampoline_init copies the real mask from
   the target function.  */
2061 assemble_aligned_integer (2, const0_rtx);
/* movl-immediate encoding, followed by a 4-byte placeholder for the
   static chain value.  */
2062 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2063 assemble_aligned_integer (4, const0_rtx);
/* Register-operand byte selecting STATIC_CHAIN_REGNUM.  */
2064 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
/* jmp-absolute encoding, followed by a 4-byte placeholder for the
   target function address.  */
2065 assemble_aligned_integer (2, GEN_INT (0x9f17));
2066 assemble_aligned_integer (4, const0_rtx);
2069 /* We copy the register-mask from the function's pure code
2070 to the start of the trampoline.  */
/* Emit the constant template into M_TRAMP, then patch in the variable
   parts: the callee's entry mask, the static chain CXT, and the jump
   target derived from FNDECL.  */
2073 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2075 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2078 emit_block_move (m_tramp, assemble_trampoline_template (),
2079 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
/* First word: the callee's entry (register-save) mask, read from the
   start of its code.  */
2081 mem = adjust_address (m_tramp, HImode, 0);
2082 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
/* Static chain immediate at offset 4; jump target at offset 11,
   skipping the callee's own 2-byte entry mask.  */
2084 mem = adjust_address (m_tramp, SImode, 4);
2085 emit_move_insn (mem, cxt);
2086 mem = adjust_address (m_tramp, SImode, 11);
2087 emit_move_insn (mem, plus_constant (fnaddr, 2));
/* Synchronize the instruction stream so the freshly written code is
   fetchable.  */
2088 emit_insn (gen_sync_istream ());
2091 /* Value is the number of bytes of arguments automatically
2092 popped when returning from a subroutine call.
2093 FUNDECL is the declaration node of the function (as a tree),
2094 FUNTYPE is the data type of the function (as a tree),
2095 or for a library call it is an identifier node for the subroutine name.
2096 SIZE is the number of bytes of arguments passed on the stack.
2098 On the VAX, the RET insn pops a maximum of 255 args for any function.  */
2101 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2102 tree funtype ATTRIBUTE_UNUSED, int size)
/* 255 longwords is RET's hard limit; beyond that the caller must pop.  */
2104 return size > 255 * 4 ? 0 : size;
2107 /* Define where to put the arguments to a function.
2108 Value is zero to push the argument on the stack,
2109 or a hard register in which to store the argument.
2111 MODE is the argument's machine mode.
2112 TYPE is the data type of the argument (as a tree).
2113 This is null for libcalls where that information may
2115 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2116 the preceding args and about the function being called.
2117 NAMED is nonzero if this argument is a named parameter
2118 (otherwise it is an extra parameter matching an ellipsis).  */
2120 /* On the VAX all args are pushed.  */
/* Every parameter is unused: no argument ever lives in a register, so
   the body (on the elided lines) presumably just returns the null rtx --
   confirm against the full source.  */
2123 vax_function_arg (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2124 enum machine_mode mode ATTRIBUTE_UNUSED,
2125 const_tree type ATTRIBUTE_UNUSED,
2126 bool named ATTRIBUTE_UNUSED)
2131 /* Update the data in CUM to advance over an argument of mode MODE and
2132 data type TYPE.  (TYPE is null for libcalls where that information
2133 may not be available.)  */
2136 vax_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
2137 const_tree type, bool named ATTRIBUTE_UNUSED)
/* Each stack argument occupies its size rounded up to a multiple of 4
   bytes; BLKmode sizes come from the type instead of the mode.  */
2139 *cum += (mode != BLKmode
2140 ? (GET_MODE_SIZE (mode) + 3) & ~3
2141 : (int_size_in_bytes (type) + 3) & ~3);