1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
35 #include "insn-attr.h"
41 #include "diagnostic-core.h"
43 #include "tm-constrs.h"
46 #include "target-def.h"
/* Forward declarations for the static target-hook implementations
   defined later in this file.  NOTE(review): this listing is elided --
   several prototypes (e.g. vax_output_mi_thunk, vax_function_arg)
   are cut mid-parameter-list; confirm against the complete source.  */
48 static void vax_option_override (void);
49 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
50 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
51 static void vax_file_start (void);
52 static void vax_init_libfuncs (void);
53 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55 static int vax_address_cost_1 (rtx);
56 static int vax_address_cost (rtx, bool);
57 static bool vax_rtx_costs (rtx, int, int, int *, bool);
58 static rtx vax_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
60 static void vax_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
62 static rtx vax_struct_value_rtx (tree, int);
63 static rtx vax_builtin_setjmp_frame_value (void);
64 static void vax_asm_trampoline_template (FILE *);
65 static void vax_trampoline_init (rtx, tree, rtx);
66 static int vax_return_pops_args (tree, tree, int);
68 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below replaces the default target hook
   with the VAX-specific implementation; TARGET_INITIALIZER at the
   bottom collects them all into the global `targetm' vector.  */
69 #undef TARGET_ASM_ALIGNED_HI_OP
70 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
72 #undef TARGET_ASM_FUNCTION_PROLOGUE
73 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
75 #undef TARGET_ASM_FILE_START
76 #define TARGET_ASM_FILE_START vax_file_start
77 #undef TARGET_ASM_FILE_START_APP_OFF
78 #define TARGET_ASM_FILE_START_APP_OFF true
80 #undef TARGET_INIT_LIBFUNCS
81 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
83 #undef TARGET_ASM_OUTPUT_MI_THUNK
84 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
85 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
86 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
88 #undef TARGET_DEFAULT_TARGET_FLAGS
89 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
91 #undef TARGET_RTX_COSTS
92 #define TARGET_RTX_COSTS vax_rtx_costs
93 #undef TARGET_ADDRESS_COST
94 #define TARGET_ADDRESS_COST vax_address_cost
96 #undef TARGET_PROMOTE_PROTOTYPES
97 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
99 #undef TARGET_FUNCTION_ARG
100 #define TARGET_FUNCTION_ARG vax_function_arg
101 #undef TARGET_FUNCTION_ARG_ADVANCE
102 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
104 #undef TARGET_STRUCT_VALUE_RTX
105 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
107 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
108 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
110 #undef TARGET_LEGITIMATE_ADDRESS_P
111 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
113 #undef TARGET_FRAME_POINTER_REQUIRED
114 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
116 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
117 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
118 #undef TARGET_TRAMPOLINE_INIT
119 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
120 #undef TARGET_RETURN_POPS_ARGS
121 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
123 #undef TARGET_OPTION_OVERRIDE
124 #define TARGET_OPTION_OVERRIDE vax_option_override
126 struct gcc_target targetm = TARGET_INITIALIZER;
128 /* Set global variables as needed for the options enabled. */
/* Implements TARGET_OPTION_OVERRIDE.  NOTE(review): this listing is
   elided -- the return type, braces, and the condition guarding the
   REAL_MODE_FORMAT assignment (presumably TARGET_G_FLOAT) are
   missing here; confirm against the complete source.  */
131 vax_option_override (void)
133 /* We're VAX floating point, not IEEE floating point. */
135 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
137 #ifdef SUBTARGET_OVERRIDE_OPTIONS
138 SUBTARGET_OVERRIDE_OPTIONS;
142 /* Generate the assembly code for function entry. FILE is a stdio
143 stream to output the code to. SIZE is an int: how many units of
144 temporary storage to allocate.
146 Refer to the array `regs_ever_live' to determine which registers to
147 save; `regs_ever_live[I]' is nonzero if register number I is ever
148 used in the function. This function is responsible for knowing
149 which registers should not be saved even if used. */
/* Implements TARGET_ASM_FUNCTION_PROLOGUE: emits the VAX procedure
   entry mask word, dwarf2 CFI records for the saved registers, and
   the stack adjustment for the local frame.  NOTE(review): elided
   listing -- declarations of `regno', `mask', `offset' and several
   braces are missing here.  */
152 vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
/* Build the entry mask: one bit per call-saved register actually
   live in this function.  NOTE(review): the statement accumulating
   `mask' is elided in this listing.  */
157 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
158 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
161 fprintf (file, "\t.word 0x%x\n", mask);
163 if (dwarf2out_do_frame ())
165 const char *label = dwarf2out_cfi_label (false);
/* Walk registers downward, recording each save slot 4 bytes below
   the previous one; then the PC, FP and AP slots pushed by CALLS.  */
168 for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
169 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
170 dwarf2out_reg_save (label, regno, offset -= 4);
172 dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
173 dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
174 dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
175 dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
178 size -= STARTING_FRAME_OFFSET;
/* Allocate the local frame: movab vs. subl2.  NOTE(review): the
   size test selecting between the two encodings is elided here.  */
180 asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
182 asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
185 /* When debugging with stabs, we want to output an extra dummy label
186 so that gas can distinguish between D_float and G_float prior to
187 processing the .stabs directive identifying type double. */
/* Implements TARGET_ASM_FILE_START.  */
189 vax_file_start (void)
191 default_file_start ();
193 if (write_symbols == DBX_DEBUG)
194 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
197 /* We can use the BSD C library routines for the libgcc calls that are
198 still generated, since that's what they boil down to anyways. When
199 ELF, avoid the user's namespace. */
/* Implements TARGET_INIT_LIBFUNCS: route SImode unsigned divide and
   modulus to the BSD C library entry points (double-underscore
   prefixed when targeting ELF, to stay out of the user namespace).  */
202 vax_init_libfuncs (void)
204 if (TARGET_BSD_DIVMOD)
206 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
207 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
211 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split each DImode operand of INSN (for operation CODE) into a word
   left in operands[] and a word stored into low[], so the two SImode
   halves can be emitted separately.  Autoincrement addresses get
   special handling so their side effect is applied correctly.
   NOTE(review): the comment on the line above appears to describe an
   elided predicate rather than this function -- confirm against the
   complete source; the parameter list here is also cut short.  */
214 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
219 for (i = 0; i < n; i++)
222 for (i = 0; i < n; i++)
224 if (MEM_P (operands[i])
225 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
226 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
/* For autoincrement/autodecrement both halves share the same
   address RTX; the side effect applies once per SImode access.  */
228 rtx addr = XEXP (operands[i], 0);
229 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
231 else if (optimize_size && MEM_P (operands[i])
232 && REG_P (XEXP (operands[i], 0))
233 && (code != MINUS || operands[1] != const0_rtx)
234 && find_regno_note (insn, REG_DEAD,
235 REGNO (XEXP (operands[i], 0))))
/* The base register dies in this insn, so reuse it as a POST_INC
   pointer for the second word -- a smaller encoding when
   optimizing for size.  */
237 low[i] = gen_rtx_MEM (SImode,
238 gen_rtx_POST_INC (Pmode,
239 XEXP (operands[i], 0)));
240 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
/* Default case: take the two subwords of the DImode operand.  */
244 low[i] = operand_subword (operands[i], 0, 0, DImode);
245 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Output to FILE the assembler syntax for the memory address ADDR:
   register deferred "(r)", autodecrement "-(r)", autoincrement
   "(r)+", and the offset/base/index forms "off(base)[index]".
   NOTE(review): this listing is elided -- the case labels, many
   braces, and several fall-through paths are missing; confirm the
   control flow against the complete source.  */
251 print_operand_address (FILE * file, rtx addr)
254 rtx reg1, breg, ireg;
258 switch (GET_CODE (addr))
262 addr = XEXP (addr, 0);
266 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
270 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
274 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
278 /* There can be either two or three things added here. One must be a
279 REG. One can be either a REG or a MULT of a REG and an appropriate
280 constant, and the third can only be a constant or a MEM.
282 We get these two or three things and put the constant or MEM in
283 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
284 a register and can't tell yet if it is a base or index register,
287 reg1 = 0; ireg = 0; breg = 0; offset = 0;
/* First pass: peel one term (constant/MEM, MULT, or REG) off the
   outer PLUS and keep the rest in ADDR.  */
289 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
290 || MEM_P (XEXP (addr, 0)))
292 offset = XEXP (addr, 0);
293 addr = XEXP (addr, 1);
295 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
296 || MEM_P (XEXP (addr, 1)))
298 offset = XEXP (addr, 1);
299 addr = XEXP (addr, 0);
301 else if (GET_CODE (XEXP (addr, 1)) == MULT)
303 ireg = XEXP (addr, 1);
304 addr = XEXP (addr, 0);
306 else if (GET_CODE (XEXP (addr, 0)) == MULT)
308 ireg = XEXP (addr, 0);
309 addr = XEXP (addr, 1);
311 else if (REG_P (XEXP (addr, 1)))
313 reg1 = XEXP (addr, 1);
314 addr = XEXP (addr, 0);
316 else if (REG_P (XEXP (addr, 0)))
318 reg1 = XEXP (addr, 0);
319 addr = XEXP (addr, 1);
331 else if (GET_CODE (addr) == MULT)
/* Second pass: ADDR is still a PLUS of the remaining two terms;
   classify each side the same way as above.  */
335 gcc_assert (GET_CODE (addr) == PLUS);
336 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
337 || MEM_P (XEXP (addr, 0)))
/* Fold constant displacements together.  */
341 if (CONST_INT_P (offset))
342 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
345 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
346 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
349 offset = XEXP (addr, 0);
351 else if (REG_P (XEXP (addr, 0)))
354 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
356 reg1 = XEXP (addr, 0);
360 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
362 ireg = XEXP (addr, 0);
365 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
366 || MEM_P (XEXP (addr, 1)))
370 if (CONST_INT_P (offset))
371 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
374 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
375 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
378 offset = XEXP (addr, 1);
380 else if (REG_P (XEXP (addr, 1)))
383 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
385 reg1 = XEXP (addr, 1);
389 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
391 ireg = XEXP (addr, 1);
395 /* If REG1 is nonzero, figure out if it is a base or index register. */
399 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
402 || (flag_pic && symbolic_operand (offset, SImode)))))
413 if (flag_pic && symbolic_operand (offset, SImode))
/* A symbolic offset with both a base and an index register cannot
   be encoded PIC-safely on the VAX.  */
418 output_operand_lossage ("symbol used with both base and indexed registers");
421 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
422 if (flag_pic > 1 && GET_CODE (offset) == CONST
423 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
424 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
427 output_operand_lossage ("symbol with offset used in PIC mode");
431 /* symbol(reg) isn't PIC, but symbol[reg] is. */
/* Emit the pieces in VAX order: displacement, (base), [index].  */
440 output_address (offset);
444 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
448 if (GET_CODE (ireg) == MULT)
449 ireg = XEXP (ireg, 0);
450 gcc_assert (REG_P (ireg));
451 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
456 output_addr_const (file, addr);
/* Output operand X to FILE applying the VAX-specific print code
   CODE: '|' register prefix, 'c'/'C' (reversed) condition names, and
   the constant transformations (increment, complement, negate, mask)
   used by the output templates below.  NOTE(review): elided listing
   -- the codes handled before the first line shown here and several
   else-branches are missing; confirm against the complete source.  */
461 print_operand (FILE *file, rtx x, int code)
464 fputc (ASM_DOUBLE_CHAR, file);
465 else if (code == '|')
466 fputs (REGISTER_PREFIX, file);
467 else if (code == 'c')
468 fputs (cond_name (x), file);
469 else if (code == 'C')
470 fputs (rev_cond_name (x), file);
471 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
472 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
473 else if (code == 'P' && CONST_INT_P (x))
474 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
475 else if (code == 'N' && CONST_INT_P (x))
476 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
477 /* rotl instruction cannot deal with negative arguments. */
478 else if (code == 'R' && CONST_INT_P (x))
479 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
480 else if (code == 'H' && CONST_INT_P (x))
481 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
482 else if (code == 'h' && CONST_INT_P (x))
483 fprintf (file, "$%d", (short) - INTVAL (x));
484 else if (code == 'B' && CONST_INT_P (x))
485 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
486 else if (code == 'b' && CONST_INT_P (x))
487 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
488 else if (code == 'M' && CONST_INT_P (x))
489 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
491 fprintf (file, "%s", reg_names[REGNO (x)]);
493 output_address (XEXP (x, 0));
494 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
/* F-float immediate: decimal value prefixed "$0f".  */
497 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
498 sizeof (dstr), 0, 1);
499 fprintf (file, "$0f%s", dstr);
501 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
/* D- or G-float immediate depending on ASM_DOUBLE_CHAR.  */
504 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
505 sizeof (dstr), 0, 1);
506 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
510 if (flag_pic > 1 && symbolic_operand (x, SImode))
513 output_operand_lossage ("symbol used as immediate operand");
516 output_addr_const (file, x);
/* cond_name / rev_cond_name: map a comparison RTX code to its VAX
   branch condition-suffix string (reversed sense for rev_cond_name).
   NOTE(review): both bodies are almost entirely elided in this
   listing -- only the switch heads remain.  */
523 switch (GET_CODE (op))
552 rev_cond_name (rtx op)
554 switch (GET_CODE (op))
/* Return true if C is a CONST_DOUBLE the VAX can encode as a short
   floating-point literal: 0.0/1.0/-1.0, or a value (or its exact
   reciprocal) matching one of a small set of candidates checked in
   the loop below.  NOTE(review): elided listing -- the construction
   of X inside the loop and the return statements are missing.  */
583 vax_float_literal (rtx c)
585 enum machine_mode mode;
586 REAL_VALUE_TYPE r, s;
589 if (GET_CODE (c) != CONST_DOUBLE)
/* The tiny constants (0, 1, -1) are always encodable.  */
594 if (c == const_tiny_rtx[(int) mode][0]
595 || c == const_tiny_rtx[(int) mode][1]
596 || c == const_tiny_rtx[(int) mode][2])
599 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
601 for (i = 0; i < 7; i++)
605 REAL_VALUE_FROM_INT (s, x, 0, mode);
607 if (REAL_VALUES_EQUAL (r, s))
/* Also accept the exact reciprocal of the candidate value.  */
609 ok = exact_real_inverse (mode, &s);
611 if (REAL_VALUES_EQUAL (r, s))
618 /* Return the cost in cycles of a memory address, relative to register
621 Each of the following adds the indicated number of cycles:
625 1 - indexing and/or offset(register)
/* Helper for vax_address_cost and vax_rtx_costs: estimate the cycle
   cost of address ADDR on a CVAX-class machine.  NOTE(review):
   elided listing -- several case labels and the loop driving the
   plus_op0/plus_op1 decomposition are missing here.  */
630 vax_address_cost_1 (rtx addr)
632 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
633 rtx plus_op0 = 0, plus_op1 = 0;
635 switch (GET_CODE (addr))
645 indexed = 1; /* 2 on VAX 2 */
648 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
650 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
654 offset = 1; /* 2 on VAX 2 */
656 case LABEL_REF: /* this is probably a byte offset from the pc */
662 plus_op1 = XEXP (addr, 0);
664 plus_op0 = XEXP (addr, 0);
665 addr = XEXP (addr, 1);
668 indir = 2; /* 3 on VAX 2 */
669 addr = XEXP (addr, 0);
675 /* Up to 3 things can be added in an address. They are stored in
676 plus_op0, plus_op1, and addr. */
690 /* Indexing and register+offset can both be used (except on a VAX 2)
691 without increasing execution time over either one alone. */
692 if (reg && indexed && offset)
693 return reg + indir + offset + predec;
694 return reg + indexed + indir + offset + predec;
/* Implements TARGET_ADDRESS_COST: a bare register costs 1; any other
   address adds the component costs from vax_address_cost_1.  */
698 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
700 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
703 /* Cost of an expression on a VAX. This version has costs tuned for the
704 CVAX chip (found in the VAX 3 series) with comments for variations on
707 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
708 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
709 costs on a per cpu basis. */
/* Implements TARGET_RTX_COSTS.  Sets *TOTAL to the estimated cycle
   cost of X appearing under OUTER_CODE, then adds the cost of any
   operand that is not a register or short literal.  NOTE(review):
   heavily elided listing -- most case labels, braces and early
   returns are missing; confirm control flow against the complete
   source.  */
712 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
713 bool speed ATTRIBUTE_UNUSED)
715 enum machine_mode mode = GET_MODE (x);
716 int i = 0; /* may be modified in switch */
717 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
721 /* On a VAX, constants from 0..63 are cheap because they can use the
722 1 byte literal constant format. Compare to -1 should be made cheap
723 so that decrement-and-branch insns can be formed more easily (if
724 the value -1 is copied to a register some decrement-and-branch
725 patterns will not match). */
732 if (outer_code == AND)
/* AND complements its first operand, so the complement must fit
   the 6-bit short literal for the cheap encoding.  */
734 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
737 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
738 || (outer_code == COMPARE
740 || ((outer_code == PLUS || outer_code == MINUS)
741 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
755 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
756 *total = vax_float_literal (x) ? 5 : 8;
758 *total = ((CONST_DOUBLE_HIGH (x) == 0
759 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
760 || (outer_code == PLUS
761 && CONST_DOUBLE_HIGH (x) == -1
762 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
768 return true; /* Implies register operand. */
772 return true; /* Implies register operand. */
778 *total = 16; /* 4 on VAX 9000 */
781 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
784 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
789 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
792 *total = MAX_COST; /* Mode is not supported. */
800 *total = MAX_COST; /* Mode is not supported. */
808 *total = 30; /* Highly variable. */
809 else if (mode == DFmode)
810 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
813 *total = 11; /* 25 on VAX 2 */
823 *total = MAX_COST; /* Mode is not supported. */
830 *total = (6 /* 4 on VAX 9000 */
831 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
835 *total = 7; /* 17 on VAX 2 */
844 *total = 10; /* 6 on VAX 9000 */
849 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
850 if (CONST_INT_P (XEXP (x, 1)))
851 fmt = "e"; /* all constant rotate counts are short */
856 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
857 /* Small integer operands can use subl2 and addl2. */
858 if ((CONST_INT_P (XEXP (x, 1)))
859 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
869 /* AND is special because the first operand is complemented. */
871 if (CONST_INT_P (XEXP (x, 0)))
873 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
883 else if (mode == SFmode)
885 else if (mode == DImode)
901 if (mode == DImode || mode == DFmode)
902 *total = 5; /* 7 on VAX 2 */
904 *total = 3; /* 4 on VAX 2 */
906 if (!REG_P (x) && GET_CODE (x) != POST_INC)
907 *total += vax_address_cost_1 (x);
913 *total = 3; /* FIXME: Costs need to be checked */
920 /* Now look inside the expression. Operands which are not registers or
921 short constants add to the cost.
923 FMT and I may have been adjusted in the switch above for instructions
924 which require special handling. */
926 while (*fmt++ == 'e')
928 rtx op = XEXP (x, i);
931 code = GET_CODE (op);
933 /* A NOT is likely to be found as the first operand of an AND
934 (in which case the relevant cost is of the operand inside
935 the not) and not likely to be found anywhere else. */
937 op = XEXP (op, 0), code = GET_CODE (op);
942 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
943 && GET_MODE (x) != QImode)
944 *total += 1; /* 2 on VAX 2 */
949 *total += 1; /* 2 on VAX 2 */
952 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
954 /* Registers are faster than floating point constants -- even
955 those constants which can be encoded in a single byte. */
956 if (vax_float_literal (op))
959 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
963 if (CONST_DOUBLE_HIGH (op) != 0
964 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
969 *total += 1; /* 2 on VAX 2 */
970 if (!REG_P (XEXP (op, 0)))
971 *total += vax_address_cost_1 (XEXP (op, 0));
984 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
985 Used for C++ multiple inheritance.
986 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
987 addl2 $DELTA, 4(ap) #adjust first argument
988 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
/* Implements TARGET_ASM_OUTPUT_MI_THUNK.  NOTE(review): the `delta'
   and `function' parameter lines are elided in this listing.  */
992 vax_output_mi_thunk (FILE * file,
993 tree thunk ATTRIBUTE_UNUSED,
995 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
/* 0x0ffc is the conservative ^m<r2..r11> entry mask shown above.  */
998 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
999 asm_fprintf (file, ",4(%Rap)\n");
1000 fprintf (file, "\tjmp ");
1001 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
/* +2 skips FUNCTION's own entry mask word.  */
1002 fprintf (file, "+2\n");
/* Implements TARGET_STRUCT_VALUE_RTX: aggregates are returned via a
   pointer passed in VAX_STRUCT_VALUE_REGNUM, for both incoming and
   outgoing calls.  */
1006 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1007 int incoming ATTRIBUTE_UNUSED)
1009 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
/* Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE: record the hard
   frame pointer (not the virtual one) in the setjmp buffer.  */
1013 vax_builtin_setjmp_frame_value (void)
1015 return hard_frame_pointer_rtx;
1018 /* Worker function for NOTICE_UPDATE_CC. */
/* Record in cc_status how EXP (the body of INSN) affects the
   condition codes, so later redundant compares can be elided.
   NOTE(review): elided listing -- the CC_STATUS_INIT calls and some
   switch cases are missing here.  */
1021 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1023 if (GET_CODE (exp) == SET)
1025 if (GET_CODE (SET_SRC (exp)) == CALL)
1027 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1028 && GET_CODE (SET_DEST (exp)) != PC)
1030 cc_status.flags = 0;
1031 /* The integer operations below don't set carry or
1032 set it in an incompatible way. That's ok though
1033 as the Z bit is all we need when doing unsigned
1034 comparisons on the result of these insns (since
1035 they're always with 0). Set CC_NO_OVERFLOW to
1036 generate the correct unsigned branches. */
1037 switch (GET_CODE (SET_SRC (exp)))
1040 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1048 cc_status.flags = CC_NO_OVERFLOW;
1053 cc_status.value1 = SET_DEST (exp);
1054 cc_status.value2 = SET_SRC (exp);
1057 else if (GET_CODE (exp) == PARALLEL
1058 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1060 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1062 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1064 cc_status.flags = 0;
1065 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1066 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1069 /* PARALLELs whose first element sets the PC are aob,
1070 sob insns. They do change the cc's. */
/* Invalidate value2 when the SET clobbers something it depends on.  */
1075 if (cc_status.value1 && REG_P (cc_status.value1)
1077 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1078 cc_status.value2 = 0;
1079 if (cc_status.value1 && MEM_P (cc_status.value1)
1081 && MEM_P (cc_status.value2))
1082 cc_status.value2 = 0;
1083 /* Actual condition, one line up, should be that value2's address
1084 depends on value1, but that is too much of a pain. */
1087 /* Output integer move instructions. */
/* Return the assembler template for an integer move of MODE from
   operands[1] to operands[0], choosing the shortest encoding
   (clr/movz/cvt/mcom/ash/movq tricks for constants).  May clobber
   operands[1]/operands[2] to set up the returned template.
   NOTE(review): heavily elided listing -- the switch on MODE, many
   braces and early returns are missing; confirm against the full
   source.  */
1090 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1091 enum machine_mode mode)
1094 const char *pattern_hi, *pattern_lo;
1099 if (operands[1] == const0_rtx)
1101 if (TARGET_QMATH && optimize_size
1102 && (CONST_INT_P (operands[1])
1103 || GET_CODE (operands[1]) == CONST_DOUBLE))
1105 unsigned HOST_WIDE_INT hval, lval;
1108 if (GET_CODE (operands[1]) == CONST_DOUBLE)
/* CONST_DOUBLE DImode constants only occur on 32-bit hosts.  */
1110 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1112 /* Make sure only the low 32 bits are valid. */
1113 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1114 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1118 lval = INTVAL (operands[1]);
1122 /* Here we see if we are trying to see if the 64bit value is really
1123 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1124 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1125 8 bytes - 1 shift byte - 1 short literal byte. */
1127 && (n = exact_log2 (lval & (- lval))) != -1
1128 && (lval >> n) < 64)
1132 /* On 32bit platforms, if the 6bits didn't overflow into the
1133 upper 32bit value that value better be 0. If we have
1134 overflowed, make sure it wasn't too much. */
1135 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1137 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1138 n = 0; /* failure */
1140 lval |= hval << (32 - n);
1142 /* If n is 0, then ashq is not the best way to emit this. */
1145 operands[1] = GEN_INT (lval);
1146 operands[2] = GEN_INT (n);
1147 return "ashq %2,%1,%0";
1149 #if HOST_BITS_PER_WIDE_INT == 32
1151 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1152 upper 32bit value. */
1154 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1155 && (hval >> n) < 64)
1157 operands[1] = GEN_INT (hval >> n);
1158 operands[2] = GEN_INT (n + 32);
1159 return "ashq %2,%1,%0";
/* Large DImode constant: split into two SImode moves unless a
   single movq is shorter.  */
1165 && (!MEM_P (operands[0])
1166 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1167 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1168 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1169 && ((CONST_INT_P (operands[1])
1170 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1171 || GET_CODE (operands[1]) == CONST_DOUBLE))
1173 hi[0] = operands[0];
1174 hi[1] = operands[1];
1176 split_quadword_operands (insn, SET, hi, lo, 2);
1178 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1179 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1181 /* The patterns are just movl/movl or pushl/pushl then a movq will
1182 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1183 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1185 if ((!strncmp (pattern_lo, "movl", 4)
1186 && !strncmp (pattern_hi, "movl", 4))
1187 || (!strncmp (pattern_lo, "pushl", 5)
1188 && !strncmp (pattern_hi, "pushl", 5)))
1189 return "movq %1,%0";
1191 if (MEM_P (operands[0])
1192 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
/* Predecrement destination: the high word must be stored first.  */
1194 output_asm_insn (pattern_hi, hi);
1195 operands[0] = lo[0];
1196 operands[1] = lo[1];
1197 operands[2] = lo[2];
1202 output_asm_insn (pattern_lo, lo);
1203 operands[0] = hi[0];
1204 operands[1] = hi[1];
1205 operands[2] = hi[2];
1209 return "movq %1,%0";
/* SImode cases follow.  */
1212 if (symbolic_operand (operands[1], SImode))
1214 if (push_operand (operands[0], SImode))
1215 return "pushab %a1";
1216 return "movab %a1,%0";
1219 if (operands[1] == const0_rtx)
1221 if (push_operand (operands[1], SImode))
1226 if (CONST_INT_P (operands[1])
1227 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1229 HOST_WIDE_INT i = INTVAL (operands[1]);
1231 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1232 return "mcoml %N1,%0";
1233 if ((unsigned HOST_WIDE_INT)i < 0x100)
1234 return "movzbl %1,%0";
1235 if (i >= -0x80 && i < 0)
1236 return "cvtbl %1,%0";
1238 && (n = exact_log2 (i & (-i))) != -1
1239 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1241 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1242 operands[2] = GEN_INT (n);
1243 return "ashl %2,%1,%0";
1245 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1246 return "movzwl %1,%0";
1247 if (i >= -0x8000 && i < 0)
1248 return "cvtwl %1,%0";
1250 if (push_operand (operands[0], SImode))
1252 return "movl %1,%0";
/* HImode.  */
1255 if (CONST_INT_P (operands[1]))
1257 HOST_WIDE_INT i = INTVAL (operands[1]);
1260 else if ((unsigned HOST_WIDE_INT)i < 64)
1261 return "movw %1,%0";
1262 else if ((unsigned HOST_WIDE_INT)~i < 64)
1263 return "mcomw %H1,%0";
1264 else if ((unsigned HOST_WIDE_INT)i < 256)
1265 return "movzbw %1,%0";
1266 else if (i >= -0x80 && i < 0)
1267 return "cvtbw %1,%0";
1269 return "movw %1,%0";
/* QImode.  */
1272 if (CONST_INT_P (operands[1]))
1274 HOST_WIDE_INT i = INTVAL (operands[1]);
1277 else if ((unsigned HOST_WIDE_INT)~i < 64)
1278 return "mcomb %B1,%0";
1280 return "movb %1,%0";
1287 /* Output integer add instructions.
1289 The space-time-opcode tradeoffs for addition vary by model of VAX.
1291 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1292 but it not faster on other models.
1294 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1295 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1296 a register is used in an address too soon after it is set.
1297 Compromise by using movab only when it is shorter than the add
1298 or the base register in the address is one of sp, ap, and fp,
1299 which are not modified very often. */
/* Return the assembler template for an integer add of MODE, or emit
   helper insns and return the final template for DImode.
   NOTE(review): heavily elided listing -- the switch on MODE, many
   braces and several returns are missing; confirm against the
   complete source.  */
1302 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1309 const char *pattern;
/* DImode path (the "&& 0" here disables this branch -- presumably a
   deliberate kill-switch or leftover debugging; confirm).  */
1313 if (TARGET_QMATH && 0)
1316 split_quadword_operands (insn, PLUS, operands, low, 3);
1320 gcc_assert (rtx_equal_p (operands[0], operands[1]));
/* NOTE(review): "ADDRESSS" (triple S) looks like a misspelling of
   NO_EXTERNAL_INDIRECT_ADDRESS, which would make these asserts dead;
   compare with the correctly spelled guard used elsewhere in this
   file.  */
1321 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1322 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1323 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1326 /* No reason to add a 0 to the low part and thus no carry, so just
1327 emit the appropriate add/sub instruction. */
1328 if (low[2] == const0_rtx)
1329 return vax_output_int_add (NULL, operands, SImode);
1331 /* Are we doing addition or subtraction? */
1332 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1334 /* We can't use vax_output_int_add since some the patterns don't
1335 modify the carry bit. */
1338 if (low[2] == constm1_rtx)
1339 pattern = "decl %0";
1341 pattern = "subl2 $%n2,%0";
1345 if (low[2] == const1_rtx)
1346 pattern = "incl %0";
1348 pattern = "addl2 %2,%0";
1350 output_asm_insn (pattern, low);
1352 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1353 two 32bit parts, we complement each and then add one to
1354 low part. We know that the low part can't overflow since
1355 it's value can never be 0. */
1357 return "sbwc %N2,%0";
1358 return "adwc %2,%0";
1361 /* Add low parts. */
1362 if (rtx_equal_p (operands[0], operands[1]))
1364 if (low[2] == const0_rtx)
1365 /* Should examine operand, punt if not POST_INC. */
1366 pattern = "tstl %0", carry = 0;
1367 else if (low[2] == const1_rtx)
1368 pattern = "incl %0";
1370 pattern = "addl2 %2,%0";
1374 if (low[2] == const0_rtx)
1375 pattern = "movl %1,%0", carry = 0;
1377 pattern = "addl3 %2,%1,%0";
1380 output_asm_insn (pattern, low);
1382 /* If CARRY is 0, we don't have any carry value to worry about. */
1383 return get_insn_template (CODE_FOR_addsi3, insn);
1384 /* %0 = C + %1 + %2 */
1385 if (!rtx_equal_p (operands[0], operands[1]))
1386 output_asm_insn ((operands[1] == const0_rtx
1388 : "movl %1,%0"), operands);
1389 return "adwc %2,%0";
/* SImode cases.  */
1393 if (rtx_equal_p (operands[0], operands[1]))
1395 if (operands[2] == const1_rtx)
1397 if (operands[2] == constm1_rtx)
1399 if (CONST_INT_P (operands[2])
1400 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1401 return "subl2 $%n2,%0";
1402 if (CONST_INT_P (operands[2])
1403 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1404 && REG_P (operands[1])
1405 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1406 || REGNO (operands[1]) > 11))
1407 return "movab %c2(%1),%0";
1408 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1409 return "movab %a2[%0],%0";
1410 return "addl2 %2,%0";
1413 if (rtx_equal_p (operands[0], operands[2]))
1415 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1416 return "movab %a1[%0],%0";
1417 return "addl2 %1,%0";
1420 if (CONST_INT_P (operands[2])
1421 && INTVAL (operands[2]) < 32767
1422 && INTVAL (operands[2]) > -32768
1423 && REG_P (operands[1])
1424 && push_operand (operands[0], SImode))
1425 return "pushab %c2(%1)";
1427 if (CONST_INT_P (operands[2])
1428 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1429 return "subl3 $%n2,%1,%0";
1431 if (CONST_INT_P (operands[2])
1432 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1433 && REG_P (operands[1])
1434 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1435 || REGNO (operands[1]) > 11))
1436 return "movab %c2(%1),%0";
1438 /* Add this if using gcc on a VAX 3xxx:
1439 if (REG_P (operands[1]) && REG_P (operands[2]))
1440 return "movab (%1)[%2],%0";
1443 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1445 if (push_operand (operands[0], SImode))
1446 return "pushab %a2[%1]";
1447 return "movab %a2[%1],%0";
1450 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1452 if (push_operand (operands[0], SImode))
1453 return "pushab %a1[%2]";
1454 return "movab %a1[%2],%0";
1457 if (flag_pic && REG_P (operands[0])
1458 && symbolic_operand (operands[2], SImode))
1459 return "movab %a2,%0;addl2 %1,%0";
/* NOTE(review): the two symbolic_operand tests below both check
   operands[1] -- the second was almost certainly meant to test
   operands[2]; confirm against the complete source / upstream.  */
1462 && (symbolic_operand (operands[1], SImode)
1463 || symbolic_operand (operands[1], SImode)))
1466 return "addl3 %1,%2,%0";
/* HImode.  */
1469 if (rtx_equal_p (operands[0], operands[1]))
1471 if (operands[2] == const1_rtx)
1473 if (operands[2] == constm1_rtx)
1475 if (CONST_INT_P (operands[2])
1476 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1477 return "subw2 $%n2,%0";
1478 return "addw2 %2,%0";
1480 if (rtx_equal_p (operands[0], operands[2]))
1481 return "addw2 %1,%0";
1482 if (CONST_INT_P (operands[2])
1483 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1484 return "subw3 $%n2,%1,%0";
1485 return "addw3 %1,%2,%0";
/* QImode.  */
1488 if (rtx_equal_p (operands[0], operands[1]))
1490 if (operands[2] == const1_rtx)
1492 if (operands[2] == constm1_rtx)
1494 if (CONST_INT_P (operands[2])
1495 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1496 return "subb2 $%n2,%0";
1497 return "addb2 %2,%0";
1499 if (rtx_equal_p (operands[0], operands[2]))
1500 return "addb2 %1,%0";
1501 if (CONST_INT_P (operands[2])
1502 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1503 return "subb3 $%n2,%1,%0";
1504 return "addb3 %1,%2,%0";
/* Return the assembler template for an integer subtract of MODE,
   emitting helper insns for DImode (negate and subtract-with-carry
   sequences).  NOTE(review): elided listing -- the switch on MODE,
   braces and the HImode/QImode cases are missing here; confirm
   against the complete source.  */
1512 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1519 const char *pattern;
/* DImode path (the "&& 0" disables this branch -- presumably a
   deliberate kill-switch or leftover debugging; confirm).  */
1522 if (TARGET_QMATH && 0)
1525 split_quadword_operands (insn, MINUS, operands, low, 3);
1529 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1531 /* Negation is tricky. It's basically complement and increment.
1532 Negate hi, then lo, and subtract the carry back. */
1533 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1534 || (MEM_P (operands[0])
1535 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1536 fatal_insn ("illegal operand detected", insn);
1537 output_asm_insn ("mnegl %2,%0", operands);
1538 output_asm_insn ("mnegl %2,%0", low);
1539 return "sbwc $0,%0";
1541 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1542 gcc_assert (rtx_equal_p (low[0], low[1]));
1543 if (low[2] == const1_rtx)
1544 output_asm_insn ("decl %0", low);
1546 output_asm_insn ("subl2 %2,%0", low);
1547 return "sbwc %2,%0";
1550 /* Subtract low parts. */
1551 if (rtx_equal_p (operands[0], operands[1]))
1553 if (low[2] == const0_rtx)
1554 pattern = 0, carry = 0;
1555 else if (low[2] == constm1_rtx)
1556 pattern = "decl %0";
1558 pattern = "subl2 %2,%0";
1562 if (low[2] == constm1_rtx)
1563 pattern = "decl %0";
1564 else if (low[2] == const0_rtx)
1565 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1567 pattern = "subl3 %2,%1,%0";
1570 output_asm_insn (pattern, low);
/* Subtract high parts with borrow (when CARRY is set).  */
1573 if (!rtx_equal_p (operands[0], operands[1]))
1574 return "movl %1,%0;sbwc %2,%0";
1575 return "sbwc %2,%0";
1576 /* %0 = %2 - %1 - C */
1578 return get_insn_template (CODE_FOR_subsi3, insn);
1586 /* True if X is an rtx for a constant that is a valid address. */
1589 legitimate_constant_address_p (rtx x)
1591 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1592 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1594 if (GET_CODE (x) != CONST)
1596 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1598 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1599 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1605 /* True if the constant value X is a legitimate general operand.
1606 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1609 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */

#define INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */

#define BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1626 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1628 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1629 are no SYMBOL_REFs for external symbols present. */
1632 indirectable_constant_address_p (rtx x, bool indirect)
1634 if (GET_CODE (x) == SYMBOL_REF)
1635 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1637 if (GET_CODE (x) == CONST)
1639 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1640 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1642 return CONSTANT_ADDRESS_P (x);
1645 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1648 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1650 return CONSTANT_ADDRESS_P (x);
1653 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1655 /* True if X is an address which can be indirected. External symbols
1656 could be in a sharable image library, so we disallow those. */
1659 indirectable_address_p (rtx x, bool strict, bool indirect)
1661 if (indirectable_constant_address_p (x, indirect)
1662 || BASE_REGISTER_P (x, strict))
1664 if (GET_CODE (x) != PLUS
1665 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1666 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1668 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1671 /* Return true if x is a valid address not using indexing.
1672 (This much is the easy part.) */
1674 nonindexed_address_p (rtx x, bool strict)
1679 extern rtx *reg_equiv_mem;
1680 if (! reload_in_progress
1681 || reg_equiv_mem[REGNO (x)] == 0
1682 || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
1685 if (indirectable_constant_address_p (x, false))
1687 if (indirectable_address_p (x, strict, false))
1689 xfoo0 = XEXP (x, 0);
1690 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1692 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1693 && BASE_REGISTER_P (xfoo0, strict))
1698 /* True if PROD is either a reg times size of mode MODE and MODE is less
1699 than or equal 8 bytes, or just a reg if MODE is one byte. */
1702 index_term_p (rtx prod, enum machine_mode mode, bool strict)
1706 if (GET_MODE_SIZE (mode) == 1)
1707 return BASE_REGISTER_P (prod, strict);
1709 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1712 xfoo0 = XEXP (prod, 0);
1713 xfoo1 = XEXP (prod, 1);
1715 if (CONST_INT_P (xfoo0)
1716 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1717 && INDEX_REGISTER_P (xfoo1, strict))
1720 if (CONST_INT_P (xfoo1)
1721 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1722 && INDEX_REGISTER_P (xfoo0, strict))
1728 /* Return true if X is the sum of a register
1729 and a valid index term for mode MODE. */
1731 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1735 if (GET_CODE (x) != PLUS)
1738 xfoo0 = XEXP (x, 0);
1739 xfoo1 = XEXP (x, 1);
1741 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1744 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1750 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1752 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1754 if (!CONSTANT_ADDRESS_P (xfoo0))
1756 if (BASE_REGISTER_P (xfoo1, strict))
1757 return !flag_pic || mode == QImode;
1758 if (flag_pic && symbolic_operand (xfoo0, SImode))
1760 return reg_plus_index_p (xfoo1, mode, strict);
1763 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1764 that is a valid memory address for an instruction.
1765 The MODE argument is the machine mode for the MEM expression
1766 that wants to use this address. */
1768 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1772 if (nonindexed_address_p (x, strict))
1775 if (GET_CODE (x) != PLUS)
1778 /* Handle <address>[index] represented with index-sum outermost */
1780 xfoo0 = XEXP (x, 0);
1781 xfoo1 = XEXP (x, 1);
1783 if (index_term_p (xfoo0, mode, strict)
1784 && nonindexed_address_p (xfoo1, strict))
1787 if (index_term_p (xfoo1, mode, strict)
1788 && nonindexed_address_p (xfoo0, strict))
1791 /* Handle offset(reg)[index] with offset added outermost */
1793 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1794 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1800 /* Return true if x (a legitimate address expression) has an effect that
1801 depends on the machine mode it is used for. On the VAX, the predecrement
1802 and postincrement address depend thus (the amount of decrement or
1803 increment being the length of the operand) and all indexed address depend
1804 thus (because the index scale factor is the length of the operand). */
1807 vax_mode_dependent_address_p (rtx x)
1811 /* Auto-increment cases are now dealt with generically in recog.c. */
1812 if (GET_CODE (x) != PLUS)
1815 xfoo0 = XEXP (x, 0);
1816 xfoo1 = XEXP (x, 1);
1818 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1820 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1822 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1824 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1831 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1833 if (illegal_addsub_di_memory_operand (x, mode))
1835 rtx addr = XEXP (x, 0);
1836 rtx temp = gen_reg_rtx (Pmode);
1838 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1839 if (GET_CODE (addr) == CONST && flag_pic)
1841 offset = XEXP (XEXP (addr, 0), 1);
1842 addr = XEXP (XEXP (addr, 0), 0);
1845 emit_move_insn (temp, addr);
1847 temp = gen_rtx_PLUS (Pmode, temp, offset);
1848 x = gen_rtx_MEM (DImode, temp);
/* NOTE(review): lossy extraction of vax_expand_addsub_di_operands --
   the "void" return-type line, braces, several branch heads (the
   if (code == PLUS), the !TARGET_QMATH / hi_only / else structure,
   "else gcc_unreachable ();", "code = PLUS;", the final else arms and
   the `temp' declaration) are missing, and each surviving line carries
   a stray original-file line number.  Only comments are added; restore
   from upstream GCC vax.c before compiling.

   Purpose (per surviving code): expand a DImode add (PLUS) or subtract
   (MINUS) into either the old-style pattern, a high-word-only SImode
   op, or a carry-propagating adc/sbc pair.  */
1854 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
/* hi_only: the low word of the second source is zero, so only the high
   word needs arithmetic.  */
1856 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1859 rtx (*gen_old_insn)(rtx, rtx, rtx);
1860 rtx (*gen_si_insn)(rtx, rtx, rtx);
1861 rtx (*gen_insn)(rtx, rtx, rtx);
/* Select the generator trio for PLUS ...  */
1865 gen_old_insn = gen_adddi3_old;
1866 gen_si_insn = gen_addsi3;
1867 gen_insn = gen_adcdi3;
1869 else if (code == MINUS)
/* ... or for MINUS.  */
1871 gen_old_insn = gen_subdi3_old;
1872 gen_si_insn = gen_subsi3;
1873 gen_insn = gen_sbcdi3;
1878 /* If this is addition (thus operands are commutative) and if there is one
1879 addend that duplicates the desination, we want that addend to be the
1882 && rtx_equal_p (operands[0], operands[2])
1883 && !rtx_equal_p (operands[1], operands[2]))
/* Swap the addends (the swap's other half is in a dropped line).  */
1886 operands[2] = operands[1];
/* Non-QMATH path: just use the old-style DImode pattern.  */
1892 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
/* hi_only path: copy source to a register destination if needed ...  */
1896 if (!rtx_equal_p (operands[0], operands[1])
1897 && (REG_P (operands[0]) && MEM_P (operands[1])))
1899 emit_move_insn (operands[0], operands[1]);
1900 operands[1] = operands[0];
1903 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1904 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1905 operands[2] = fixup_mathdi_operand (operands[2], DImode);
/* ... move the (unchanged) low word, then operate on the high word.  */
1907 if (!rtx_equal_p (operands[0], operands[1]))
1908 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1909 operand_subword (operands[1], 0, 0, DImode));
1911 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1912 operand_subword (operands[1], 1, 0, DImode),
1913 operand_subword (operands[2], 1, 0, DImode)));
1917 /* If are adding the same value together, that's really a multiply by 2,
1918 and that's just a left shift of 1. */
1919 if (rtx_equal_p (operands[1], operands[2]))
1921 gcc_assert (code != MINUS);
1922 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1926 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1928 /* If an operand is the same as operand[0], use the operand[0] rtx
1929 because fixup will an equivalent rtx but not an equal one. */
1931 if (rtx_equal_p (operands[0], operands[1]))
1932 operands[1] = operands[0];
1934 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1936 if (rtx_equal_p (operands[0], operands[2]))
1937 operands[2] = operands[0];
1939 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1941 /* If we are subtracting not from ourselves [d = a - b], and because the
1942 carry ops are two operand only, we would need to do a move prior to
1943 the subtract. And if d == b, we would need a temp otherwise
1944 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1945 into d = -b, d += a. Since -b can never overflow, even if b == d,
1948 If we are doing addition, since the carry ops are two operand, if
1949 we aren't adding to ourselves, move the first addend to the
1950 destination first. */
1952 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1953 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1955 if (code == MINUS && CONSTANT_P (operands[1]))
/* d = -b; then flip to an addition of a (the "code = PLUS;" line
   presumably sat in the dropped region -- TODO confirm upstream).  */
1957 temp = gen_reg_rtx (DImode);
1958 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1960 gen_insn = gen_adcdi3;
1961 operands[2] = operands[1];
1962 operands[1] = operands[0];
1965 emit_move_insn (operands[0], operands[1]);
1968 /* Subtracting a constant will have been rewritten to an addition of the
1969 negative of that constant before we get here. */
1970 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1971 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
/* NOTE(review): lossy extraction of adjacent_operands_p -- the "bool"
   return-type line, braces, the REG_P/MEM_P guards, the switch case
   labels and default returns are missing, and each surviving line
   carries a stray original-file line number.  Only comments are added;
   restore from upstream GCC vax.c before compiling.

   Purpose (per surviving code): report whether LO and HI denote
   adjacent storage (consecutive registers, or memory locations exactly
   GET_MODE_SIZE (mode) apart) so they can be accessed as one wider
   operand.  */
1976 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1978 HOST_WIDE_INT lo_offset;
1979 HOST_WIDE_INT hi_offset;
1981 if (GET_CODE (lo) != GET_CODE (hi))
/* Registers: adjacent iff consecutively numbered (SImode only).  */
1985 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
1986 if (CONST_INT_P (lo))
1987 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
/* NOTE(review): this second CONST_INT_P (lo) test is unreachable --
   the branch above already returned for every CONST_INT.  Present in
   upstream too; presumably one of the two was meant to test something
   else.  TODO confirm against upstream GCC vax.c.  */
1988 if (CONST_INT_P (lo))
1989 return mode != SImode;
1994 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
/* Auto-increment addressing: both halves must use the same rtx.  */
2000 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2001 return rtx_equal_p (lo, hi);
/* Decompose LO's address into base + lo_offset ...  */
2003 switch (GET_CODE (lo))
2013 if (!CONST_INT_P (XEXP (lo, 1)))
2015 lo_offset = INTVAL (XEXP (lo, 1));
/* ... and HI's address into base + hi_offset.  */
2022 switch (GET_CODE (hi))
2032 if (!CONST_INT_P (XEXP (hi, 1)))
2034 hi_offset = INTVAL (XEXP (hi, 1));
2041 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
/* Same base, offsets exactly one operand-width apart.  */
2044 return rtx_equal_p (lo, hi)
2045 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2048 /* Output assembler code for a block containing the constant parts
2049 of a trampoline, leaving space for the variable parts. */
2051 /* On the VAX, the trampoline contains an entry mask and two instructions:
2053 movl $STATIC,r0 (store the functions static chain)
2054 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2057 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2059 assemble_aligned_integer (2, const0_rtx);
2060 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2061 assemble_aligned_integer (4, const0_rtx);
2062 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2063 assemble_aligned_integer (2, GEN_INT (0x9f17));
2064 assemble_aligned_integer (4, const0_rtx);
2067 /* We copy the register-mask from the function's pure code
2068 to the start of the trampoline. */
2071 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2073 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2076 emit_block_move (m_tramp, assemble_trampoline_template (),
2077 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2079 mem = adjust_address (m_tramp, HImode, 0);
2080 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2082 mem = adjust_address (m_tramp, SImode, 4);
2083 emit_move_insn (mem, cxt);
2084 mem = adjust_address (m_tramp, SImode, 11);
2085 emit_move_insn (mem, plus_constant (fnaddr, 2));
2086 emit_insn (gen_sync_istream ());
2089 /* Value is the number of bytes of arguments automatically
2090 popped when returning from a subroutine call.
2091 FUNDECL is the declaration node of the function (as a tree),
2092 FUNTYPE is the data type of the function (as a tree),
2093 or for a library call it is an identifier node for the subroutine name.
2094 SIZE is the number of bytes of arguments passed on the stack.
2096 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2099 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2100 tree funtype ATTRIBUTE_UNUSED, int size)
2102 return size > 255 * 4 ? 0 : size;
2105 /* Define where to put the arguments to a function.
2106 Value is zero to push the argument on the stack,
2107 or a hard register in which to store the argument.
2109 MODE is the argument's machine mode.
2110 TYPE is the data type of the argument (as a tree).
2111 This is null for libcalls where that information may
2113 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2114 the preceding args and about the function being called.
2115 NAMED is nonzero if this argument is a named parameter
2116 (otherwise it is an extra parameter matching an ellipsis). */
2118 /* On the VAX all args are pushed. */
2121 vax_function_arg (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2122 enum machine_mode mode ATTRIBUTE_UNUSED,
2123 const_tree type ATTRIBUTE_UNUSED,
2124 bool named ATTRIBUTE_UNUSED)
2129 /* Update the data in CUM to advance over an argument of mode MODE and
2130 data type TYPE. (TYPE is null for libcalls where that information
2131 may not be available.) */
2134 vax_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
2135 const_tree type, bool named ATTRIBUTE_UNUSED)
2137 *cum += (mode != BLKmode
2138 ? (GET_MODE_SIZE (mode) + 3) & ~3
2139 : (int_size_in_bytes (type) + 3) & ~3);