1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
35 #include "insn-attr.h"
41 #include "diagnostic-core.h"
44 #include "tm-constrs.h"
47 #include "target-def.h"
/* Forward declarations of the static target-hook workers defined below.
   NOTE(review): the continuation lines of several multi-line prototypes
   (vax_output_mi_thunk, vax_function_arg, vax_function_arg_advance) were
   lost in extraction -- restore them from the upstream file.  */
49 static void vax_option_override (void);
50 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
51 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
52 static void vax_file_start (void);
53 static void vax_init_libfuncs (void);
54 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
56 static int vax_address_cost_1 (rtx);
57 static int vax_address_cost (rtx, bool);
58 static bool vax_rtx_costs (rtx, int, int, int *, bool);
59 static rtx vax_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
61 static void vax_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
63 static rtx vax_struct_value_rtx (tree, int);
64 static rtx vax_builtin_setjmp_frame_value (void);
65 static void vax_asm_trampoline_template (FILE *);
66 static void vax_trampoline_init (rtx, tree, rtx);
67 static int vax_return_pops_args (tree, tree, int);
69 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below replaces the default target hook with
   the VAX implementation; TARGET_INITIALIZER at the end collects them
   into the targetm vtable that the middle end calls.  */
70 #undef TARGET_ASM_ALIGNED_HI_OP
71 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
73 #undef TARGET_ASM_FUNCTION_PROLOGUE
74 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
76 #undef TARGET_ASM_FILE_START
77 #define TARGET_ASM_FILE_START vax_file_start
78 #undef TARGET_ASM_FILE_START_APP_OFF
79 #define TARGET_ASM_FILE_START_APP_OFF true
81 #undef TARGET_INIT_LIBFUNCS
82 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
84 #undef TARGET_ASM_OUTPUT_MI_THUNK
85 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
86 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
87 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
89 #undef TARGET_DEFAULT_TARGET_FLAGS
90 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
92 #undef TARGET_RTX_COSTS
93 #define TARGET_RTX_COSTS vax_rtx_costs
94 #undef TARGET_ADDRESS_COST
95 #define TARGET_ADDRESS_COST vax_address_cost
97 #undef TARGET_PROMOTE_PROTOTYPES
98 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
100 #undef TARGET_FUNCTION_ARG
101 #define TARGET_FUNCTION_ARG vax_function_arg
102 #undef TARGET_FUNCTION_ARG_ADVANCE
103 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
105 #undef TARGET_STRUCT_VALUE_RTX
106 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
108 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
109 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
111 #undef TARGET_LEGITIMATE_ADDRESS_P
112 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
114 #undef TARGET_FRAME_POINTER_REQUIRED
/* The VAX CALLS convention always establishes a frame pointer.  */
115 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
117 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
118 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
119 #undef TARGET_TRAMPOLINE_INIT
120 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
121 #undef TARGET_RETURN_POPS_ARGS
122 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
124 #undef TARGET_OPTION_OVERRIDE
125 #define TARGET_OPTION_OVERRIDE vax_option_override
127 struct gcc_target targetm = TARGET_INITIALIZER;
129 /* Set global variables as needed for the options enabled. */
/* Worker for TARGET_OPTION_OVERRIDE.
   NOTE(review): extraction dropped lines here -- the `static void' line,
   braces, a conditional (original line 135) that guarded the
   REAL_MODE_FORMAT assignment (presumably `if (TARGET_G_FLOAT)' -- TODO
   confirm against upstream), and the closing #endif.  Restore before
   compiling.  */
132 vax_option_override (void)
134 /* We're VAX floating point, not IEEE floating point. */
/* Select the VAX G_float layout for DFmode instead of the default.  */
136 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
/* Give the subtarget (e.g. VMS, NetBSD) a chance to adjust options.  */
138 #ifdef SUBTARGET_OVERRIDE_OPTIONS
139 SUBTARGET_OVERRIDE_OPTIONS;
143 /* Generate the assembly code for function entry. FILE is a stdio
144 stream to output the code to. SIZE is an int: how many units of
145 temporary storage to allocate.
147 Refer to the array `regs_ever_live' to determine which registers to
148 save; `regs_ever_live[I]' is nonzero if register number I is ever
149 used in the function. This function is responsible for knowing
150 which registers should not be saved even if used. */
/* NOTE(review): extraction dropped lines throughout this function (the
   `static void' line, braces, the declarations of REGNO/MASK/OFFSET and
   the statement that accumulates MASK).  Restore from upstream.  */
153 vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
/* Scan the call-clobbered registers to build the CALLS entry mask:
   one bit per callee-saved register that is live in this function.  */
158 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
159 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
/* Emit the procedure entry mask word consumed by the CALLS insn.  */
162 fprintf (file, "\t.word 0x%x\n", mask);
164 if (dwarf2out_do_frame ())
166 const char *label = dwarf2out_cfi_label (false);
/* Describe the saved registers to the DWARF2 unwinder, walking down
   from the top of the register save area in 4-byte slots.  */
169 for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
170 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
171 dwarf2out_reg_save (label, regno, offset -= 4);
/* CALLS also pushed PC, FP and AP; record those slots and the CFA.  */
173 dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
174 dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
175 dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
176 dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
179 size -= STARTING_FRAME_OFFSET;
/* Allocate the local frame.  NOTE(review): the size test choosing
   between the movab and subl2 forms was lost in extraction.  */
181 asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
183 asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
186 /* When debugging with stabs, we want to output an extra dummy label
187 so that gas can distinguish between D_float and G_float prior to
188 processing the .stabs directive identifying type double. */
190 vax_file_start (void)
192 default_file_start ();
194 if (write_symbols == DBX_DEBUG)
195 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
198 /* We can use the BSD C library routines for the libgcc calls that are
199 still generated, since that's what they boil down to anyways. When
200 ELF, avoid the user's namespace. */
203 vax_init_libfuncs (void)
205 if (TARGET_BSD_DIVMOD)
207 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
208 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
212 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split each DImode operand into a high SImode word (left in
   OPERANDS[]) and a low SImode word (stored into LOW[]).
   NOTE(review): extraction dropped lines here -- the second parameter
   line (LOW array and count N), the `static void' line, braces, and
   at least one statement in the first loop.  Restore from upstream.  */
215 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
220 for (i = 0; i < n; i++)
223 for (i = 0; i < n; i++)
225 if (MEM_P (operands[i])
226 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
227 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
/* Autoincrement address: both halves share the address; the side
   effect sequences the two word accesses.  */
229 rtx addr = XEXP (operands[i], 0);
230 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
232 else if (optimize_size && MEM_P (operands[i])
233 && REG_P (XEXP (operands[i], 0))
234 && (code != MINUS || operands[1] != const0_rtx)
235 && find_regno_note (insn, REG_DEAD,
236 REGNO (XEXP (operands[i], 0))))
/* Base register dies in this insn, so when optimizing for size we
   may clobber it with a POST_INC for a shorter encoding.  */
238 low[i] = gen_rtx_MEM (SImode,
239 gen_rtx_POST_INC (Pmode,
240 XEXP (operands[i], 0)));
241 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
/* Default: take the two subwords of the DImode operand.  */
245 low[i] = operand_subword (operands[i], 0, 0, DImode);
246 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Print ADDR in VAX assembler address syntax to FILE.
   NOTE(review): extraction dropped many lines of this function -- the
   `void' line, braces, all the `case'/`default' labels of both switches,
   the OFFSET declaration, and several statements.  The comments below
   annotate only what survives; restore the full body from upstream.  */
252 print_operand_address (FILE * file, rtx addr)
255 rtx reg1, breg, ireg;
259 switch (GET_CODE (addr))
263 addr = XEXP (addr, 0);
/* Bare register: (rN); predecrement: -(rN); postincrement: (rN)+.  */
267 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
271 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
275 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
279 /* There can be either two or three things added here. One must be a
280 REG. One can be either a REG or a MULT of a REG and an appropriate
281 constant, and the third can only be a constant or a MEM.
283 We get these two or three things and put the constant or MEM in
284 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
285 a register and can't tell yet if it is a base or index register,
288 reg1 = 0; ireg = 0; breg = 0; offset = 0;
290 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
291 || MEM_P (XEXP (addr, 0)))
293 offset = XEXP (addr, 0);
294 addr = XEXP (addr, 1);
296 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
297 || MEM_P (XEXP (addr, 1)))
299 offset = XEXP (addr, 1);
300 addr = XEXP (addr, 0);
302 else if (GET_CODE (XEXP (addr, 1)) == MULT)
304 ireg = XEXP (addr, 1);
305 addr = XEXP (addr, 0);
307 else if (GET_CODE (XEXP (addr, 0)) == MULT)
309 ireg = XEXP (addr, 0);
310 addr = XEXP (addr, 1);
312 else if (REG_P (XEXP (addr, 1)))
314 reg1 = XEXP (addr, 1);
315 addr = XEXP (addr, 0);
317 else if (REG_P (XEXP (addr, 0)))
319 reg1 = XEXP (addr, 0);
320 addr = XEXP (addr, 1);
/* Second pass: classify whatever is left in ADDR.  */
332 else if (GET_CODE (addr) == MULT)
336 gcc_assert (GET_CODE (addr) == PLUS);
337 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
338 || MEM_P (XEXP (addr, 0)))
/* Fold a second constant displacement into OFFSET.  */
342 if (CONST_INT_P (offset))
343 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
346 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
347 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
350 offset = XEXP (addr, 0);
352 else if (REG_P (XEXP (addr, 0)))
355 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
357 reg1 = XEXP (addr, 0);
361 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
363 ireg = XEXP (addr, 0);
366 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
367 || MEM_P (XEXP (addr, 1)))
371 if (CONST_INT_P (offset))
372 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
375 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
376 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
379 offset = XEXP (addr, 1);
381 else if (REG_P (XEXP (addr, 1)))
384 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
386 reg1 = XEXP (addr, 1);
390 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
392 ireg = XEXP (addr, 1);
396 /* If REG1 is nonzero, figure out if it is a base or index register. */
400 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
403 || (flag_pic && symbolic_operand (offset, SImode)))))
/* A symbolic offset cannot be combined with base+index in PIC.  */
414 if (flag_pic && symbolic_operand (offset, SImode))
419 output_operand_lossage ("symbol used with both base and indexed registers");
422 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* Reject symbol+offset for non-local symbols under -fPIC (flag_pic > 1).  */
423 if (flag_pic > 1 && GET_CODE (offset) == CONST
424 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
425 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
428 output_operand_lossage ("symbol with offset used in PIC mode");
432 /* symbol(reg) isn't PIC, but symbol[reg] is. */
/* Finally emit: displacement, then (base), then [index].  */
441 output_address (offset);
445 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
449 if (GET_CODE (ireg) == MULT)
450 ireg = XEXP (ireg, 0);
451 gcc_assert (REG_P (ireg));
452 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
/* Anything else (default case): a plain constant address.  */
457 output_addr_const (file, addr);
/* Print operand X to FILE using the punctuation/modifier CODE of the
   VAX assembler dialect (see the machine description for the codes).
   NOTE(review): extraction dropped lines -- the `void' line, braces,
   the leading `if (code == '#')' branch, DSTR declarations, and the
   REG_P/MEM_P conditions before lines 492/494.  Restore from upstream.  */
462 print_operand (FILE *file, rtx x, int code)
465 fputc (ASM_DOUBLE_CHAR, file);
466 else if (code == '|')
467 fputs (REGISTER_PREFIX, file);
/* 'c'/'C': condition name, direct and reversed.  */
468 else if (code == 'c')
469 fputs (cond_name (x), file);
470 else if (code == 'C')
471 fputs (rev_cond_name (x), file);
472 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
473 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
474 else if (code == 'P' && CONST_INT_P (x))
475 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
476 else if (code == 'N' && CONST_INT_P (x))
477 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
478 /* rotl instruction cannot deal with negative arguments. */
479 else if (code == 'R' && CONST_INT_P (x))
480 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
/* 'H'/'h' and 'B'/'b': 16- and 8-bit complement/negation immediates.  */
481 else if (code == 'H' && CONST_INT_P (x))
482 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
483 else if (code == 'h' && CONST_INT_P (x))
484 fprintf (file, "$%d", (short) - INTVAL (x));
485 else if (code == 'B' && CONST_INT_P (x))
486 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
487 else if (code == 'b' && CONST_INT_P (x))
488 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
/* 'M': mask of INTVAL(x) low bits, complemented.  */
489 else if (code == 'M' && CONST_INT_P (x))
490 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
492 fprintf (file, "%s", reg_names[REGNO (x)]);
494 output_address (XEXP (x, 0));
/* Float immediates are printed in decimal with the 0f/0d/0g prefix.  */
495 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
498 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
499 sizeof (dstr), 0, 1);
500 fprintf (file, "$0f%s", dstr);
502 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
505 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
506 sizeof (dstr), 0, 1);
507 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
/* Symbolic immediates are not representable under -fPIC.  */
511 if (flag_pic > 1 && symbolic_operand (x, SImode))
514 output_operand_lossage ("symbol used as immediate operand");
517 output_addr_const (file, x);
524 switch (GET_CODE (op))
/* Return the assembler-mnemonic condition name for the REVERSE of
   comparison OP.  NOTE(review): the `static const char *' line, braces
   and the entire switch body (case labels returning the names) were
   lost in extraction -- restore from upstream.  */
553 rev_cond_name (rtx op)
555 switch (GET_CODE (op))
/* True if C is a floating-point constant that the VAX can encode as a
   short literal: 0, 1, 2, or a small power of two (or its exact
   inverse).  NOTE(review): extraction dropped the `bool' line, braces,
   declarations of I/X/OK, the mode extraction, early returns, and the
   loop body computing X = 1 << (i+2).  Restore from upstream.  */
584 vax_float_literal (rtx c)
586 enum machine_mode mode;
587 REAL_VALUE_TYPE r, s;
590 if (GET_CODE (c) != CONST_DOUBLE)
/* 0.0, 1.0 and 2.0 are always encodable.  */
595 if (c == const_tiny_rtx[(int) mode][0]
596 || c == const_tiny_rtx[(int) mode][1]
597 || c == const_tiny_rtx[(int) mode][2]
600 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
/* Try small powers of two and their exact reciprocals.  */
602 for (i = 0; i < 7; i++)
606 REAL_VALUE_FROM_INT (s, x, 0, mode);
608 if (REAL_VALUES_EQUAL (r, s))
610 ok = exact_real_inverse (mode, &s);
612 if (REAL_VALUES_EQUAL (r, s))
619 /* Return the cost in cycles of a memory address, relative to register
622 Each of the following adds the indicated number of cycles:
626 1 - indexing and/or offset(register)
/* NOTE(review): extraction dropped lines -- the rest of the cost table
   in the comment above, the `static int' line, braces, several case
   labels of the switch, and the loop that folds plus_op0/plus_op1 back
   into the classification.  Restore from upstream.  */
631 vax_address_cost_1 (rtx addr)
633 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
634 rtx plus_op0 = 0, plus_op1 = 0;
636 switch (GET_CODE (addr))
646 indexed = 1; /* 2 on VAX 2 */
649 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
651 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
655 offset = 1; /* 2 on VAX 2 */
657 case LABEL_REF: /* this is probably a byte offset from the pc */
663 plus_op1 = XEXP (addr, 0);
665 plus_op0 = XEXP (addr, 0);
666 addr = XEXP (addr, 1);
669 indir = 2; /* 3 on VAX 2 */
670 addr = XEXP (addr, 0);
676 /* Up to 3 things can be added in an address. They are stored in
677 plus_op0, plus_op1, and addr. */
691 /* Indexing and register+offset can both be used (except on a VAX 2)
692 without increasing execution time over either one alone. */
693 if (reg && indexed && offset)
694 return reg + indir + offset + predec;
695 return reg + indexed + indir + offset + predec;
699 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
701 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
704 /* Cost of an expression on a VAX. This version has costs tuned for the
705 CVAX chip (found in the VAX 3 series) with comments for variations on
708 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
709 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
710 costs on a per cpu basis. */
/* Worker for TARGET_RTX_COSTS.  Writes the cost into *TOTAL and returns
   true when the expression needs no further recursive costing.
   NOTE(review): extraction dropped a large number of lines -- braces,
   every `case'/`default' label of the big switch, and several
   conditions/returns.  The surviving lines below are kept verbatim;
   restore the full body from upstream before modifying.  */
713 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
714 bool speed ATTRIBUTE_UNUSED)
716 enum machine_mode mode = GET_MODE (x);
717 int i = 0; /* may be modified in switch */
718 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
722 /* On a VAX, constants from 0..63 are cheap because they can use the
723 1 byte literal constant format. Compare to -1 should be made cheap
724 so that decrement-and-branch insns can be formed more easily (if
725 the value -1 is copied to a register some decrement-and-branch
726 patterns will not match). */
/* CONST_INT costing: AND complements its immediate, so test ~INTVAL.  */
733 if (outer_code == AND)
735 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
738 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
739 || (outer_code == COMPARE
741 || ((outer_code == PLUS || outer_code == MINUS)
742 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
/* CONST_DOUBLE costing: short float literals are cheaper.  */
756 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
757 *total = vax_float_literal (x) ? 5 : 8;
759 *total = ((CONST_DOUBLE_HIGH (x) == 0
760 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
761 || (outer_code == PLUS
762 && CONST_DOUBLE_HIGH (x) == -1
763 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
769 return true; /* Implies register operand. */
773 return true; /* Implies register operand. */
/* Arithmetic op base costs (cycle counts per CVAX, with notes for
   other VAX implementations).  */
779 *total = 16; /* 4 on VAX 9000 */
782 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
785 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
790 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
793 *total = MAX_COST; /* Mode is not supported. */
801 *total = MAX_COST; /* Mode is not supported. */
809 *total = 30; /* Highly variable. */
810 else if (mode == DFmode)
811 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
814 *total = 11; /* 25 on VAX 2 */
824 *total = MAX_COST; /* Mode is not supported. */
831 *total = (6 /* 4 on VAX 9000 */
832 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
836 *total = 7; /* 17 on VAX 2 */
845 *total = 10; /* 6 on VAX 9000 */
850 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
851 if (CONST_INT_P (XEXP (x, 1)))
852 fmt = "e"; /* all constant rotate counts are short */
857 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
858 /* Small integer operands can use subl2 and addl2. */
859 if ((CONST_INT_P (XEXP (x, 1)))
860 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
870 /* AND is special because the first operand is complemented. */
872 if (CONST_INT_P (XEXP (x, 0)))
874 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
884 else if (mode == SFmode)
886 else if (mode == DImode)
/* MEM costing: double-word accesses cost more; non-trivial addresses
   add their address cost.  */
902 if (mode == DImode || mode == DFmode)
903 *total = 5; /* 7 on VAX 2 */
905 *total = 3; /* 4 on VAX 2 */
907 if (!REG_P (x) && GET_CODE (x) != POST_INC)
908 *total += vax_address_cost_1 (x);
914 *total = 3; /* FIXME: Costs need to be checked */
921 /* Now look inside the expression. Operands which are not registers or
922 short constants add to the cost.
924 FMT and I may have been adjusted in the switch above for instructions
925 which require special handling. */
927 while (*fmt++ == 'e')
929 rtx op = XEXP (x, i);
932 code = GET_CODE (op);
934 /* A NOT is likely to be found as the first operand of an AND
935 (in which case the relevant cost is of the operand inside
936 the not) and not likely to be found anywhere else. */
938 op = XEXP (op, 0), code = GET_CODE (op);
943 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
944 && GET_MODE (x) != QImode)
945 *total += 1; /* 2 on VAX 2 */
950 *total += 1; /* 2 on VAX 2 */
953 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
955 /* Registers are faster than floating point constants -- even
956 those constants which can be encoded in a single byte. */
957 if (vax_float_literal (op))
960 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
964 if (CONST_DOUBLE_HIGH (op) != 0
965 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
970 *total += 1; /* 2 on VAX 2 */
971 if (!REG_P (XEXP (op, 0)))
972 *total += vax_address_cost_1 (XEXP (op, 0));
985 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
986 Used for C++ multiple inheritance.
987 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
988 addl2 $DELTA, 4(ap) #adjust first argument
989 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
/* Worker for TARGET_ASM_OUTPUT_MI_THUNK.
   NOTE(review): extraction dropped the `static void' line, the DELTA
   and FUNCTION parameter lines, and the braces.  Restore from
   upstream.  */
993 vax_output_mi_thunk (FILE * file,
994 tree thunk ATTRIBUTE_UNUSED,
996 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
/* 0x0ffc = conservative entry mask saving r2..r11.  */
999 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1000 asm_fprintf (file, ",4(%Rap)\n");
/* Jump past FUNCTION's own 2-byte entry mask.  */
1001 fprintf (file, "\tjmp ");
1002 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1003 fprintf (file, "+2\n");
1007 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1008 int incoming ATTRIBUTE_UNUSED)
1010 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
1014 vax_builtin_setjmp_frame_value (void)
1016 return hard_frame_pointer_rtx;
1019 /* Worker function for NOTICE_UPDATE_CC. */
/* Track what the condition codes hold after EXP so redundant compares
   can be deleted.  NOTE(review): extraction dropped lines -- the `void'
   line, braces, the CC_STATUS_INIT calls, and the case labels of the
   inner switch.  Restore from upstream.  */
1022 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1024 if (GET_CODE (exp) == SET)
1026 if (GET_CODE (SET_SRC (exp)) == CALL)
1028 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1029 && GET_CODE (SET_DEST (exp)) != PC)
1031 cc_status.flags = 0;
1032 /* The integer operations below don't set carry or
1033 set it in an incompatible way. That's ok though
1034 as the Z bit is all we need when doing unsigned
1035 comparisons on the result of these insns (since
1036 they're always with 0). Set CC_NO_OVERFLOW to
1037 generate the correct unsigned branches. */
1038 switch (GET_CODE (SET_SRC (exp)))
1041 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1049 cc_status.flags = CC_NO_OVERFLOW;
/* Remember what the destination now holds.  */
1054 cc_status.value1 = SET_DEST (exp);
1055 cc_status.value2 = SET_SRC (exp);
1058 else if (GET_CODE (exp) == PARALLEL
1059 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1061 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1063 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1065 cc_status.flags = 0;
1066 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1067 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1070 /* PARALLELs whose first element sets the PC are aob,
1071 sob insns. They do change the cc's. */
/* Invalidate value2 when it could alias what value1 just changed.  */
1076 if (cc_status.value1 && REG_P (cc_status.value1)
1078 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1079 cc_status.value2 = 0;
1080 if (cc_status.value1 && MEM_P (cc_status.value1)
1082 && MEM_P (cc_status.value2))
1083 cc_status.value2 = 0;
1084 /* Actual condition, one line up, should be that value2's address
1085 depends on value1, but that is too much of a pain. */
1088 /* Output integer move instructions. */
/* Return the assembler template for an integer move of MODE, choosing
   the shortest encoding for the constant/operand combination.
   NOTE(review): extraction dropped many lines -- braces, the
   `case DImode:'/`case SImode:'/`case HImode:'/`case QImode:' labels,
   several `if (TARGET_QMATH ...)' conditions, declarations of HI/LO/N,
   `return' statements for the const0/push cases, and `else' lines.
   The surviving lines are kept verbatim; restore from upstream.  */
1091 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1092 enum machine_mode mode)
1095 const char *pattern_hi, *pattern_lo;
1100 if (operands[1] == const0_rtx)
1102 if (TARGET_QMATH && optimize_size
1103 && (CONST_INT_P (operands[1])
1104 || GET_CODE (operands[1]) == CONST_DOUBLE))
1106 unsigned HOST_WIDE_INT hval, lval;
1109 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1111 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1113 /* Make sure only the low 32 bits are valid. */
1114 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1115 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1119 lval = INTVAL (operands[1]);
1123 /* Here we see if we are trying to see if the 64bit value is really
1124 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1125 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1126 8 bytes - 1 shift byte - 1 short literal byte. */
1128 && (n = exact_log2 (lval & (- lval))) != -1
1129 && (lval >> n) < 64)
1133 /* On 32bit platforms, if the 6bits didn't overflow into the
1134 upper 32bit value that value better be 0. If we have
1135 overflowed, make sure it wasn't too much. */
1136 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1138 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1139 n = 0; /* failure */
1141 lval |= hval << (32 - n);
1143 /* If n is 0, then ashq is not the best way to emit this. */
1146 operands[1] = GEN_INT (lval);
1147 operands[2] = GEN_INT (n);
1148 return "ashq %2,%1,%0";
1150 #if HOST_BITS_PER_WIDE_INT == 32
1152 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1153 upper 32bit value. */
1155 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1156 && (hval >> n) < 64)
1158 operands[1] = GEN_INT (hval >> n);
1159 operands[2] = GEN_INT (n + 32);
1160 return "ashq %2,%1,%0";
/* Otherwise split the DImode move into two SImode moves.  */
1166 && (!MEM_P (operands[0])
1167 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1168 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1169 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1170 && ((CONST_INT_P (operands[1])
1171 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1172 || GET_CODE (operands[1]) == CONST_DOUBLE))
1174 hi[0] = operands[0];
1175 hi[1] = operands[1];
1177 split_quadword_operands (insn, SET, hi, lo, 2);
1179 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1180 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1182 /* The patterns are just movl/movl or pushl/pushl then a movq will
1183 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1184 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1186 if ((!strncmp (pattern_lo, "movl", 4)
1187 && !strncmp (pattern_hi, "movl", 4))
1188 || (!strncmp (pattern_lo, "pushl", 5)
1189 && !strncmp (pattern_hi, "pushl", 5)))
1190 return "movq %1,%0";
/* For a predecrement destination, emit the high half first.  */
1192 if (MEM_P (operands[0])
1193 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1195 output_asm_insn (pattern_hi, hi);
1196 operands[0] = lo[0];
1197 operands[1] = lo[1];
1198 operands[2] = lo[2];
1203 output_asm_insn (pattern_lo, lo);
1204 operands[0] = hi[0];
1205 operands[1] = hi[1];
1206 operands[2] = hi[2];
1210 return "movq %1,%0";
/* SImode: prefer address-style moves for symbols.  */
1213 if (symbolic_operand (operands[1], SImode))
1215 if (push_operand (operands[0], SImode))
1216 return "pushab %a1";
1217 return "movab %a1,%0";
1220 if (operands[1] == const0_rtx)
1222 if (push_operand (operands[1], SImode))
/* Large constants: try complement, zero-extend, sign-extend, shift.  */
1227 if (CONST_INT_P (operands[1])
1228 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1230 HOST_WIDE_INT i = INTVAL (operands[1]);
1232 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1233 return "mcoml %N1,%0";
1234 if ((unsigned HOST_WIDE_INT)i < 0x100)
1235 return "movzbl %1,%0";
1236 if (i >= -0x80 && i < 0)
1237 return "cvtbl %1,%0";
1239 && (n = exact_log2 (i & (-i))) != -1
1240 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1242 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1243 operands[2] = GEN_INT (n);
1244 return "ashl %2,%1,%0";
1246 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1247 return "movzwl %1,%0";
1248 if (i >= -0x8000 && i < 0)
1249 return "cvtwl %1,%0";
1251 if (push_operand (operands[0], SImode))
1253 return "movl %1,%0";
/* HImode.  */
1256 if (CONST_INT_P (operands[1]))
1258 HOST_WIDE_INT i = INTVAL (operands[1]);
1261 else if ((unsigned HOST_WIDE_INT)i < 64)
1262 return "movw %1,%0";
1263 else if ((unsigned HOST_WIDE_INT)~i < 64)
1264 return "mcomw %H1,%0";
1265 else if ((unsigned HOST_WIDE_INT)i < 256)
1266 return "movzbw %1,%0";
1267 else if (i >= -0x80 && i < 0)
1268 return "cvtbw %1,%0";
1270 return "movw %1,%0";
/* QImode.  */
1273 if (CONST_INT_P (operands[1]))
1275 HOST_WIDE_INT i = INTVAL (operands[1]);
1278 else if ((unsigned HOST_WIDE_INT)~i < 64)
1279 return "mcomb %B1,%0";
1281 return "movb %1,%0";
1288 /* Output integer add instructions.
1290 The space-time-opcode tradeoffs for addition vary by model of VAX.
1292 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1293 but it not faster on other models.
1295 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1296 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1297 a register is used in an address too soon after it is set.
1298 Compromise by using movab only when it is shorter than the add
1299 or the base register in the address is one of sp, ap, and fp,
1300 which are not modified very often. */
/* Return the assembler template for an integer add of MODE.
   NOTE(review): extraction dropped many lines -- braces, the mode case
   labels, the LOW/CARRY/SUB declarations, several `else' lines and
   `return' statements (e.g. the incl/decl returns after the
   const1/constm1 tests).  The surviving lines are kept verbatim;
   restore from upstream.  */
1303 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1310 const char *pattern;
1314 if (TARGET_QMATH && 0)
/* DImode: split into two SImode halves; the high half uses adwc/sbwc
   to propagate the carry from the low half.  */
1317 split_quadword_operands (insn, PLUS, operands, low, 3);
1321 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1322 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1323 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1324 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1327 /* No reason to add a 0 to the low part and thus no carry, so just
1328 emit the appropriate add/sub instruction. */
1329 if (low[2] == const0_rtx)
1330 return vax_output_int_add (NULL, operands, SImode);
1332 /* Are we doing addition or subtraction? */
1333 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1335 /* We can't use vax_output_int_add since some the patterns don't
1336 modify the carry bit. */
1339 if (low[2] == constm1_rtx)
1340 pattern = "decl %0";
1342 pattern = "subl2 $%n2,%0";
1346 if (low[2] == const1_rtx)
1347 pattern = "incl %0";
1349 pattern = "addl2 %2,%0";
1351 output_asm_insn (pattern, low);
1353 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1354 two 32bit parts, we complement each and then add one to
1355 low part. We know that the low part can't overflow since
1356 it's value can never be 0. */
1358 return "sbwc %N2,%0";
1359 return "adwc %2,%0";
1362 /* Add low parts. */
1363 if (rtx_equal_p (operands[0], operands[1]))
1365 if (low[2] == const0_rtx)
1366 /* Should examine operand, punt if not POST_INC. */
1367 pattern = "tstl %0", carry = 0;
1368 else if (low[2] == const1_rtx)
1369 pattern = "incl %0";
1371 pattern = "addl2 %2,%0";
1375 if (low[2] == const0_rtx)
1376 pattern = "movl %1,%0", carry = 0;
1378 pattern = "addl3 %2,%1,%0";
1381 output_asm_insn (pattern, low);
1383 /* If CARRY is 0, we don't have any carry value to worry about. */
1384 return get_insn_template (CODE_FOR_addsi3, insn);
1385 /* %0 = C + %1 + %2 */
1386 if (!rtx_equal_p (operands[0], operands[1]))
1387 output_asm_insn ((operands[1] == const0_rtx
1389 : "movl %1,%0"), operands);
1390 return "adwc %2,%0";
/* SImode.  */
1394 if (rtx_equal_p (operands[0], operands[1]))
1396 if (operands[2] == const1_rtx)
1398 if (operands[2] == constm1_rtx)
1400 if (CONST_INT_P (operands[2])
1401 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1402 return "subl2 $%n2,%0";
1403 if (CONST_INT_P (operands[2])
1404 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1405 && REG_P (operands[1])
1406 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1407 || REGNO (operands[1]) > 11))
1408 return "movab %c2(%1),%0";
1409 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1410 return "movab %a2[%0],%0";
1411 return "addl2 %2,%0";
1414 if (rtx_equal_p (operands[0], operands[2]))
1416 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1417 return "movab %a1[%0],%0";
1418 return "addl2 %1,%0";
1421 if (CONST_INT_P (operands[2])
1422 && INTVAL (operands[2]) < 32767
1423 && INTVAL (operands[2]) > -32768
1424 && REG_P (operands[1])
1425 && push_operand (operands[0], SImode))
1426 return "pushab %c2(%1)";
1428 if (CONST_INT_P (operands[2])
1429 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1430 return "subl3 $%n2,%1,%0";
1432 if (CONST_INT_P (operands[2])
1433 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1434 && REG_P (operands[1])
1435 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1436 || REGNO (operands[1]) > 11))
1437 return "movab %c2(%1),%0";
1439 /* Add this if using gcc on a VAX 3xxx:
1440 if (REG_P (operands[1]) && REG_P (operands[2]))
1441 return "movab (%1)[%2],%0";
1444 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1446 if (push_operand (operands[0], SImode))
1447 return "pushab %a2[%1]";
1448 return "movab %a2[%1],%0";
1451 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1453 if (push_operand (operands[0], SImode))
1454 return "pushab %a1[%2]";
1455 return "movab %a1[%2],%0";
1458 if (flag_pic && REG_P (operands[0])
1459 && symbolic_operand (operands[2], SImode))
1460 return "movab %a2,%0;addl2 %1,%0";
1463 && (symbolic_operand (operands[1], SImode)
1464 || symbolic_operand (operands[1], SImode)))
1467 return "addl3 %1,%2,%0";
/* HImode.  */
1470 if (rtx_equal_p (operands[0], operands[1]))
1472 if (operands[2] == const1_rtx)
1474 if (operands[2] == constm1_rtx)
1476 if (CONST_INT_P (operands[2])
1477 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1478 return "subw2 $%n2,%0";
1479 return "addw2 %2,%0";
1481 if (rtx_equal_p (operands[0], operands[2]))
1482 return "addw2 %1,%0";
1483 if (CONST_INT_P (operands[2])
1484 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1485 return "subw3 $%n2,%1,%0";
1486 return "addw3 %1,%2,%0";
/* QImode.  */
1489 if (rtx_equal_p (operands[0], operands[1]))
1491 if (operands[2] == const1_rtx)
1493 if (operands[2] == constm1_rtx)
1495 if (CONST_INT_P (operands[2])
1496 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1497 return "subb2 $%n2,%0";
1498 return "addb2 %2,%0";
1500 if (rtx_equal_p (operands[0], operands[2]))
1501 return "addb2 %1,%0";
1502 if (CONST_INT_P (operands[2])
1503 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1504 return "subb3 $%n2,%1,%0";
1505 return "addb3 %1,%2,%0";
/* Return the assembler template for an integer subtract of MODE.
   NOTE(review): extraction dropped lines -- the `const char *' return
   type line, braces, the mode case labels, the LOW/CARRY declarations,
   and several `else' lines.  The surviving lines are kept verbatim;
   restore from upstream.  */
1513 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1520 const char *pattern;
1523 if (TARGET_QMATH && 0)
/* DImode: split into halves; high half uses sbwc for the borrow.  */
1526 split_quadword_operands (insn, MINUS, operands, low, 3);
1530 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1532 /* Negation is tricky. It's basically complement and increment.
1533 Negate hi, then lo, and subtract the carry back. */
1534 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1535 || (MEM_P (operands[0])
1536 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1537 fatal_insn ("illegal operand detected", insn);
1538 output_asm_insn ("mnegl %2,%0", operands);
1539 output_asm_insn ("mnegl %2,%0", low);
1540 return "sbwc $0,%0";
1542 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1543 gcc_assert (rtx_equal_p (low[0], low[1]));
1544 if (low[2] == const1_rtx)
1545 output_asm_insn ("decl %0", low);
1547 output_asm_insn ("subl2 %2,%0", low);
1548 return "sbwc %2,%0";
1551 /* Subtract low parts. */
1552 if (rtx_equal_p (operands[0], operands[1]))
1554 if (low[2] == const0_rtx)
1555 pattern = 0, carry = 0;
1556 else if (low[2] == constm1_rtx)
1557 pattern = "decl %0";
1559 pattern = "subl2 %2,%0";
1563 if (low[2] == constm1_rtx)
1564 pattern = "decl %0";
1565 else if (low[2] == const0_rtx)
1566 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1568 pattern = "subl3 %2,%1,%0";
1571 output_asm_insn (pattern, low);
/* High half: sbwc subtracts the borrow from the low half.  */
1574 if (!rtx_equal_p (operands[0], operands[1]))
1575 return "movl %1,%0;sbwc %2,%0";
1576 return "sbwc %2,%0";
1577 /* %0 = %2 - %1 - C */
/* SImode (no carry to worry about): fall back to the plain pattern.  */
1579 return get_insn_template (CODE_FOR_subsi3, insn);
1587 /* True if X is an rtx for a constant that is a valid address. */
1590 legitimate_constant_address_p (rtx x)
1592 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1593 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1595 if (GET_CODE (x) != CONST)
1597 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1599 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1600 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
/* The other macros defined here are used only in legitimate_address_p ().  */

/* Nonzero if X is a hard reg that can be used as an index
   or, if not strict, if it is a pseudo reg.  */

#define	INDEX_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))

/* Nonzero if X is a hard reg that can be used as a base reg
   or, if not strict, if it is a pseudo reg.  */

#define	BASE_REGISTER_P(X, STRICT) \
(REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1618 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1620 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1621 are no SYMBOL_REFs for external symbols present. */
1624 indirectable_constant_address_p (rtx x, bool indirect)
1626 if (GET_CODE (x) == SYMBOL_REF)
1627 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1629 if (GET_CODE (x) == CONST)
1631 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1632 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1634 return CONSTANT_ADDRESS_P (x);
1637 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1640 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1642 return CONSTANT_ADDRESS_P (x);
1645 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1647 /* True if X is an address which can be indirected. External symbols
1648 could be in a sharable image library, so we disallow those. */
1651 indirectable_address_p (rtx x, bool strict, bool indirect)
1653 if (indirectable_constant_address_p (x, indirect)
1654 || BASE_REGISTER_P (x, strict))
1656 if (GET_CODE (x) != PLUS
1657 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1658 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1660 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1663 /* Return true if x is a valid address not using indexing.
1664 (This much is the easy part.) */
1666 nonindexed_address_p (rtx x, bool strict)
1671 if (! reload_in_progress
1672 || reg_equiv_mem (REGNO (x)) == 0
1673 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
1676 if (indirectable_constant_address_p (x, false))
1678 if (indirectable_address_p (x, strict, false))
1680 xfoo0 = XEXP (x, 0);
1681 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1683 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1684 && BASE_REGISTER_P (xfoo0, strict))
1689 /* True if PROD is either a reg times size of mode MODE and MODE is less
1690 than or equal 8 bytes, or just a reg if MODE is one byte. */
1693 index_term_p (rtx prod, enum machine_mode mode, bool strict)
1697 if (GET_MODE_SIZE (mode) == 1)
1698 return BASE_REGISTER_P (prod, strict);
1700 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1703 xfoo0 = XEXP (prod, 0);
1704 xfoo1 = XEXP (prod, 1);
1706 if (CONST_INT_P (xfoo0)
1707 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1708 && INDEX_REGISTER_P (xfoo1, strict))
1711 if (CONST_INT_P (xfoo1)
1712 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1713 && INDEX_REGISTER_P (xfoo0, strict))
1719 /* Return true if X is the sum of a register
1720 and a valid index term for mode MODE. */
1722 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1726 if (GET_CODE (x) != PLUS)
1729 xfoo0 = XEXP (x, 0);
1730 xfoo1 = XEXP (x, 1);
1732 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1735 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1741 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1743 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1745 if (!CONSTANT_ADDRESS_P (xfoo0))
1747 if (BASE_REGISTER_P (xfoo1, strict))
1748 return !flag_pic || mode == QImode;
1749 if (flag_pic && symbolic_operand (xfoo0, SImode))
1751 return reg_plus_index_p (xfoo1, mode, strict);
1754 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1755 that is a valid memory address for an instruction.
1756 The MODE argument is the machine mode for the MEM expression
1757 that wants to use this address. */
1759 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1763 if (nonindexed_address_p (x, strict))
1766 if (GET_CODE (x) != PLUS)
1769 /* Handle <address>[index] represented with index-sum outermost */
1771 xfoo0 = XEXP (x, 0);
1772 xfoo1 = XEXP (x, 1);
1774 if (index_term_p (xfoo0, mode, strict)
1775 && nonindexed_address_p (xfoo1, strict))
1778 if (index_term_p (xfoo1, mode, strict)
1779 && nonindexed_address_p (xfoo0, strict))
1782 /* Handle offset(reg)[index] with offset added outermost */
1784 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1785 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1791 /* Return true if x (a legitimate address expression) has an effect that
1792 depends on the machine mode it is used for. On the VAX, the predecrement
1793 and postincrement address depend thus (the amount of decrement or
1794 increment being the length of the operand) and all indexed address depend
1795 thus (because the index scale factor is the length of the operand). */
1798 vax_mode_dependent_address_p (rtx x)
1802 /* Auto-increment cases are now dealt with generically in recog.c. */
1803 if (GET_CODE (x) != PLUS)
1806 xfoo0 = XEXP (x, 0);
1807 xfoo1 = XEXP (x, 1);
1809 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1811 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1813 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1815 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1822 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1824 if (illegal_addsub_di_memory_operand (x, mode))
1826 rtx addr = XEXP (x, 0);
1827 rtx temp = gen_reg_rtx (Pmode);
1829 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1830 if (GET_CODE (addr) == CONST && flag_pic)
1832 offset = XEXP (XEXP (addr, 0), 1);
1833 addr = XEXP (XEXP (addr, 0), 0);
1836 emit_move_insn (temp, addr);
1838 temp = gen_rtx_PLUS (Pmode, temp, offset);
1839 x = gen_rtx_MEM (DImode, temp);
1845 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1847 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1850 rtx (*gen_old_insn)(rtx, rtx, rtx);
1851 rtx (*gen_si_insn)(rtx, rtx, rtx);
1852 rtx (*gen_insn)(rtx, rtx, rtx);
1856 gen_old_insn = gen_adddi3_old;
1857 gen_si_insn = gen_addsi3;
1858 gen_insn = gen_adcdi3;
1860 else if (code == MINUS)
1862 gen_old_insn = gen_subdi3_old;
1863 gen_si_insn = gen_subsi3;
1864 gen_insn = gen_sbcdi3;
1869 /* If this is addition (thus operands are commutative) and if there is one
1870 addend that duplicates the desination, we want that addend to be the
1873 && rtx_equal_p (operands[0], operands[2])
1874 && !rtx_equal_p (operands[1], operands[2]))
1877 operands[2] = operands[1];
1883 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1887 if (!rtx_equal_p (operands[0], operands[1])
1888 && (REG_P (operands[0]) && MEM_P (operands[1])))
1890 emit_move_insn (operands[0], operands[1]);
1891 operands[1] = operands[0];
1894 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1895 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1896 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1898 if (!rtx_equal_p (operands[0], operands[1]))
1899 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1900 operand_subword (operands[1], 0, 0, DImode));
1902 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1903 operand_subword (operands[1], 1, 0, DImode),
1904 operand_subword (operands[2], 1, 0, DImode)));
1908 /* If are adding the same value together, that's really a multiply by 2,
1909 and that's just a left shift of 1. */
1910 if (rtx_equal_p (operands[1], operands[2]))
1912 gcc_assert (code != MINUS);
1913 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1917 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1919 /* If an operand is the same as operand[0], use the operand[0] rtx
1920 because fixup will an equivalent rtx but not an equal one. */
1922 if (rtx_equal_p (operands[0], operands[1]))
1923 operands[1] = operands[0];
1925 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1927 if (rtx_equal_p (operands[0], operands[2]))
1928 operands[2] = operands[0];
1930 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1932 /* If we are subtracting not from ourselves [d = a - b], and because the
1933 carry ops are two operand only, we would need to do a move prior to
1934 the subtract. And if d == b, we would need a temp otherwise
1935 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1936 into d = -b, d += a. Since -b can never overflow, even if b == d,
1939 If we are doing addition, since the carry ops are two operand, if
1940 we aren't adding to ourselves, move the first addend to the
1941 destination first. */
1943 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1944 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1946 if (code == MINUS && CONSTANT_P (operands[1]))
1948 temp = gen_reg_rtx (DImode);
1949 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1951 gen_insn = gen_adcdi3;
1952 operands[2] = operands[1];
1953 operands[1] = operands[0];
1956 emit_move_insn (operands[0], operands[1]);
1959 /* Subtracting a constant will have been rewritten to an addition of the
1960 negative of that constant before we get here. */
1961 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1962 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
1967 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1969 HOST_WIDE_INT lo_offset;
1970 HOST_WIDE_INT hi_offset;
1972 if (GET_CODE (lo) != GET_CODE (hi))
1976 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
1977 if (CONST_INT_P (lo))
1978 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
1979 if (CONST_INT_P (lo))
1980 return mode != SImode;
1985 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
1991 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
1992 return rtx_equal_p (lo, hi);
1994 switch (GET_CODE (lo))
2004 if (!CONST_INT_P (XEXP (lo, 1)))
2006 lo_offset = INTVAL (XEXP (lo, 1));
2013 switch (GET_CODE (hi))
2023 if (!CONST_INT_P (XEXP (hi, 1)))
2025 hi_offset = INTVAL (XEXP (hi, 1));
2032 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2035 return rtx_equal_p (lo, hi)
2036 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2039 /* Output assembler code for a block containing the constant parts
2040 of a trampoline, leaving space for the variable parts. */
2042 /* On the VAX, the trampoline contains an entry mask and two instructions:
2044 movl $STATIC,r0 (store the functions static chain)
2045 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2048 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2050 assemble_aligned_integer (2, const0_rtx);
2051 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2052 assemble_aligned_integer (4, const0_rtx);
2053 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2054 assemble_aligned_integer (2, GEN_INT (0x9f17));
2055 assemble_aligned_integer (4, const0_rtx);
2058 /* We copy the register-mask from the function's pure code
2059 to the start of the trampoline. */
2062 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2064 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2067 emit_block_move (m_tramp, assemble_trampoline_template (),
2068 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2070 mem = adjust_address (m_tramp, HImode, 0);
2071 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2073 mem = adjust_address (m_tramp, SImode, 4);
2074 emit_move_insn (mem, cxt);
2075 mem = adjust_address (m_tramp, SImode, 11);
2076 emit_move_insn (mem, plus_constant (fnaddr, 2));
2077 emit_insn (gen_sync_istream ());
2080 /* Value is the number of bytes of arguments automatically
2081 popped when returning from a subroutine call.
2082 FUNDECL is the declaration node of the function (as a tree),
2083 FUNTYPE is the data type of the function (as a tree),
2084 or for a library call it is an identifier node for the subroutine name.
2085 SIZE is the number of bytes of arguments passed on the stack.
2087 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2090 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2091 tree funtype ATTRIBUTE_UNUSED, int size)
2093 return size > 255 * 4 ? 0 : size;
2096 /* Define where to put the arguments to a function.
2097 Value is zero to push the argument on the stack,
2098 or a hard register in which to store the argument.
2100 MODE is the argument's machine mode.
2101 TYPE is the data type of the argument (as a tree).
2102 This is null for libcalls where that information may
2104 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2105 the preceding args and about the function being called.
2106 NAMED is nonzero if this argument is a named parameter
2107 (otherwise it is an extra parameter matching an ellipsis). */
2109 /* On the VAX all args are pushed. */
2112 vax_function_arg (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
2113 enum machine_mode mode ATTRIBUTE_UNUSED,
2114 const_tree type ATTRIBUTE_UNUSED,
2115 bool named ATTRIBUTE_UNUSED)
2120 /* Update the data in CUM to advance over an argument of mode MODE and
2121 data type TYPE. (TYPE is null for libcalls where that information
2122 may not be available.) */
2125 vax_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
2126 const_tree type, bool named ATTRIBUTE_UNUSED)
2128 *cum += (mode != BLKmode
2129 ? (GET_MODE_SIZE (mode) + 3) & ~3
2130 : (int_size_in_bytes (type) + 3) & ~3);