1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
35 #include "insn-attr.h"
41 #include "diagnostic-core.h"
44 #include "tm-constrs.h"
47 #include "target-def.h"
/* Forward declarations for the static target-hook implementations
   defined later in this file.
   NOTE(review): lines are elided in this excerpt (the embedded original
   line numbers jump); the prototypes for vax_output_mi_thunk,
   vax_function_arg and vax_function_arg_advance are missing their
   continuation lines -- restore them from upstream before compiling.  */
49 static void vax_option_override (void);
50 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
51 static void vax_file_start (void);
52 static void vax_init_libfuncs (void);
53 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55 static int vax_address_cost_1 (rtx);
56 static int vax_address_cost (rtx, bool);
57 static bool vax_rtx_costs (rtx, int, int, int *, bool);
58 static rtx vax_function_arg (cumulative_args_t, enum machine_mode,
60 static void vax_function_arg_advance (cumulative_args_t, enum machine_mode,
62 static rtx vax_struct_value_rtx (tree, int);
63 static rtx vax_builtin_setjmp_frame_value (void);
64 static void vax_asm_trampoline_template (FILE *);
65 static void vax_trampoline_init (rtx, tree, rtx);
66 static int vax_return_pops_args (tree, tree, int);
68 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides one default target hook with the
   VAX-specific implementation declared above; the table is consumed by
   TARGET_INITIALIZER at the bottom of this block.  NOTE(review): some
   hook entries may be elided in this excerpt (line numbers jump).  */
69 #undef TARGET_ASM_ALIGNED_HI_OP
70 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
72 #undef TARGET_ASM_FILE_START
73 #define TARGET_ASM_FILE_START vax_file_start
74 #undef TARGET_ASM_FILE_START_APP_OFF
75 #define TARGET_ASM_FILE_START_APP_OFF true
77 #undef TARGET_INIT_LIBFUNCS
78 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
80 #undef TARGET_ASM_OUTPUT_MI_THUNK
81 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
82 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
83 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
85 #undef TARGET_RTX_COSTS
86 #define TARGET_RTX_COSTS vax_rtx_costs
87 #undef TARGET_ADDRESS_COST
88 #define TARGET_ADDRESS_COST vax_address_cost
90 #undef TARGET_PROMOTE_PROTOTYPES
91 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
93 #undef TARGET_FUNCTION_ARG
94 #define TARGET_FUNCTION_ARG vax_function_arg
95 #undef TARGET_FUNCTION_ARG_ADVANCE
96 #define TARGET_FUNCTION_ARG_ADVANCE vax_function_arg_advance
98 #undef TARGET_STRUCT_VALUE_RTX
99 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
101 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
102 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
104 #undef TARGET_LEGITIMATE_ADDRESS_P
105 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
107 #undef TARGET_FRAME_POINTER_REQUIRED
/* The VAX always needs a frame pointer (CALLS instructions build one).  */
108 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
110 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
111 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
112 #undef TARGET_TRAMPOLINE_INIT
113 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
114 #undef TARGET_RETURN_POPS_ARGS
115 #define TARGET_RETURN_POPS_ARGS vax_return_pops_args
117 #undef TARGET_OPTION_OVERRIDE
118 #define TARGET_OPTION_OVERRIDE vax_option_override
/* The single target vtable instance read by the middle end.  */
120 struct gcc_target targetm = TARGET_INITIALIZER;
122 /* Set global variables as needed for the options enabled. */
/* Implement TARGET_OPTION_OVERRIDE: select the native VAX floating-point
   formats (DFmode is set to G-format here) and run any subtarget
   override hook.
   NOTE(review): the return-type line, braces, the SFmode/other mode
   format assignments and the matching #endif are elided in this
   excerpt.  */
125 vax_option_override (void)
127 /* We're VAX floating point, not IEEE floating point. */
129 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
131 #ifdef SUBTARGET_OVERRIDE_OPTIONS
132 SUBTARGET_OVERRIDE_OPTIONS;
/* Attach a REG_CFA_OFFSET note to INSN recording that SRC was stored at
   frame_pointer + OFFSET, i.e. (mem:SI (fp + offset)) = src, for
   call-frame (unwind/debug) information.  Used by the prologue below.
   NOTE(review): the function header line, braces and the declaration of
   X are elided in this excerpt.  */
137 vax_add_reg_cfa_offset (rtx insn, int offset, rtx src)
141 x = plus_constant (frame_pointer_rtx, offset);
142 x = gen_rtx_MEM (SImode, x);
143 x = gen_rtx_SET (VOIDmode, x, src);
144 add_reg_note (insn, REG_CFA_OFFSET, x);
147 /* Generate the assembly code for function entry. FILE is a stdio
148 stream to output the code to. SIZE is an int: how many units of
149 temporary storage to allocate.
151 Refer to the array `regs_ever_live' to determine which registers to
152 save; `regs_ever_live[I]' is nonzero if register number I is ever
153 used in the function. This function is responsible for knowing
154 which registers should not be saved even if used. */
/* Builds the register-save MASK, emits the procedure entry mask insn,
   records CFA notes for the CALLS-frame slots (AP at 4, FP at 8, PC at
   12, then each masked register), defines the CFA relative to FP, and
   finally allocates the local frame by adjusting SP.
   NOTE(review): many interior lines (declarations, mask accumulation,
   offset updates, braces) are elided in this excerpt.  */
157 vax_expand_prologue (void)
164 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
165 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
168 insn = emit_insn (gen_procedure_entry_mask (GEN_INT (mask)));
169 RTX_FRAME_RELATED_P (insn) = 1;
171 /* The layout of the CALLG/S stack frame is follows:
176 ... Registers saved as specified by MASK
186 The rest of the prologue will adjust the SP for the local frame. */
188 vax_add_reg_cfa_offset (insn, 4, arg_pointer_rtx);
189 vax_add_reg_cfa_offset (insn, 8, frame_pointer_rtx);
190 vax_add_reg_cfa_offset (insn, 12, pc_rtx);
193 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
194 if (mask & (1 << regno))
196 vax_add_reg_cfa_offset (insn, offset, gen_rtx_REG (SImode, regno));
200 /* Because add_reg_note pushes the notes, adding this last means that
201 it will be processed first. This is required to allow the other
202 notes be interpreted properly. */
203 add_reg_note (insn, REG_CFA_DEF_CFA,
204 plus_constant (frame_pointer_rtx, offset))
206 /* Allocate the local stack frame. */
207 size = get_frame_size ();
208 size -= STARTING_FRAME_OFFSET;
209 emit_insn (gen_addsi3 (stack_pointer_rtx,
210 stack_pointer_rtx, GEN_INT (-size)));
212 /* Do not allow instructions referencing local stack memory to be
213 scheduled before the frame is allocated. This is more pedantic
214 than anything else, given that VAX does not currently have a
215 scheduling description. */
216 emit_insn (gen_blockage ());
219 /* When debugging with stabs, we want to output an extra dummy label
220 so that gas can distinguish between D_float and G_float prior to
221 processing the .stabs directive identifying type double. */
/* Implement TARGET_ASM_FILE_START.  NOTE(review): the return-type line
   and braces are elided in this excerpt.  */
223 vax_file_start (void)
225 default_file_start ();
227 if (write_symbols == DBX_DEBUG)
228 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
231 /* We can use the BSD C library routines for the libgcc calls that are
232 still generated, since that's what they boil down to anyways. When
233 ELF, avoid the user's namespace. */
/* Implement TARGET_INIT_LIBFUNCS: route SImode unsigned div/mod through
   the BSD library entry points (ELF gets the __-prefixed names).
   NOTE(review): the return-type line and braces are elided in this
   excerpt.  */
236 vax_init_libfuncs (void)
238 if (TARGET_BSD_DIVMOD)
240 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
241 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
245 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split each DImode operand into a low SImode word (stored in LOW[i])
   and a high SImode word (left in OPERANDS[i]).  Autoincrement MEMs are
   shared between the halves; as a size optimization, a dying base
   register can be turned into a POST_INC access for the low word.
   NOTE(review): parameter list continuation, declarations of I/N, and
   several braces/else arms are elided in this excerpt.  */
248 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
253 for (i = 0; i < n; i++)
256 for (i = 0; i < n; i++)
258 if (MEM_P (operands[i])
259 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
260 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
262 rtx addr = XEXP (operands[i], 0);
263 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
265 else if (optimize_size && MEM_P (operands[i])
266 && REG_P (XEXP (operands[i], 0))
267 && (code != MINUS || operands[1] != const0_rtx)
268 && find_regno_note (insn, REG_DEAD,
269 REGNO (XEXP (operands[i], 0))))
271 low[i] = gen_rtx_MEM (SImode,
272 gen_rtx_POST_INC (Pmode,
273 XEXP (operands[i], 0)));
274 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
278 low[i] = operand_subword (operands[i], 0, 0, DImode);
279 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Print a VAX assembler representation of the address ADDR to FILE:
   register indirect "(rN)", predecrement "-(rN)", postincrement
   "(rN)+", and the general displacement/indexed forms
   "offset(breg)[ireg]".  Under PIC, diagnoses address forms that have
   no legal PIC encoding via output_operand_lossage.
   NOTE(review): this excerpt elides the switch case labels, several
   braces/else arms, and parts of the base/index disambiguation logic --
   the control flow below is not complete as shown.  */
285 print_operand_address (FILE * file, rtx addr)
288 rtx reg1, breg, ireg;
292 switch (GET_CODE (addr))
296 addr = XEXP (addr, 0);
300 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
304 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
308 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
312 /* There can be either two or three things added here. One must be a
313 REG. One can be either a REG or a MULT of a REG and an appropriate
314 constant, and the third can only be a constant or a MEM.
316 We get these two or three things and put the constant or MEM in
317 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
318 a register and can't tell yet if it is a base or index register,
321 reg1 = 0; ireg = 0; breg = 0; offset = 0;
/* First pass: peel one term (constant/MEM, MULT, or REG) off the PLUS.  */
323 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
324 || MEM_P (XEXP (addr, 0)))
326 offset = XEXP (addr, 0);
327 addr = XEXP (addr, 1);
329 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
330 || MEM_P (XEXP (addr, 1)))
332 offset = XEXP (addr, 1);
333 addr = XEXP (addr, 0);
335 else if (GET_CODE (XEXP (addr, 1)) == MULT)
337 ireg = XEXP (addr, 1);
338 addr = XEXP (addr, 0);
340 else if (GET_CODE (XEXP (addr, 0)) == MULT)
342 ireg = XEXP (addr, 0);
343 addr = XEXP (addr, 1);
345 else if (REG_P (XEXP (addr, 1)))
347 reg1 = XEXP (addr, 1);
348 addr = XEXP (addr, 0);
350 else if (REG_P (XEXP (addr, 0)))
352 reg1 = XEXP (addr, 0);
353 addr = XEXP (addr, 1);
365 else if (GET_CODE (addr) == MULT)
/* Second pass: the remainder must itself be a PLUS of two more terms.  */
369 gcc_assert (GET_CODE (addr) == PLUS);
370 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
371 || MEM_P (XEXP (addr, 0)))
375 if (CONST_INT_P (offset))
376 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
379 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
380 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
383 offset = XEXP (addr, 0);
385 else if (REG_P (XEXP (addr, 0)))
388 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
390 reg1 = XEXP (addr, 0);
394 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
396 ireg = XEXP (addr, 0);
399 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
400 || MEM_P (XEXP (addr, 1)))
404 if (CONST_INT_P (offset))
405 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
408 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
409 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
412 offset = XEXP (addr, 1);
414 else if (REG_P (XEXP (addr, 1)))
417 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
419 reg1 = XEXP (addr, 1);
423 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
425 ireg = XEXP (addr, 1);
429 /* If REG1 is nonzero, figure out if it is a base or index register. */
433 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
436 || (flag_pic && symbolic_operand (offset, SImode)))))
447 if (flag_pic && symbolic_operand (offset, SImode))
452 output_operand_lossage ("symbol used with both base and indexed registers");
455 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
456 if (flag_pic > 1 && GET_CODE (offset) == CONST
457 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
458 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
461 output_operand_lossage ("symbol with offset used in PIC mode")
465 /* symbol(reg) isn't PIC, but symbol[reg] is. */
/* Emit the pieces: offset, then "(breg)", then "[ireg]".  */
474 output_address (offset);
478 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
482 if (GET_CODE (ireg) == MULT)
483 ireg = XEXP (ireg, 0);
484 gcc_assert (REG_P (ireg));
485 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
/* Default case: a bare constant/symbolic address.  */
490 output_addr_const (file, addr);
/* Print operand X to FILE under the control of CODE, implementing the
   VAX-specific '%'-letter modifiers used by the machine description:
   '#' (double-type letter), '|' (register prefix), 'c'/'C' (condition
   names), and the various constant transforms ('D', 'P', 'N', 'R',
   'H'/'h', 'B'/'b', 'M', 'x').  Falls through to registers, MEMs,
   float constants, and general constants.
   NOTE(review): the leading if-branch for the first modifier and
   several braces/dstr declarations are elided in this excerpt.  */
495 print_operand (FILE *file, rtx x, int code)
498 fputc (ASM_DOUBLE_CHAR, file);
499 else if (code == '|')
500 fputs (REGISTER_PREFIX, file);
501 else if (code == 'c')
502 fputs (cond_name (x), file);
503 else if (code == 'C')
504 fputs (rev_cond_name (x), file);
505 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
506 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
507 else if (code == 'P' && CONST_INT_P (x))
508 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
509 else if (code == 'N' && CONST_INT_P (x))
510 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
511 /* rotl instruction cannot deal with negative arguments. */
512 else if (code == 'R' && CONST_INT_P (x))
513 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
514 else if (code == 'H' && CONST_INT_P (x))
515 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
516 else if (code == 'h' && CONST_INT_P (x))
517 fprintf (file, "$%d", (short) - INTVAL (x));
518 else if (code == 'B' && CONST_INT_P (x))
519 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
520 else if (code == 'b' && CONST_INT_P (x))
521 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
522 else if (code == 'M' && CONST_INT_P (x))
523 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
524 else if (code == 'x' && CONST_INT_P (x))
525 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
527 fprintf (file, "%s", reg_names[REGNO (x)]);
529 output_address (XEXP (x, 0));
530 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
533 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
534 sizeof (dstr), 0, 1);
535 fprintf (file, "$0f%s", dstr);
537 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
540 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
541 sizeof (dstr), 0, 1);
542 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
546 if (flag_pic > 1 && symbolic_operand (x, SImode))
549 output_operand_lossage ("symbol used as immediate operand");
552 output_addr_const (file, x);
/* Remnants of cond_name (below) and rev_cond_name: each maps a
   comparison rtx code to its VAX branch-suffix string.
   NOTE(review): both function bodies (the case labels and returned
   strings) are almost entirely elided in this excerpt.  */
559 switch (GET_CODE (op))
588 rev_cond_name (rtx op)
590 switch (GET_CODE (op))
/* Return true if C is a CONST_DOUBLE that the VAX can encode as a short
   floating literal: 0/1/2 (const_tiny_rtx), or a value of the form
   n/2**i or its exact inverse for small n (the loop probes candidate
   values and their reciprocals).
   NOTE(review): the return-type line, braces, early returns and the
   declarations of I/X/OK are elided in this excerpt.  */
619 vax_float_literal (rtx c)
621 enum machine_mode mode;
622 REAL_VALUE_TYPE r, s;
625 if (GET_CODE (c) != CONST_DOUBLE)
630 if (c == const_tiny_rtx[(int) mode][0]
631 || c == const_tiny_rtx[(int) mode][1]
632 || c == const_tiny_rtx[(int) mode][2])
635 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
637 for (i = 0; i < 7; i++)
641 REAL_VALUE_FROM_INT (s, x, 0, mode);
643 if (REAL_VALUES_EQUAL (r, s))
645 ok = exact_real_inverse (mode, &s);
647 if (REAL_VALUES_EQUAL (r, s))
654 /* Return the cost in cycles of a memory address, relative to register
657 Each of the following adds the indicated number of cycles:
661 1 - indexing and/or offset(register)
/* Decomposes ADDR into reg/indexed/indirect/offset/predecrement
   components and sums their per-component cycle costs (CVAX-tuned;
   the inline comments give VAX 2 variations).
   NOTE(review): the case labels of the switch and several fallthrough
   paths are elided in this excerpt.  */
666 vax_address_cost_1 (rtx addr)
668 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
669 rtx plus_op0 = 0, plus_op1 = 0;
671 switch (GET_CODE (addr))
681 indexed = 1; /* 2 on VAX 2 */
684 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
686 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
690 offset = 1; /* 2 on VAX 2 */
692 case LABEL_REF: /* this is probably a byte offset from the pc */
698 plus_op1 = XEXP (addr, 0);
700 plus_op0 = XEXP (addr, 0);
701 addr = XEXP (addr, 1);
704 indir = 2; /* 3 on VAX 2 */
705 addr = XEXP (addr, 0);
711 /* Up to 3 things can be added in an address. They are stored in
712 plus_op0, plus_op1, and addr. */
726 /* Indexing and register+offset can both be used (except on a VAX 2)
727 without increasing execution time over either one alone. */
728 if (reg && indexed && offset)
729 return reg + indir + offset + predec;
730 return reg + indexed + indir + offset + predec;
/* Implement TARGET_ADDRESS_COST: 1 for a plain register, otherwise
   1 plus the component cost computed by vax_address_cost_1.
   NOTE(review): the return-type line and braces are elided in this
   excerpt.  */
734 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
736 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
739 /* Cost of an expression on a VAX. This version has costs tuned for the
740 CVAX chip (found in the VAX 3 series) with comments for variations on
743 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
744 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
745 costs on a per cpu basis. */
/* Implement TARGET_RTX_COSTS: set *TOTAL to the cycle cost of X and
   return true when the cost is final.  A first switch assigns a base
   cost per rtx code/mode; a trailing loop then walks X's operands
   (driven by FMT and starting index I, both possibly adjusted in the
   switch) and adds the cost of non-register, non-short-literal
   operands and their addresses.
   NOTE(review): the case labels, many else-arms and break statements
   are elided in this excerpt, so the pairing of costs to codes below
   is incomplete as shown.  */
748 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
749 bool speed ATTRIBUTE_UNUSED)
751 enum machine_mode mode = GET_MODE (x);
752 int i = 0; /* may be modified in switch */
753 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
757 /* On a VAX, constants from 0..63 are cheap because they can use the
758 1 byte literal constant format. Compare to -1 should be made cheap
759 so that decrement-and-branch insns can be formed more easily (if
760 the value -1 is copied to a register some decrement-and-branch
761 patterns will not match). */
768 if (outer_code == AND)
770 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
773 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
774 || (outer_code == COMPARE
776 || ((outer_code == PLUS || outer_code == MINUS)
777 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
791 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
792 *total = vax_float_literal (x) ? 5 : 8;
794 *total = ((CONST_DOUBLE_HIGH (x) == 0
795 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
796 || (outer_code == PLUS
797 && CONST_DOUBLE_HIGH (x) == -1
798 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
804 return true; /* Implies register operand. */
808 return true; /* Implies register operand. */
814 *total = 16; /* 4 on VAX 9000 */
817 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
820 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
825 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
828 *total = MAX_COST; /* Mode is not supported. */
836 *total = MAX_COST; /* Mode is not supported. */
844 *total = 30; /* Highly variable. */
845 else if (mode == DFmode)
846 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
849 *total = 11; /* 25 on VAX 2 */
859 *total = MAX_COST; /* Mode is not supported. */
866 *total = (6 /* 4 on VAX 9000 */
867 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
871 *total = 7; /* 17 on VAX 2 */
880 *total = 10; /* 6 on VAX 9000 */
885 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
886 if (CONST_INT_P (XEXP (x, 1)))
887 fmt = "e"; /* all constant rotate counts are short */
892 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
893 /* Small integer operands can use subl2 and addl2. */
894 if ((CONST_INT_P (XEXP (x, 1)))
895 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
905 /* AND is special because the first operand is complemented. */
907 if (CONST_INT_P (XEXP (x, 0)))
909 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
919 else if (mode == SFmode)
921 else if (mode == DImode)
937 if (mode == DImode || mode == DFmode)
938 *total = 5; /* 7 on VAX 2 */
940 *total = 3; /* 4 on VAX 2 */
942 if (!REG_P (x) && GET_CODE (x) != POST_INC)
943 *total += vax_address_cost_1 (x);
949 *total = 3; /* FIXME: Costs need to be checked */
956 /* Now look inside the expression. Operands which are not registers or
957 short constants add to the cost.
959 FMT and I may have been adjusted in the switch above for instructions
960 which require special handling. */
962 while (*fmt++ == 'e')
964 rtx op = XEXP (x, i);
967 code = GET_CODE (op);
969 /* A NOT is likely to be found as the first operand of an AND
970 (in which case the relevant cost is of the operand inside
971 the not) and not likely to be found anywhere else. */
973 op = XEXP (op, 0), code = GET_CODE (op);
978 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
979 && GET_MODE (x) != QImode)
980 *total += 1; /* 2 on VAX 2 */
985 *total += 1; /* 2 on VAX 2 */
988 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
990 /* Registers are faster than floating point constants -- even
991 those constants which can be encoded in a single byte. */
992 if (vax_float_literal (op))
995 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
999 if (CONST_DOUBLE_HIGH (op) != 0
1000 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
1005 *total += 1; /* 2 on VAX 2 */
1006 if (!REG_P (XEXP (op, 0)))
1007 *total += vax_address_cost_1 (XEXP (op, 0));
1020 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
1021 Used for C++ multiple inheritance.
1022 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
1023 addl2 $DELTA, 4(ap) #adjust first argument
1024 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
/* Implement TARGET_ASM_OUTPUT_MI_THUNK by emitting exactly the
   three-instruction sequence documented above (0x0ffc is the
   conservative r2-r11 entry mask as raw .word data).
   NOTE(review): the return-type line, the FUNCTION parameter line and
   braces are elided in this excerpt.  */
1028 vax_output_mi_thunk (FILE * file,
1029 tree thunk ATTRIBUTE_UNUSED,
1030 HOST_WIDE_INT delta,
1031 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1034 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
1035 asm_fprintf (file, ",4(%Rap)\n");
1036 fprintf (file, "\tjmp ");
1037 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1038 fprintf (file, "+2\n");
/* Implement TARGET_STRUCT_VALUE_RTX: aggregate return values are passed
   via the register designated by VAX_STRUCT_VALUE_REGNUM, for both the
   incoming and outgoing cases.
   NOTE(review): the return-type line and braces are elided in this
   excerpt.  */
1042 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
1043 int incoming ATTRIBUTE_UNUSED)
1045 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
/* Implement TARGET_BUILTIN_SETJMP_FRAME_VALUE: __builtin_setjmp saves
   the hard frame pointer rather than the virtual one.
   NOTE(review): the return-type line and braces are elided in this
   excerpt.  */
1049 vax_builtin_setjmp_frame_value (void)
1051 return hard_frame_pointer_rtx;
1054 /* Worker function for NOTICE_UPDATE_CC. */
/* Update the global cc_status to describe how EXP (a SET or PARALLEL)
   leaves the condition codes: records the destination/source in
   value1/value2 and sets CC_NO_OVERFLOW for the integer ops whose Z bit
   suffices for the unsigned compares GCC generates.  Clears value2 when
   it could be invalidated by writing value1.
   NOTE(review): the return-type line, braces, the case labels of the
   inner switch and several else/CC_STATUS_INIT arms are elided in this
   excerpt.  */
1057 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
1059 if (GET_CODE (exp) == SET)
1061 if (GET_CODE (SET_SRC (exp)) == CALL)
1063 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
1064 && GET_CODE (SET_DEST (exp)) != PC)
1066 cc_status.flags = 0;
1067 /* The integer operations below don't set carry or
1068 set it in an incompatible way. That's ok though
1069 as the Z bit is all we need when doing unsigned
1070 comparisons on the result of these insns (since
1071 they're always with 0). Set CC_NO_OVERFLOW to
1072 generate the correct unsigned branches. */
1073 switch (GET_CODE (SET_SRC (exp)))
1076 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1084 cc_status.flags = CC_NO_OVERFLOW;
1089 cc_status.value1 = SET_DEST (exp);
1090 cc_status.value2 = SET_SRC (exp);
1093 else if (GET_CODE (exp) == PARALLEL
1094 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1096 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1098 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1100 cc_status.flags = 0;
1101 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1102 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1105 /* PARALLELs whose first element sets the PC are aob,
1106 sob insns. They do change the cc's. */
1111 if (cc_status.value1 && REG_P (cc_status.value1)
1113 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1114 cc_status.value2 = 0;
1115 if (cc_status.value1 && MEM_P (cc_status.value1)
1117 && MEM_P (cc_status.value2))
1118 cc_status.value2 = 0;
1119 /* Actual condition, one line up, should be that value2's address
1120 depends on value1, but that is too much of a pain. */
1123 /* Output integer move instructions. */
/* Return the assembler template for an integer move of MODE from
   OPERANDS[1] to OPERANDS[0], choosing the shortest encoding: ashq for
   shifted small DImode constants, movq when both halves are plain
   moves/pushes, and for SImode/HImode/QImode the clr/mov/mcom/movz/cvt
   and ashl tricks that keep constants in the 6-bit literal range.  May
   rewrite OPERANDS[1]/OPERANDS[2] in place when returning a shift
   template.  Recurses per-half for split DImode constants.
   NOTE(review): the mode switch labels, declarations (hi/lo/n), braces
   and several else arms are elided in this excerpt.  */
1126 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1127 enum machine_mode mode)
1130 const char *pattern_hi, *pattern_lo;
1135 if (operands[1] == const0_rtx)
1137 if (TARGET_QMATH && optimize_size
1138 && (CONST_INT_P (operands[1])
1139 || GET_CODE (operands[1]) == CONST_DOUBLE))
1141 unsigned HOST_WIDE_INT hval, lval;
1144 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1146 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1148 /* Make sure only the low 32 bits are valid. */
1149 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1150 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1154 lval = INTVAL (operands[1]);
1158 /* Here we see if we are trying to see if the 64bit value is really
1159 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1160 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1161 8 bytes - 1 shift byte - 1 short literal byte. */
1163 && (n = exact_log2 (lval & (- lval))) != -1
1164 && (lval >> n) < 64)
1168 /* On 32bit platforms, if the 6bits didn't overflow into the
1169 upper 32bit value that value better be 0. If we have
1170 overflowed, make sure it wasn't too much. */
1171 if (HOST_BITS_PER_WIDE_INT == 32 && hval != 0)
1173 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1174 n = 0; /* failure */
1176 lval |= hval << (32 - n);
1178 /* If n is 0, then ashq is not the best way to emit this. */
1181 operands[1] = GEN_INT (lval);
1182 operands[2] = GEN_INT (n);
1183 return "ashq %2,%1,%0";
1185 #if HOST_BITS_PER_WIDE_INT == 32
1187 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1188 upper 32bit value. */
1190 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1191 && (hval >> n) < 64)
1193 operands[1] = GEN_INT (hval >> n);
1194 operands[2] = GEN_INT (n + 32);
1195 return "ashq %2,%1,%0";
1201 && (!MEM_P (operands[0])
1202 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1203 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1204 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1205 && ((CONST_INT_P (operands[1])
1206 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1207 || GET_CODE (operands[1]) == CONST_DOUBLE))
1209 hi[0] = operands[0];
1210 hi[1] = operands[1];
1212 split_quadword_operands (insn, SET, hi, lo, 2);
1214 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1215 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1217 /* The patterns are just movl/movl or pushl/pushl then a movq will
1218 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1219 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1221 if ((!strncmp (pattern_lo, "movl", 4)
1222 && !strncmp (pattern_hi, "movl", 4))
1223 || (!strncmp (pattern_lo, "pushl", 5)
1224 && !strncmp (pattern_hi, "pushl", 5)))
1225 return "movq %1,%0";
/* Predecrement destination: emit the high half first so the two
   pushes land in the right order.  */
1227 if (MEM_P (operands[0])
1228 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1230 output_asm_insn (pattern_hi, hi);
1231 operands[0] = lo[0];
1232 operands[1] = lo[1];
1233 operands[2] = lo[2];
1238 output_asm_insn (pattern_lo, lo);
1239 operands[0] = hi[0];
1240 operands[1] = hi[1];
1241 operands[2] = hi[2];
1245 return "movq %1,%0";
1248 if (symbolic_operand (operands[1], SImode))
1250 if (push_operand (operands[0], SImode))
1251 return "pushab %a1";
1252 return "movab %a1,%0";
1255 if (operands[1] == const0_rtx)
1257 if (push_operand (operands[1], SImode))
1262 if (CONST_INT_P (operands[1])
1263 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1265 HOST_WIDE_INT i = INTVAL (operands[1]);
1267 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1268 return "mcoml %N1,%0";
1269 if ((unsigned HOST_WIDE_INT)i < 0x100)
1270 return "movzbl %1,%0";
1271 if (i >= -0x80 && i < 0)
1272 return "cvtbl %1,%0";
1274 && (n = exact_log2 (i & (-i))) != -1
1275 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1277 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1278 operands[2] = GEN_INT (n);
1279 return "ashl %2,%1,%0";
1281 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1282 return "movzwl %1,%0";
1283 if (i >= -0x8000 && i < 0)
1284 return "cvtwl %1,%0";
1286 if (push_operand (operands[0], SImode))
1288 return "movl %1,%0";
1291 if (CONST_INT_P (operands[1]))
1293 HOST_WIDE_INT i = INTVAL (operands[1]);
1296 else if ((unsigned HOST_WIDE_INT)i < 64)
1297 return "movw %1,%0";
1298 else if ((unsigned HOST_WIDE_INT)~i < 64)
1299 return "mcomw %H1,%0";
1300 else if ((unsigned HOST_WIDE_INT)i < 256)
1301 return "movzbw %1,%0";
1302 else if (i >= -0x80 && i < 0)
1303 return "cvtbw %1,%0";
1305 return "movw %1,%0";
1308 if (CONST_INT_P (operands[1]))
1310 HOST_WIDE_INT i = INTVAL (operands[1]);
1313 else if ((unsigned HOST_WIDE_INT)~i < 64)
1314 return "mcomb %B1,%0";
1316 return "movb %1,%0";
1323 /* Output integer add instructions.
1325 The space-time-opcode tradeoffs for addition vary by model of VAX.
1327 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1328 but it not faster on other models.
1330 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1331 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1332 a register is used in an address too soon after it is set.
1333 Compromise by using movab only when it is shorter than the add
1334 or the base register in the address is one of sp, ap, and fp,
1335 which are not modified very often. */
/* Return the assembler template for an integer add of MODE.  DImode is
   handled as two SImode halves (add/sub low, then adwc/sbwc high, with
   carry tracking); SImode/HImode/QImode pick among inc/dec, two- and
   three-operand add/sub with negated small constants, and movab/pushab
   address-arithmetic forms per the tradeoffs above.  Recurses for the
   DImode-with-zero-low-part case.
   NOTE(review): the mode switch labels, declarations (low/carry/sub),
   braces and a number of else arms are elided in this excerpt.  */
1338 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1345 const char *pattern;
/* Disabled debug hook (the "&& 0" keeps it compiled out).  */
1349 if (TARGET_QMATH && 0)
1352 split_quadword_operands (insn, PLUS, operands, low, 3);
1356 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1357 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1358 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1359 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1362 /* No reason to add a 0 to the low part and thus no carry, so just
1363 emit the appropriate add/sub instruction. */
1364 if (low[2] == const0_rtx)
1365 return vax_output_int_add (NULL, operands, SImode);
1367 /* Are we doing addition or subtraction? */
1368 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1370 /* We can't use vax_output_int_add since some the patterns don't
1371 modify the carry bit. */
1374 if (low[2] == constm1_rtx)
1375 pattern = "decl %0";
1377 pattern = "subl2 $%n2,%0";
1381 if (low[2] == const1_rtx)
1382 pattern = "incl %0";
1384 pattern = "addl2 %2,%0";
1386 output_asm_insn (pattern, low);
1388 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1389 two 32bit parts, we complement each and then add one to
1390 low part. We know that the low part can't overflow since
1391 it's value can never be 0. */
1393 return "sbwc %N2,%0";
1394 return "adwc %2,%0";
1397 /* Add low parts. */
1398 if (rtx_equal_p (operands[0], operands[1]))
1400 if (low[2] == const0_rtx)
1401 /* Should examine operand, punt if not POST_INC. */
1402 pattern = "tstl %0", carry = 0;
1403 else if (low[2] == const1_rtx)
1404 pattern = "incl %0";
1406 pattern = "addl2 %2,%0";
1410 if (low[2] == const0_rtx)
1411 pattern = "movl %1,%0", carry = 0;
1413 pattern = "addl3 %2,%1,%0";
1416 output_asm_insn (pattern, low);
1418 /* If CARRY is 0, we don't have any carry value to worry about. */
1419 return get_insn_template (CODE_FOR_addsi3, insn);
1420 /* %0 = C + %1 + %2 */
1421 if (!rtx_equal_p (operands[0], operands[1]))
1422 output_asm_insn ((operands[1] == const0_rtx
1424 : "movl %1,%0"), operands);
1425 return "adwc %2,%0";
1429 if (rtx_equal_p (operands[0], operands[1]))
1431 if (operands[2] == const1_rtx)
1433 if (operands[2] == constm1_rtx)
1435 if (CONST_INT_P (operands[2])
1436 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1437 return "subl2 $%n2,%0";
1438 if (CONST_INT_P (operands[2])
1439 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1440 && REG_P (operands[1])
1441 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1442 || REGNO (operands[1]) > 11))
1443 return "movab %c2(%1),%0";
1444 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1445 return "movab %a2[%0],%0";
1446 return "addl2 %2,%0";
1449 if (rtx_equal_p (operands[0], operands[2]))
1451 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1452 return "movab %a1[%0],%0";
1453 return "addl2 %1,%0";
1456 if (CONST_INT_P (operands[2])
1457 && INTVAL (operands[2]) < 32767
1458 && INTVAL (operands[2]) > -32768
1459 && REG_P (operands[1])
1460 && push_operand (operands[0], SImode))
1461 return "pushab %c2(%1)";
1463 if (CONST_INT_P (operands[2])
1464 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1465 return "subl3 $%n2,%1,%0";
1467 if (CONST_INT_P (operands[2])
1468 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1469 && REG_P (operands[1])
1470 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1471 || REGNO (operands[1]) > 11))
1472 return "movab %c2(%1),%0";
1474 /* Add this if using gcc on a VAX 3xxx:
1475 if (REG_P (operands[1]) && REG_P (operands[2]))
1476 return "movab (%1)[%2],%0";
1479 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1481 if (push_operand (operands[0], SImode))
1482 return "pushab %a2[%1]";
1483 return "movab %a2[%1],%0";
1486 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1488 if (push_operand (operands[0], SImode))
1489 return "pushab %a1[%2]";
1490 return "movab %a1[%2],%0";
1493 if (flag_pic && REG_P (operands[0])
1494 && symbolic_operand (operands[2], SImode))
1495 return "movab %a2,%0;addl2 %1,%0";
/* NOTE(review): the two arms below test the same operand --
   operands[1] twice; the second was presumably meant to test
   operands[2].  Present in upstream as well; verify before changing.  */
1498 && (symbolic_operand (operands[1], SImode)
1499 || symbolic_operand (operands[1], SImode)))
1502 return "addl3 %1,%2,%0";
1505 if (rtx_equal_p (operands[0], operands[1]))
1507 if (operands[2] == const1_rtx)
1509 if (operands[2] == constm1_rtx)
1511 if (CONST_INT_P (operands[2])
1512 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1513 return "subw2 $%n2,%0";
1514 return "addw2 %2,%0";
1516 if (rtx_equal_p (operands[0], operands[2]))
1517 return "addw2 %1,%0";
1518 if (CONST_INT_P (operands[2])
1519 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1520 return "subw3 $%n2,%1,%0";
1521 return "addw3 %1,%2,%0";
1524 if (rtx_equal_p (operands[0], operands[1]))
1526 if (operands[2] == const1_rtx)
1528 if (operands[2] == constm1_rtx)
1530 if (CONST_INT_P (operands[2])
1531 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1532 return "subb2 $%n2,%0";
1533 return "addb2 %2,%0";
1535 if (rtx_equal_p (operands[0], operands[2]))
1536 return "addb2 %1,%0";
1537 if (CONST_INT_P (operands[2])
1538 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1539 return "subb3 $%n2,%1,%0";
1540 return "addb3 %1,%2,%0";
/* Emit the assembler template for an integer subtract in MODE.  For
   DImode, the low-word subtract is emitted inline here and the returned
   template performs the high-word subtract-with-carry (sbwc).
   NOTE(review): the extraction has elided structural lines (braces,
   case labels, "else", declarations such as "low" and "carry"); the
   code below is kept byte-identical.  */
1548 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1555 const char *pattern;
/* NOTE(review): "&& 0" makes this branch dead — confirm the debug path
   is intentionally disabled.  */
1558 if (TARGET_QMATH && 0)
/* Split the DImode operands into high words (operands) and low words
   (low).  */
1561 split_quadword_operands (insn, MINUS, operands, low, 3);
/* Negation (0 - x): negate hi, then lo, then subtract the borrow.  */
1565 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1567 /* Negation is tricky. It's basically complement and increment.
1568 Negate hi, then lo, and subtract the carry back. */
1569 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1570 || (MEM_P (operands[0])
1571 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1572 fatal_insn ("illegal operand detected", insn);
1573 output_asm_insn ("mnegl %2,%0", operands);
1574 output_asm_insn ("mnegl %2,%0", low);
1575 return "sbwc $0,%0";
/* QMATH in-place subtract: destination must equal first operand; low
   word via decl/subl2, high word via the returned sbwc.  */
1577 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1578 gcc_assert (rtx_equal_p (low[0], low[1]));
1579 if (low[2] == const1_rtx)
1580 output_asm_insn ("decl %0", low);
1582 output_asm_insn ("subl2 %2,%0", low);
1583 return "sbwc %2,%0";
/* Non-QMATH path: pick a low-part pattern and whether a borrow can be
   produced (carry).  */
1586 /* Subtract low parts. */
1587 if (rtx_equal_p (operands[0], operands[1]))
1589 if (low[2] == const0_rtx)
/* pattern == 0 means no low-part insn is needed and no borrow occurs.  */
1590 pattern = 0, carry = 0;
1591 else if (low[2] == constm1_rtx)
1592 pattern = "decl %0";
1594 pattern = "subl2 %2,%0";
1598 if (low[2] == constm1_rtx)
1599 pattern = "decl %0";
1600 else if (low[2] == const0_rtx)
1601 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1603 pattern = "subl3 %2,%1,%0";
/* NOTE(review): a guard such as "if (pattern)" presumably sits on an
   elided line before this call.  */
1606 output_asm_insn (pattern, low);
/* With a possible borrow, the high part needs sbwc (preceded by a movl
   when the destination differs from the first operand).  */
1609 if (!rtx_equal_p (operands[0], operands[1]))
1610 return "movl %1,%0;sbwc %2,%0";
1611 return "sbwc %2,%0";
1612 /* %0 = %2 - %1 - C */
1614 return get_insn_template (CODE_FOR_subsi3, insn);
1622 /* True if X is an rtx for a constant that is a valid address. */
1625 legitimate_constant_address_p (rtx x)
1627 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1628 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1630 if (GET_CODE (x) != CONST)
1632 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1634 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1635 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1641 /* The other macros defined here are used only in legitimate_address_p (). */
/* In both macros, STRICT means X must be a hard register of the proper
   class; non-strict additionally accepts pseudo registers (before
   register allocation).  */
1643 /* Nonzero if X is a hard reg that can be used as an index
1644 or, if not strict, if it is a pseudo reg. */
1645 #define INDEX_REGISTER_P(X, STRICT) \
1646 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1648 /* Nonzero if X is a hard reg that can be used as a base reg
1649 or, if not strict, if it is a pseudo reg. */
1650 #define BASE_REGISTER_P(X, STRICT) \
1651 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1653 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1655 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1656 are no SYMBOL_REFs for external symbols present. */
1659 indirectable_constant_address_p (rtx x, bool indirect)
1661 if (GET_CODE (x) == SYMBOL_REF)
1662 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1664 if (GET_CODE (x) == CONST)
1666 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1667 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1669 return CONSTANT_ADDRESS_P (x);
1672 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1675 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1677 return CONSTANT_ADDRESS_P (x);
1680 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1682 /* True if X is an address which can be indirected. External symbols
1683 could be in a sharable image library, so we disallow those. */
1686 indirectable_address_p (rtx x, bool strict, bool indirect)
1688 if (indirectable_constant_address_p (x, indirect)
1689 || BASE_REGISTER_P (x, strict))
1691 if (GET_CODE (x) != PLUS
1692 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1693 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1695 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1698 /* Return true if x is a valid address not using indexing.
1699 (This much is the easy part.) */
1701 nonindexed_address_p (rtx x, bool strict)
1706 if (! reload_in_progress
1707 || reg_equiv_mem (REGNO (x)) == 0
1708 || indirectable_address_p (reg_equiv_mem (REGNO (x)), strict, false))
1711 if (indirectable_constant_address_p (x, false))
1713 if (indirectable_address_p (x, strict, false))
1715 xfoo0 = XEXP (x, 0);
1716 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1718 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1719 && BASE_REGISTER_P (xfoo0, strict))
1724 /* True if PROD is either a reg times size of mode MODE and MODE is less
1725 than or equal 8 bytes, or just a reg if MODE is one byte. */
1728 index_term_p (rtx prod, enum machine_mode mode, bool strict)
1732 if (GET_MODE_SIZE (mode) == 1)
1733 return BASE_REGISTER_P (prod, strict);
1735 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1738 xfoo0 = XEXP (prod, 0);
1739 xfoo1 = XEXP (prod, 1);
1741 if (CONST_INT_P (xfoo0)
1742 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1743 && INDEX_REGISTER_P (xfoo1, strict))
1746 if (CONST_INT_P (xfoo1)
1747 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1748 && INDEX_REGISTER_P (xfoo0, strict))
1754 /* Return true if X is the sum of a register
1755 and a valid index term for mode MODE. */
1757 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1761 if (GET_CODE (x) != PLUS)
1764 xfoo0 = XEXP (x, 0);
1765 xfoo1 = XEXP (x, 1);
1767 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1770 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1776 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1778 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1780 if (!CONSTANT_ADDRESS_P (xfoo0))
1782 if (BASE_REGISTER_P (xfoo1, strict))
1783 return !flag_pic || mode == QImode;
1784 if (flag_pic && symbolic_operand (xfoo0, SImode))
1786 return reg_plus_index_p (xfoo1, mode, strict);
1789 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1790 that is a valid memory address for an instruction.
1791 The MODE argument is the machine mode for the MEM expression
1792 that wants to use this address. */
1794 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1798 if (nonindexed_address_p (x, strict))
1801 if (GET_CODE (x) != PLUS)
1804 /* Handle <address>[index] represented with index-sum outermost */
1806 xfoo0 = XEXP (x, 0);
1807 xfoo1 = XEXP (x, 1);
1809 if (index_term_p (xfoo0, mode, strict)
1810 && nonindexed_address_p (xfoo1, strict))
1813 if (index_term_p (xfoo1, mode, strict)
1814 && nonindexed_address_p (xfoo0, strict))
1817 /* Handle offset(reg)[index] with offset added outermost */
1819 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1820 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1826 /* Return true if x (a legitimate address expression) has an effect that
1827 depends on the machine mode it is used for. On the VAX, the predecrement
1828 and postincrement address depend thus (the amount of decrement or
1829 increment being the length of the operand) and all indexed address depend
1830 thus (because the index scale factor is the length of the operand). */
1833 vax_mode_dependent_address_p (rtx x)
1837 /* Auto-increment cases are now dealt with generically in recog.c. */
1838 if (GET_CODE (x) != PLUS)
1841 xfoo0 = XEXP (x, 0);
1842 xfoo1 = XEXP (x, 1);
1844 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1846 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1848 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1850 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1857 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1859 if (illegal_addsub_di_memory_operand (x, mode))
1861 rtx addr = XEXP (x, 0);
1862 rtx temp = gen_reg_rtx (Pmode);
1864 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1865 if (GET_CODE (addr) == CONST && flag_pic)
1867 offset = XEXP (XEXP (addr, 0), 1);
1868 addr = XEXP (XEXP (addr, 0), 0);
1871 emit_move_insn (temp, addr);
1873 temp = gen_rtx_PLUS (Pmode, temp, offset);
1874 x = gen_rtx_MEM (DImode, temp);
/* Expand a DImode add or subtract (CODE is PLUS or MINUS) into VAX
   carry-propagating insn sequences.  NOTE(review): the extraction has
   elided structural lines (braces, "if (code == PLUS)", else arms,
   gcc_unreachable, returns); the code below is kept byte-identical.  */
1880 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
/* hi_only: the low word of the second source is zero, so only the high
   words need real arithmetic.  */
1882 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1885 rtx (*gen_old_insn)(rtx, rtx, rtx);
1886 rtx (*gen_si_insn)(rtx, rtx, rtx);
1887 rtx (*gen_insn)(rtx, rtx, rtx);
/* Dispatch on PLUS vs MINUS — the "if (code == PLUS)" test is on an
   elided line.  */
1891 gen_old_insn = gen_adddi3_old;
1892 gen_si_insn = gen_addsi3;
1893 gen_insn = gen_adcdi3;
1895 else if (code == MINUS)
1897 gen_old_insn = gen_subdi3_old;
1898 gen_si_insn = gen_subsi3;
1899 gen_insn = gen_sbcdi3;
1904 /* If this is addition (thus operands are commutative) and if there is one
1905 addend that duplicates the desination, we want that addend to be the
1908 && rtx_equal_p (operands[0], operands[2])
1909 && !rtx_equal_p (operands[1], operands[2]))
1912 operands[2] = operands[1];
/* !TARGET_QMATH: presumably fall back to the monolithic *_old pattern.  */
1918 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
/* hi_only path: copy low word if needed, then a single SImode op on the
   high words.  */
1922 if (!rtx_equal_p (operands[0], operands[1])
1923 && (REG_P (operands[0]) && MEM_P (operands[1])))
1925 emit_move_insn (operands[0], operands[1]);
1926 operands[1] = operands[0];
1929 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1930 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1931 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1933 if (!rtx_equal_p (operands[0], operands[1]))
1934 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1935 operand_subword (operands[1], 0, 0, DImode));
1937 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1938 operand_subword (operands[1], 1, 0, DImode),
1939 operand_subword (operands[2], 1, 0, DImode)));
1943 /* If are adding the same value together, that's really a multiply by 2,
1944 and that's just a left shift of 1. */
1945 if (rtx_equal_p (operands[1], operands[2]))
1947 gcc_assert (code != MINUS);
1948 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1952 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1954 /* If an operand is the same as operand[0], use the operand[0] rtx
1955 because fixup will an equivalent rtx but not an equal one. */
1957 if (rtx_equal_p (operands[0], operands[1]))
1958 operands[1] = operands[0];
1960 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1962 if (rtx_equal_p (operands[0], operands[2]))
1963 operands[2] = operands[0];
1965 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1967 /* If we are subtracting not from ourselves [d = a - b], and because the
1968 carry ops are two operand only, we would need to do a move prior to
1969 the subtract. And if d == b, we would need a temp otherwise
1970 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1971 into d = -b, d += a. Since -b can never overflow, even if b == d,
1974 If we are doing addition, since the carry ops are two operand, if
1975 we aren't adding to ourselves, move the first addend to the
1976 destination first. */
1978 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1979 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
/* Constant minuend: rewrite d = a - b as d = -b; d += a (see the
   comment above).  NOTE(review): some lines of this rewrite (e.g. a
   "code = PLUS;" style update) appear to be elided.  */
1981 if (code == MINUS && CONSTANT_P (operands[1]))
1983 temp = gen_reg_rtx (DImode);
1984 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1986 gen_insn = gen_adcdi3;
1987 operands[2] = operands[1];
1988 operands[1] = operands[0];
1991 emit_move_insn (operands[0], operands[1]);
1994 /* Subtracting a constant will have been rewritten to an addition of the
1995 negative of that constant before we get here. */
1996 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1997 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
/* Return true if LO and HI can serve as the low/high halves of one
   DImode-style operand pair: consecutive registers, a small literal, or
   two MEMs exactly one operand-width apart.  NOTE(review): the
   extraction has elided lines (braces, case labels, MEM check, address
   extraction); the code below is kept byte-identical.  */
2002 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
2004 HOST_WIDE_INT lo_offset;
2005 HOST_WIDE_INT hi_offset;
/* Halves of different rtx kinds can never pair up.  */
2007 if (GET_CODE (lo) != GET_CODE (hi))
/* Registers pair only as Rn/Rn+1 in SImode.  */
2011 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
2012 if (CONST_INT_P (lo))
2013 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
/* NOTE(review): unreachable — the identical CONST_INT_P (lo) test just
   above already returned.  Intended guard unclear; confirm upstream.  */
2014 if (CONST_INT_P (lo))
2015 return mode != SImode;
/* Volatile memory must not be re-accessed as a pair.  */
2020 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
2026 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
2027 return rtx_equal_p (lo, hi);
/* Decompose each address into base + constant offset (case labels for
   REG/SYMBOL_REF/CONST/PLUS are elided).  */
2029 switch (GET_CODE (lo))
2039 if (!CONST_INT_P (XEXP (lo, 1)))
2041 lo_offset = INTVAL (XEXP (lo, 1));
2048 switch (GET_CODE (hi))
2058 if (!CONST_INT_P (XEXP (hi, 1)))
2060 hi_offset = INTVAL (XEXP (hi, 1));
2067 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
/* Adjacent iff same base and offsets differ by exactly one operand
   width.  */
2070 return rtx_equal_p (lo, hi)
2071 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2074 /* Output assembler code for a block containing the constant parts
2075 of a trampoline, leaving space for the variable parts. */
2077 /* On the VAX, the trampoline contains an entry mask and two instructions:
2079 movl $STATIC,r0 (store the functions static chain)
2080 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2083 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2085 assemble_aligned_integer (2, const0_rtx);
2086 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2087 assemble_aligned_integer (4, const0_rtx);
2088 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2089 assemble_aligned_integer (2, GEN_INT (0x9f17));
2090 assemble_aligned_integer (4, const0_rtx);
2093 /* We copy the register-mask from the function's pure code
2094 to the start of the trampoline. */
2097 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2099 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2102 emit_block_move (m_tramp, assemble_trampoline_template (),
2103 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2105 mem = adjust_address (m_tramp, HImode, 0);
2106 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2108 mem = adjust_address (m_tramp, SImode, 4);
2109 emit_move_insn (mem, cxt);
2110 mem = adjust_address (m_tramp, SImode, 11);
2111 emit_move_insn (mem, plus_constant (fnaddr, 2));
2112 emit_insn (gen_sync_istream ());
2115 /* Value is the number of bytes of arguments automatically
2116 popped when returning from a subroutine call.
2117 FUNDECL is the declaration node of the function (as a tree),
2118 FUNTYPE is the data type of the function (as a tree),
2119 or for a library call it is an identifier node for the subroutine name.
2120 SIZE is the number of bytes of arguments passed on the stack.
2122 On the VAX, the RET insn pops a maximum of 255 args for any function. */
2125 vax_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
2126 tree funtype ATTRIBUTE_UNUSED, int size)
2128 return size > 255 * 4 ? 0 : size;
2131 /* Define where to put the arguments to a function.
2132 Value is zero to push the argument on the stack,
2133 or a hard register in which to store the argument.
2135 MODE is the argument's machine mode.
2136 TYPE is the data type of the argument (as a tree).
2137 This is null for libcalls where that information may
2139 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2140 the preceding args and about the function being called.
2141 NAMED is nonzero if this argument is a named parameter
2142 (otherwise it is an extra parameter matching an ellipsis). */
2144 /* On the VAX all args are pushed. */
2147 vax_function_arg (cumulative_args_t cum ATTRIBUTE_UNUSED,
2148 enum machine_mode mode ATTRIBUTE_UNUSED,
2149 const_tree type ATTRIBUTE_UNUSED,
2150 bool named ATTRIBUTE_UNUSED)
2155 /* Update the data in CUM to advance over an argument of mode MODE and
2156 data type TYPE. (TYPE is null for libcalls where that information
2157 may not be available.) */
/* Every argument occupies a whole number of longwords on the stack:
   its size is rounded up to a multiple of 4 bytes before being added
   to the running byte count in *CUM.  NOTE(review): the function's
   closing brace lies beyond this extraction.  */
2160 vax_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2161 const_tree type, bool named ATTRIBUTE_UNUSED)
2163 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2165 *cum += (mode != BLKmode
2166 ? (GET_MODE_SIZE (mode) + 3) & ~3
2167 : (int_size_in_bytes (type) + 3) & ~3);