1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
36 #include "insn-attr.h"
44 #include "tm-constrs.h"
47 #include "target-def.h"
/* Forward declarations of the static target-hook implementations
   defined later in this file.  NOTE(review): this listing is decimated
   (original line numbers jump); e.g. the trailing parameter list of the
   vax_output_mi_thunk prototype is missing here.  */
49 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
50 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
51 static void vax_file_start (void);
52 static void vax_init_libfuncs (void);
53 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
55 static int vax_address_cost_1 (rtx);
56 static int vax_address_cost (rtx, bool);
57 static bool vax_rtx_costs (rtx, int, int, int *, bool);
58 static rtx vax_struct_value_rtx (tree, int);
59 static rtx vax_builtin_setjmp_frame_value (void);
61 /* Initialize the GCC target structure. */
/* Each #undef/#define pair below replaces a default target hook with
   the VAX-specific implementation declared earlier in this file.  */
62 #undef TARGET_ASM_ALIGNED_HI_OP
63 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
65 #undef TARGET_ASM_FUNCTION_PROLOGUE
66 #define TARGET_ASM_FUNCTION_PROLOGUE vax_output_function_prologue
68 #undef TARGET_ASM_FILE_START
69 #define TARGET_ASM_FILE_START vax_file_start
70 #undef TARGET_ASM_FILE_START_APP_OFF
71 #define TARGET_ASM_FILE_START_APP_OFF true
73 #undef TARGET_INIT_LIBFUNCS
74 #define TARGET_INIT_LIBFUNCS vax_init_libfuncs
76 #undef TARGET_ASM_OUTPUT_MI_THUNK
77 #define TARGET_ASM_OUTPUT_MI_THUNK vax_output_mi_thunk
78 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
79 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
81 #undef TARGET_DEFAULT_TARGET_FLAGS
82 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
84 #undef TARGET_RTX_COSTS
85 #define TARGET_RTX_COSTS vax_rtx_costs
86 #undef TARGET_ADDRESS_COST
87 #define TARGET_ADDRESS_COST vax_address_cost
89 #undef TARGET_PROMOTE_PROTOTYPES
90 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
92 #undef TARGET_STRUCT_VALUE_RTX
93 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
95 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
96 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
98 #undef TARGET_LEGITIMATE_ADDRESS_P
99 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
/* The one target-hook vector instance for this backend, assembled from
   the overrides above via TARGET_INITIALIZER.  */
101 struct gcc_target targetm = TARGET_INITIALIZER;
103 /* Set global variables as needed for the options enabled. */
/* NOTE(review): fragment -- the return type, opening brace, any option
   checks, and the closing brace are missing from this excerpt.  Only
   the DFmode assignment (selecting G-float format) is visible.  */
106 override_options (void)
108 /* We're VAX floating point, not IEEE floating point. */
110 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
113 /* Generate the assembly code for function entry. FILE is a stdio
114 stream to output the code to. SIZE is an int: how many units of
115 temporary storage to allocate.
117 Refer to the array `regs_ever_live' to determine which registers to
118 save; `regs_ever_live[I]' is nonzero if register number I is ever
119 used in the function. This function is responsible for knowing
120 which registers should not be saved even if used. */
/* NOTE(review): fragment -- the declarations of `regno', `mask' and
   `offset', the statement accumulating the save mask, and several
   braces are missing from this excerpt.  */
123 vax_output_function_prologue (FILE * file, HOST_WIDE_INT size)
/* Build the VAX procedure entry mask: one bit per callee-saved
   register that is actually live in this function.  */
128 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
129 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
132 fprintf (file, "\t.word 0x%x\n", mask);
/* Emit DWARF2 call-frame info mirroring what the hardware CALLS
   instruction pushes: saved registers, then PC, FP and AP.  */
134 if (dwarf2out_do_frame ())
136 const char *label = dwarf2out_cfi_label ();
139 for (regno = FIRST_PSEUDO_REGISTER-1; regno >= 0; --regno)
140 if (df_regs_ever_live_p (regno) && !call_used_regs[regno])
141 dwarf2out_reg_save (label, regno, offset -= 4);
143 dwarf2out_reg_save (label, PC_REGNUM, offset -= 4);
144 dwarf2out_reg_save (label, FRAME_POINTER_REGNUM, offset -= 4);
145 dwarf2out_reg_save (label, ARG_POINTER_REGNUM, offset -= 4);
146 dwarf2out_def_cfa (label, FRAME_POINTER_REGNUM, -(offset - 4));
149 size -= STARTING_FRAME_OFFSET;
/* Allocate the local frame; presumably movab is used for small sizes
   and subl2 otherwise -- the selecting condition is not visible here.  */
151 asm_fprintf (file, "\tmovab %wd(%Rsp),%Rsp\n", -size);
153 asm_fprintf (file, "\tsubl2 $%wd,%Rsp\n", size);
156 /* When debugging with stabs, we want to output an extra dummy label
157 so that gas can distinguish between D_float and G_float prior to
158 processing the .stabs directive identifying type double. */
/* Implements TARGET_ASM_FILE_START.  NOTE(review): the return type,
   braces, and blank lines are elided in this excerpt.  */
160 vax_file_start (void)
162 default_file_start ();
/* Only stabs (DBX) debugging needs the marker label; its name encodes
   which double format (D or G) this compilation uses.  */
164 if (write_symbols == DBX_DEBUG)
165 fprintf (asm_out_file, "___vax_%c_doubles:\n", ASM_DOUBLE_CHAR);
168 /* We can use the BSD C library routines for the libgcc calls that are
169 still generated, since that's what they boil down to anyways. When
170 ELF, avoid the user's namespace. */
/* Implements TARGET_INIT_LIBFUNCS: route 32-bit unsigned divide and
   remainder to the C library helpers.  The leading '*' tells the
   assembler-name machinery not to add a user-label prefix.  */
173 vax_init_libfuncs (void)
175 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
176 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
179 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
/* Split N quadword (DImode) OPERANDS of INSN into SImode halves: on
   return operands[i] is the high word and low[i] the low word.
   NOTE(review): fragment -- the function's own header comment, the
   parameter list tail (low[], n), local declarations and several braces
   are missing from this excerpt.  */
182 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
187 for (i = 0; i < n; i++)
190 for (i = 0; i < n; i++)
/* Autoincrement/autodecrement MEMs: reuse the same address for both
   halves; the side effect supplies the other word.  */
192 if (MEM_P (operands[i])
193 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
194 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
196 rtx addr = XEXP (operands[i], 0);
197 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
/* When optimizing for size and the base register dies here, turn
   (reg) into (reg)+ for the low word so no offset byte is needed.  */
199 else if (optimize_size && MEM_P (operands[i])
200 && REG_P (XEXP (operands[i], 0))
201 && (code != MINUS || operands[1] != const0_rtx)
202 && find_regno_note (insn, REG_DEAD,
203 REGNO (XEXP (operands[i], 0))))
205 low[i] = gen_rtx_MEM (SImode,
206 gen_rtx_POST_INC (Pmode,
207 XEXP (operands[i], 0)));
208 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
/* General case: take subwords 0 (low) and 1 (high) of the DImode
   operand.  */
212 low[i] = operand_subword (operands[i], 0, 0, DImode);
213 operands[i] = operand_subword (operands[i], 1, 0, DImode);
/* Output to FILE the VAX assembler syntax for the memory address ADDR:
   (reg), -(reg), (reg)+, offset(breg)[ireg], or a constant address.
   NOTE(review): fragment -- return type, braces, several case labels
   and intermediate statements are missing from this excerpt.  */
219 print_operand_address (FILE * file, rtx addr)
222 rtx reg1, breg, ireg;
226 switch (GET_CODE (addr))
230 addr = XEXP (addr, 0);
234 fprintf (file, "(%s)", reg_names[REGNO (addr)]);
238 fprintf (file, "-(%s)", reg_names[REGNO (XEXP (addr, 0))]);
242 fprintf (file, "(%s)+", reg_names[REGNO (XEXP (addr, 0))]);
246 /* There can be either two or three things added here. One must be a
247 REG. One can be either a REG or a MULT of a REG and an appropriate
248 constant, and the third can only be a constant or a MEM.
250 We get these two or three things and put the constant or MEM in
251 OFFSET, the MULT or REG in IREG, and the REG in BREG. If we have
252 a register and can't tell yet if it is a base or index register,
255 reg1 = 0; ireg = 0; breg = 0; offset = 0;
/* First pass: peel one term off the PLUS into offset/ireg/reg1.  */
257 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
258 || MEM_P (XEXP (addr, 0)))
260 offset = XEXP (addr, 0);
261 addr = XEXP (addr, 1);
263 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
264 || MEM_P (XEXP (addr, 1)))
266 offset = XEXP (addr, 1);
267 addr = XEXP (addr, 0);
269 else if (GET_CODE (XEXP (addr, 1)) == MULT)
271 ireg = XEXP (addr, 1);
272 addr = XEXP (addr, 0);
274 else if (GET_CODE (XEXP (addr, 0)) == MULT)
276 ireg = XEXP (addr, 0);
277 addr = XEXP (addr, 1);
279 else if (REG_P (XEXP (addr, 1)))
281 reg1 = XEXP (addr, 1);
282 addr = XEXP (addr, 0);
284 else if (REG_P (XEXP (addr, 0)))
286 reg1 = XEXP (addr, 0);
287 addr = XEXP (addr, 1);
/* Second pass: classify what remains of ADDR, possibly folding a
   second constant into OFFSET via plus_constant.  */
299 else if (GET_CODE (addr) == MULT)
303 gcc_assert (GET_CODE (addr) == PLUS);
304 if (CONSTANT_ADDRESS_P (XEXP (addr, 0))
305 || MEM_P (XEXP (addr, 0)))
309 if (CONST_INT_P (offset))
310 offset = plus_constant (XEXP (addr, 0), INTVAL (offset));
313 gcc_assert (CONST_INT_P (XEXP (addr, 0)));
314 offset = plus_constant (offset, INTVAL (XEXP (addr, 0)));
317 offset = XEXP (addr, 0);
319 else if (REG_P (XEXP (addr, 0)))
322 ireg = reg1, breg = XEXP (addr, 0), reg1 = 0;
324 reg1 = XEXP (addr, 0);
328 gcc_assert (GET_CODE (XEXP (addr, 0)) == MULT);
330 ireg = XEXP (addr, 0);
333 if (CONSTANT_ADDRESS_P (XEXP (addr, 1))
334 || MEM_P (XEXP (addr, 1)))
338 if (CONST_INT_P (offset))
339 offset = plus_constant (XEXP (addr, 1), INTVAL (offset));
342 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
343 offset = plus_constant (offset, INTVAL (XEXP (addr, 1)));
346 offset = XEXP (addr, 1);
348 else if (REG_P (XEXP (addr, 1)))
351 ireg = reg1, breg = XEXP (addr, 1), reg1 = 0;
353 reg1 = XEXP (addr, 1);
357 gcc_assert (GET_CODE (XEXP (addr, 1)) == MULT);
359 ireg = XEXP (addr, 1);
363 /* If REG1 is nonzero, figure out if it is a base or index register. */
367 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
370 || (flag_pic && symbolic_operand (offset, SImode)))))
/* PIC restrictions: a symbolic offset may not be combined with both a
   base and an index register, nor (with -fPIC) carry an addend to an
   external symbol.  */
381 if (flag_pic && symbolic_operand (offset, SImode))
386 output_operand_lossage ("symbol used with both base and indexed registers");
389 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
390 if (flag_pic > 1 && GET_CODE (offset) == CONST
391 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
392 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
395 output_operand_lossage ("symbol with offset used in PIC mode");
399 /* symbol(reg) isn't PIC, but symbol[reg] is. */
/* Emit the pieces: offset, then (breg), then [ireg].  */
408 output_address (offset);
412 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
416 if (GET_CODE (ireg) == MULT)
417 ireg = XEXP (ireg, 0);
418 gcc_assert (REG_P (ireg));
419 fprintf (file, "[%s]", reg_names[REGNO (ireg)]);
/* Default: a bare constant address.  */
424 output_addr_const (file, addr);
/* Output operand X to FILE using VAX assembler syntax, honoring the
   print modifier CODE ('#', '|', 'c', 'C', 'D', 'P', 'N', 'R', 'H',
   'h', 'B', 'b', 'M', ...).  NOTE(review): fragment -- the return
   type, braces, the leading modifier test and the REG/MEM condition
   lines are missing from this excerpt.  */
429 print_operand (FILE *file, rtx x, int code)
432 fputc (ASM_DOUBLE_CHAR, file);
433 else if (code == '|')
434 fputs (REGISTER_PREFIX, file);
435 else if (code == 'c')
436 fputs (cond_name (x), file);
437 else if (code == 'C')
438 fputs (rev_cond_name (x), file);
439 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
440 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
441 else if (code == 'P' && CONST_INT_P (x))
442 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
443 else if (code == 'N' && CONST_INT_P (x))
444 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
445 /* rotl instruction cannot deal with negative arguments. */
446 else if (code == 'R' && CONST_INT_P (x))
447 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
448 else if (code == 'H' && CONST_INT_P (x))
449 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
450 else if (code == 'h' && CONST_INT_P (x))
451 fprintf (file, "$%d", (short) - INTVAL (x));
452 else if (code == 'B' && CONST_INT_P (x))
453 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
454 else if (code == 'b' && CONST_INT_P (x))
455 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
456 else if (code == 'M' && CONST_INT_P (x))
457 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
458 fprintf (file, "%s", reg_names[REGNO (x)]);
461 output_address (XEXP (x, 0));
/* Float immediates are printed as $0f<dec> (SFmode) or with the
   D/G double marker (DFmode).  */
462 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
465 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
466 sizeof (dstr), 0, 1);
467 fprintf (file, "$0f%s", dstr);
469 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
472 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
473 sizeof (dstr), 0, 1);
474 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
/* Under -fPIC an immediate symbolic operand is not representable.  */
478 if (flag_pic > 1 && symbolic_operand (x, SImode))
481 output_operand_lossage ("symbol used as immediate operand");
484 output_addr_const (file, x);
/* NOTE(review): fragments of two condition-name helpers.  Line 491 is
   the dispatch inside cond_name (whose header is missing above); lines
   520-522 begin rev_cond_name, which presumably maps a comparison RTX
   code to the reversed VAX branch mnemonic suffix -- its case labels
   are missing from this excerpt.  */
491 switch (GET_CODE (op))
520 rev_cond_name (rtx op)
522 switch (GET_CODE (op))
/* Return true if C is a floating-point constant the VAX can encode as
   a short literal: 0, 1, 2 (via const_tiny_rtx) or a small power of
   two (or its exact reciprocal).  NOTE(review): fragment -- return
   type, braces, the loop body computing x, and the return statements
   are missing from this excerpt.  */
551 vax_float_literal (rtx c)
553 enum machine_mode mode;
554 REAL_VALUE_TYPE r, s;
557 if (GET_CODE (c) != CONST_DOUBLE)
/* The shared tiny constants (0, 1, 2) are always encodable.  */
562 if (c == const_tiny_rtx[(int) mode][0]
563 || c == const_tiny_rtx[(int) mode][1]
564 || c == const_tiny_rtx[(int) mode][2])
567 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
/* Try small powers of two and their exact inverses.  */
569 for (i = 0; i < 7; i++)
573 REAL_VALUE_FROM_INT (s, x, 0, mode);
575 if (REAL_VALUES_EQUAL (r, s))
577 ok = exact_real_inverse (mode, &s);
579 if (REAL_VALUES_EQUAL (r, s))
586 /* Return the cost in cycles of a memory address, relative to register
589 Each of the following adds the indicated number of cycles:
593 1 - indexing and/or offset(register)
/* NOTE(review): fragment -- parts of the cost table comment, the
   return type, braces, and several switch cases/fallthroughs are
   missing from this excerpt.  Cycle counts are tuned for CVAX; the
   inline comments note VAX 2 differences.  */
598 vax_address_cost_1 (rtx addr)
600 int reg = 0, indexed = 0, indir = 0, offset = 0, predec = 0;
601 rtx plus_op0 = 0, plus_op1 = 0;
603 switch (GET_CODE (addr))
613 indexed = 1; /* 2 on VAX 2 */
616 /* byte offsets cost nothing (on a VAX 2, they cost 1 cycle) */
618 offset = (unsigned HOST_WIDE_INT)(INTVAL(addr)+128) > 256;
622 offset = 1; /* 2 on VAX 2 */
624 case LABEL_REF: /* this is probably a byte offset from the pc */
630 plus_op1 = XEXP (addr, 0);
632 plus_op0 = XEXP (addr, 0);
633 addr = XEXP (addr, 1);
636 indir = 2; /* 3 on VAX 2 */
637 addr = XEXP (addr, 0);
643 /* Up to 3 things can be added in an address. They are stored in
644 plus_op0, plus_op1, and addr. */
658 /* Indexing and register+offset can both be used (except on a VAX 2)
659 without increasing execution time over either one alone. */
660 if (reg && indexed && offset)
661 return reg + indir + offset + predec;
662 return reg + indexed + indir + offset + predec;
/* Implements TARGET_ADDRESS_COST: 1 for a plain register, otherwise
   1 plus the extra cycles computed by vax_address_cost_1.
   NOTE(review): return type and braces are elided in this excerpt.  */
666 vax_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
668 return (1 + (REG_P (x) ? 0 : vax_address_cost_1 (x)));
671 /* Cost of an expression on a VAX. This version has costs tuned for the
672 CVAX chip (found in the VAX 3 series) with comments for variations on
675 FIXME: The costs need review, particularly for TRUNCATE, FLOAT_EXTEND
676 and FLOAT_TRUNCATE. We need a -mcpu option to allow provision of
677 costs on a per cpu basis. */
/* Implements TARGET_RTX_COSTS.  NOTE(review): fragment -- most case
   labels, braces and several assignments of this large switch are
   missing from this excerpt; only representative cost assignments
   remain visible.  */
680 vax_rtx_costs (rtx x, int code, int outer_code, int *total,
681 bool speed ATTRIBUTE_UNUSED)
683 enum machine_mode mode = GET_MODE (x);
684 int i = 0; /* may be modified in switch */
685 const char *fmt = GET_RTX_FORMAT (code); /* may be modified in switch */
689 /* On a VAX, constants from 0..63 are cheap because they can use the
690 1 byte literal constant format. Compare to -1 should be made cheap
691 so that decrement-and-branch insns can be formed more easily (if
692 the value -1 is copied to a register some decrement-and-branch
693 patterns will not match). */
700 if (outer_code == AND)
702 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
705 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
706 || (outer_code == COMPARE
708 || ((outer_code == PLUS || outer_code == MINUS)
709 && (unsigned HOST_WIDE_INT) -INTVAL (x) <= 077))
/* CONST_DOUBLE: float literals encodable as short literals are
   cheaper; for DImode halves check both words against the 6-bit
   literal range.  */
723 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
724 *total = vax_float_literal (x) ? 5 : 8;
726 *total = ((CONST_DOUBLE_HIGH (x) == 0
727 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
728 || (outer_code == PLUS
729 && CONST_DOUBLE_HIGH (x) == -1
730 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
736 return true; /* Implies register operand. */
740 return true; /* Implies register operand. */
746 *total = 16; /* 4 on VAX 9000 */
749 *total = 9; /* 4 on VAX 9000, 12 on VAX 2 */
752 *total = 16; /* 6 on VAX 9000, 28 on VAX 2 */
757 *total = 10; /* 3-4 on VAX 9000, 20-28 on VAX 2 */
760 *total = MAX_COST; /* Mode is not supported. */
768 *total = MAX_COST; /* Mode is not supported. */
776 *total = 30; /* Highly variable. */
777 else if (mode == DFmode)
778 /* divide takes 28 cycles if the result is not zero, 13 otherwise */
781 *total = 11; /* 25 on VAX 2 */
791 *total = MAX_COST; /* Mode is not supported. */
798 *total = (6 /* 4 on VAX 9000 */
799 + (mode == DFmode) + (GET_MODE (XEXP (x, 0)) != SImode));
803 *total = 7; /* 17 on VAX 2 */
812 *total = 10; /* 6 on VAX 9000 */
817 *total = 6; /* 5 on VAX 2, 4 on VAX 9000 */
818 if (CONST_INT_P (XEXP (x, 1)))
819 fmt = "e"; /* all constant rotate counts are short */
824 *total = (mode == DFmode) ? 13 : 8; /* 6/8 on VAX 9000, 16/15 on VAX 2 */
825 /* Small integer operands can use subl2 and addl2. */
826 if ((CONST_INT_P (XEXP (x, 1)))
827 && (unsigned HOST_WIDE_INT)(INTVAL (XEXP (x, 1)) + 63) < 127)
837 /* AND is special because the first operand is complemented. */
839 if (CONST_INT_P (XEXP (x, 0)))
841 if ((unsigned HOST_WIDE_INT)~INTVAL (XEXP (x, 0)) > 63)
851 else if (mode == SFmode)
853 else if (mode == DImode)
/* Memory operands: wider modes cost more, plus the address cost
   unless the address is a plain register or autoincrement.  */
869 if (mode == DImode || mode == DFmode)
870 *total = 5; /* 7 on VAX 2 */
872 *total = 3; /* 4 on VAX 2 */
874 if (!REG_P (x) && GET_CODE (x) != POST_INC)
875 *total += vax_address_cost_1 (x);
881 *total = 3; /* FIXME: Costs need to be checked */
888 /* Now look inside the expression. Operands which are not registers or
889 short constants add to the cost.
891 FMT and I may have been adjusted in the switch above for instructions
892 which require special handling. */
894 while (*fmt++ == 'e')
896 rtx op = XEXP (x, i);
899 code = GET_CODE (op);
901 /* A NOT is likely to be found as the first operand of an AND
902 (in which case the relevant cost is of the operand inside
903 the not) and not likely to be found anywhere else. */
905 op = XEXP (op, 0), code = GET_CODE (op);
910 if ((unsigned HOST_WIDE_INT)INTVAL (op) > 63
911 && GET_MODE (x) != QImode)
912 *total += 1; /* 2 on VAX 2 */
917 *total += 1; /* 2 on VAX 2 */
920 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT)
922 /* Registers are faster than floating point constants -- even
923 those constants which can be encoded in a single byte. */
924 if (vax_float_literal (op))
927 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
931 if (CONST_DOUBLE_HIGH (op) != 0
932 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
937 *total += 1; /* 2 on VAX 2 */
938 if (!REG_P (XEXP (op, 0)))
939 *total += vax_address_cost_1 (XEXP (op, 0));
952 /* Output code to add DELTA to the first argument, and then jump to FUNCTION.
953 Used for C++ multiple inheritance.
954 .mask ^m<r2,r3,r4,r5,r6,r7,r8,r9,r10,r11> #conservative entry mask
955 addl2 $DELTA, 4(ap) #adjust first argument
956 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
/* Implements TARGET_ASM_OUTPUT_MI_THUNK.  NOTE(review): the closing
   of the comment above, the return type, the delta/function parameter
   lines and braces are elided in this excerpt.  */
960 vax_output_mi_thunk (FILE * file,
961 tree thunk ATTRIBUTE_UNUSED,
963 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
/* 0x0ffc = entry mask saving r2-r11, matching the conservative mask
   documented above; 4(ap) is the first stacked argument.  */
966 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
967 asm_fprintf (file, ",4(%Rap)\n");
968 fprintf (file, "\tjmp ");
969 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
970 fprintf (file, "+2\n");
/* Implements TARGET_STRUCT_VALUE_RTX: aggregate return values are
   passed via the register VAX_STRUCT_VALUE_REGNUM, for both incoming
   and outgoing calls.  */
974 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
975 int incoming ATTRIBUTE_UNUSED)
977 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
/* Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE: builtin setjmp saves
   the hard frame pointer rather than the virtual one.  */
981 vax_builtin_setjmp_frame_value (void)
983 return hard_frame_pointer_rtx;
986 /* Worker function for NOTICE_UPDATE_CC. */
/* Track how insn EXP changes the condition codes in cc_status.
   NOTE(review): fragment -- the return type, braces, CC_STATUS_INIT
   calls and some case labels are missing from this excerpt.  */
989 vax_notice_update_cc (rtx exp, rtx insn ATTRIBUTE_UNUSED)
991 if (GET_CODE (exp) == SET)
993 if (GET_CODE (SET_SRC (exp)) == CALL)
995 else if (GET_CODE (SET_DEST (exp)) != ZERO_EXTRACT
996 && GET_CODE (SET_DEST (exp)) != PC)
999 /* The integer operations below don't set carry or
1000 set it in an incompatible way. That's ok though
1001 as the Z bit is all we need when doing unsigned
1002 comparisons on the result of these insns (since
1003 they're always with 0). Set CC_NO_OVERFLOW to
1004 generate the correct unsigned branches. */
1005 switch (GET_CODE (SET_SRC (exp)))
1008 if (GET_MODE_CLASS (GET_MODE (exp)) == MODE_FLOAT)
1016 cc_status.flags = CC_NO_OVERFLOW;
1021 cc_status.value1 = SET_DEST (exp);
1022 cc_status.value2 = SET_SRC (exp);
1025 else if (GET_CODE (exp) == PARALLEL
1026 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
1028 if (GET_CODE (SET_SRC (XVECEXP (exp, 0, 0))) == CALL)
1030 else if (GET_CODE (SET_DEST (XVECEXP (exp, 0, 0))) != PC)
1032 cc_status.flags = 0;
1033 cc_status.value1 = SET_DEST (XVECEXP (exp, 0, 0));
1034 cc_status.value2 = SET_SRC (XVECEXP (exp, 0, 0));
1037 /* PARALLELs whose first element sets the PC are aob,
1038 sob insns. They do change the cc's. */
/* Invalidate value2 when the destination overlaps it, since the
   recorded source would no longer describe the CC contents.  */
1043 if (cc_status.value1 && REG_P (cc_status.value1)
1045 && reg_overlap_mentioned_p (cc_status.value1, cc_status.value2))
1046 cc_status.value2 = 0;
1047 if (cc_status.value1 && MEM_P (cc_status.value1)
1049 && MEM_P (cc_status.value2))
1050 cc_status.value2 = 0;
1051 /* Actual condition, one line up, should be that value2's address
1052 depends on value1, but that is too much of a pain. */
1055 /* Output integer move instructions. */
/* Return the assembler template for moving OPERANDS[1] to OPERANDS[0]
   in MODE (QI/HI/SI/DImode), choosing the shortest encoding for
   constants (clrX, movzX, cvtX, mcomX, ashl/ashq, movq...).
   NOTE(review): fragment -- the return type, braces, the mode switch
   labels and several return statements are missing from this
   excerpt.  */
1058 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
1059 enum machine_mode mode)
1062 const char *pattern_hi, *pattern_lo;
1067 if (operands[1] == const0_rtx)
/* DImode constant under -Os: try to express the 64-bit value as a
   6-bit literal shifted left, so a single ashq replaces an 8-byte
   immediate.  */
1069 if (TARGET_QMATH && optimize_size
1070 && (CONST_INT_P (operands[1])
1071 || GET_CODE (operands[1]) == CONST_DOUBLE))
1073 unsigned HOST_WIDE_INT hval, lval;
1076 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1078 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1080 /* Make sure only the low 32 bits are valid. */
1081 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1082 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1086 lval = INTVAL (operands[1]);
1090 /* Here we see if we are trying to see if the 64bit value is really
1091 a 6bit shifted some arbitrary amount. If so, we can use ashq to
1092 shift it to the correct value saving 7 bytes (1 addr-mode-byte +
1093 8 bytes - 1 shift byte - 1 short literal byte. */
1095 && (n = exact_log2 (lval & (- lval))) != -1
1096 && (lval >> n) < 64)
1100 #if HOST_BITS_PER_WIDE_INT == 32
1101 /* On 32bit platforms, if the 6bits didn't overflow into the
1102 upper 32bit value that value better be 0. If we have
1103 overflowed, make sure it wasn't too much. */
1106 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1107 n = 0; /* failure */
1109 lval |= hval << (32 - n);
1112 /* If n is 0, then ashq is not the best way to emit this. */
1115 operands[1] = GEN_INT (lval);
1116 operands[2] = GEN_INT (n);
1117 return "ashq %2,%1,%0";
1119 #if HOST_BITS_PER_WIDE_INT == 32
1121 /* On 32bit platforms, if the low 32bit value is 0, checkout the
1122 upper 32bit value. */
1124 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1125 && (hval >> n) < 64)
1127 operands[1] = GEN_INT (hval >> n);
1128 operands[2] = GEN_INT (n + 32);
1129 return "ashq %2,%1,%0";
/* Otherwise split the quadword into SImode halves and emit the two
   moves, folding movl/movl or pushl/pushl pairs back into a movq.  */
1135 && (!MEM_P (operands[0])
1136 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1137 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1138 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1139 && ((CONST_INT_P (operands[1])
1140 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1141 || GET_CODE (operands[1]) == CONST_DOUBLE))
1143 hi[0] = operands[0];
1144 hi[1] = operands[1];
1146 split_quadword_operands (insn, SET, hi, lo, 2);
1148 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1149 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1151 /* The patterns are just movl/movl or pushl/pushl then a movq will
1152 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
1153 bytes .vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate value
1155 if ((!strncmp (pattern_lo, "movl", 4)
1156 && !strncmp (pattern_hi, "movl", 4))
1157 || (!strncmp (pattern_lo, "pushl", 5)
1158 && !strncmp (pattern_hi, "pushl", 5)))
1159 return "movq %1,%0";
/* For a pre-decrement destination the high word must go first.  */
1161 if (MEM_P (operands[0])
1162 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1164 output_asm_insn (pattern_hi, hi);
1165 operands[0] = lo[0];
1166 operands[1] = lo[1];
1167 operands[2] = lo[2];
1172 output_asm_insn (pattern_lo, lo);
1173 operands[0] = hi[0];
1174 operands[1] = hi[1];
1175 operands[2] = hi[2];
1179 return "movq %1,%0";
/* SImode: symbolic addresses use movab/pushab (address-of).  */
1182 if (symbolic_operand (operands[1], SImode))
1184 if (push_operand (operands[0], SImode))
1185 return "pushab %a1";
1186 return "movab %a1,%0";
1189 if (operands[1] == const0_rtx)
1191 if (push_operand (operands[1], SImode))
/* Constants >= 64 cannot be short literals; pick the cheapest of
   mcoml / movzbl / cvtbl / ashl / movzwl / cvtwl.  */
1196 if (CONST_INT_P (operands[1])
1197 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1199 HOST_WIDE_INT i = INTVAL (operands[1]);
1201 if ((unsigned HOST_WIDE_INT)(~i) < 64)
1202 return "mcoml %N1,%0";
1203 if ((unsigned HOST_WIDE_INT)i < 0x100)
1204 return "movzbl %1,%0";
1205 if (i >= -0x80 && i < 0)
1206 return "cvtbl %1,%0";
1208 && (n = exact_log2 (i & (-i))) != -1
1209 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1211 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1212 operands[2] = GEN_INT (n);
1213 return "ashl %2,%1,%0";
1215 if ((unsigned HOST_WIDE_INT)i < 0x10000)
1216 return "movzwl %1,%0";
1217 if (i >= -0x8000 && i < 0)
1218 return "cvtwl %1,%0";
1220 if (push_operand (operands[0], SImode))
1222 return "movl %1,%0";
/* HImode constants.  */
1225 if (CONST_INT_P (operands[1]))
1227 HOST_WIDE_INT i = INTVAL (operands[1]);
1230 else if ((unsigned HOST_WIDE_INT)i < 64)
1231 return "movw %1,%0";
1232 else if ((unsigned HOST_WIDE_INT)~i < 64)
1233 return "mcomw %H1,%0";
1234 else if ((unsigned HOST_WIDE_INT)i < 256)
1235 return "movzbw %1,%0";
1236 else if (i >= -0x80 && i < 0)
1237 return "cvtbw %1,%0";
1239 return "movw %1,%0";
/* QImode constants.  */
1242 if (CONST_INT_P (operands[1]))
1244 HOST_WIDE_INT i = INTVAL (operands[1]);
1247 else if ((unsigned HOST_WIDE_INT)~i < 64)
1248 return "mcomb %B1,%0";
1250 return "movb %1,%0";
1257 /* Output integer add instructions.
1259 The space-time-opcode tradeoffs for addition vary by model of VAX.
1261 On a VAX 3 "movab (r1)[r2],r3" is faster than "addl3 r1,r2,r3",
1262 but it not faster on other models.
1264 "movab #(r1),r2" is usually shorter than "addl3 #,r1,r2", and is
1265 faster on a VAX 3, but some VAXen (e.g. VAX 9000) will stall if
1266 a register is used in an address too soon after it is set.
1267 Compromise by using movab only when it is shorter than the add
1268 or the base register in the address is one of sp, ap, and fp,
1269 which are not modified very often. */
/* Return the assembler template for OPERANDS[0] = OPERANDS[1] +
   OPERANDS[2] in MODE.  DImode is handled as two SImode adds plus
   adwc/sbwc for the carry.  NOTE(review): fragment -- the return
   type, braces, the mode switch labels and a number of intermediate
   lines are missing from this excerpt.  */
1272 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
1279 const char *pattern;
1283 if (TARGET_QMATH && 0)
1286 split_quadword_operands (insn, PLUS, operands, low, 3);
1290 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1291 #ifdef NO_EXTERNAL_INDIRECT_ADDRESSS
1292 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1293 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1296 /* No reason to add a 0 to the low part and thus no carry, so just
1297 emit the appropriate add/sub instruction. */
1298 if (low[2] == const0_rtx)
1299 return vax_output_int_add (NULL, operands, SImode);
1301 /* Are we doing addition or subtraction? */
1302 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1304 /* We can't use vax_output_int_add since some the patterns don't
1305 modify the carry bit. */
1308 if (low[2] == constm1_rtx)
1309 pattern = "decl %0";
1311 pattern = "subl2 $%n2,%0";
1315 if (low[2] == const1_rtx)
1316 pattern = "incl %0";
1318 pattern = "addl2 %2,%0";
1320 output_asm_insn (pattern, low);
1322 /* In 2's complement, -n = ~n + 1. Since we are dealing with
1323 two 32bit parts, we complement each and then add one to
1324 low part. We know that the low part can't overflow since
1325 it's value can never be 0. */
1327 return "sbwc %N2,%0";
1328 return "adwc %2,%0";
1331 /* Add low parts. */
1332 if (rtx_equal_p (operands[0], operands[1]))
1334 if (low[2] == const0_rtx)
1335 /* Should examine operand, punt if not POST_INC. */
1336 pattern = "tstl %0", carry = 0;
1337 else if (low[2] == const1_rtx)
1338 pattern = "incl %0";
1340 pattern = "addl2 %2,%0";
1344 if (low[2] == const0_rtx)
1345 pattern = "movl %1,%0", carry = 0;
1347 pattern = "addl3 %2,%1,%0";
1350 output_asm_insn (pattern, low);
1352 /* If CARRY is 0, we don't have any carry value to worry about. */
1353 return get_insn_template (CODE_FOR_addsi3, insn);
1354 /* %0 = C + %1 + %2 */
1355 if (!rtx_equal_p (operands[0], operands[1]))
1356 output_asm_insn ((operands[1] == const0_rtx
1358 : "movl %1,%0"), operands);
1359 return "adwc %2,%0";
/* SImode: pick among incl/decl, subl2/addl2, movab, addl3 based on
   operand equality and constant ranges (see tradeoffs above).  */
1363 if (rtx_equal_p (operands[0], operands[1]))
1365 if (operands[2] == const1_rtx)
1367 if (operands[2] == constm1_rtx)
1369 if (CONST_INT_P (operands[2])
1370 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1371 return "subl2 $%n2,%0";
1372 if (CONST_INT_P (operands[2])
1373 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1374 && REG_P (operands[1])
1375 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1376 || REGNO (operands[1]) > 11))
1377 return "movab %c2(%1),%0";
1378 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1379 return "movab %a2[%0],%0";
1380 return "addl2 %2,%0";
1383 if (rtx_equal_p (operands[0], operands[2]))
1385 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1386 return "movab %a1[%0],%0";
1387 return "addl2 %1,%0";
1390 if (CONST_INT_P (operands[2])
1391 && INTVAL (operands[2]) < 32767
1392 && INTVAL (operands[2]) > -32768
1393 && REG_P (operands[1])
1394 && push_operand (operands[0], SImode))
1395 return "pushab %c2(%1)";
1397 if (CONST_INT_P (operands[2])
1398 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1399 return "subl3 $%n2,%1,%0";
1401 if (CONST_INT_P (operands[2])
1402 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1403 && REG_P (operands[1])
1404 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1405 || REGNO (operands[1]) > 11))
1406 return "movab %c2(%1),%0";
1408 /* Add this if using gcc on a VAX 3xxx:
1409 if (REG_P (operands[1]) && REG_P (operands[2]))
1410 return "movab (%1)[%2],%0";
1413 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1415 if (push_operand (operands[0], SImode))
1416 return "pushab %a2[%1]";
1417 return "movab %a2[%1],%0";
1420 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1422 if (push_operand (operands[0], SImode))
1423 return "pushab %a1[%2]";
1424 return "movab %a1[%2],%0";
1427 if (flag_pic && REG_P (operands[0])
1428 && symbolic_operand (operands[2], SImode))
1429 return "movab %a2,%0;addl2 %1,%0";
1432 && (symbolic_operand (operands[1], SImode)
1433 || symbolic_operand (operands[1], SImode)))
1436 return "addl3 %1,%2,%0";
/* HImode.  */
1439 if (rtx_equal_p (operands[0], operands[1]))
1441 if (operands[2] == const1_rtx)
1443 if (operands[2] == constm1_rtx)
1445 if (CONST_INT_P (operands[2])
1446 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1447 return "subw2 $%n2,%0";
1448 return "addw2 %2,%0";
1450 if (rtx_equal_p (operands[0], operands[2]))
1451 return "addw2 %1,%0";
1452 if (CONST_INT_P (operands[2])
1453 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1454 return "subw3 $%n2,%1,%0";
1455 return "addw3 %1,%2,%0";
/* QImode.  */
1458 if (rtx_equal_p (operands[0], operands[1]))
1460 if (operands[2] == const1_rtx)
1462 if (operands[2] == constm1_rtx)
1464 if (CONST_INT_P (operands[2])
1465 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1466 return "subb2 $%n2,%0";
1467 return "addb2 %2,%0";
1469 if (rtx_equal_p (operands[0], operands[2]))
1470 return "addb2 %1,%0";
1471 if (CONST_INT_P (operands[2])
1472 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1473 return "subb3 $%n2,%1,%0";
1474 return "addb3 %1,%2,%0";
/* Return the assembler template for OPERANDS[0] = OPERANDS[1] -
   OPERANDS[2] in MODE; the visible part handles the DImode split into
   SImode halves with sbwc for the borrow.  NOTE(review): fragment --
   the return type, braces, mode switch labels and several lines are
   missing from this excerpt.  */
1482 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1489 const char *pattern;
1492 if (TARGET_QMATH && 0)
1495 split_quadword_operands (insn, MINUS, operands, low, 3);
1499 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1501 /* Negation is tricky. It's basically complement and increment.
1502 Negate hi, then lo, and subtract the carry back. */
1503 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1504 || (MEM_P (operands[0])
1505 && GET_CODE (XEXP (operands[0], 0)) == POST_INC)
1506 fatal_insn ("illegal operand detected", insn);
1507 output_asm_insn ("mnegl %2,%0", operands);
1508 output_asm_insn ("mnegl %2,%0", low);
1509 return "sbwc $0,%0";
1511 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1512 gcc_assert (rtx_equal_p (low[0], low[1]));
1513 if (low[2] == const1_rtx)
1514 output_asm_insn ("decl %0", low);
1516 output_asm_insn ("subl2 %2,%0", low);
1517 return "sbwc %2,%0";
1520 /* Subtract low parts. */
1521 if (rtx_equal_p (operands[0], operands[1]))
1523 if (low[2] == const0_rtx)
1524 pattern = 0, carry = 0;
1525 else if (low[2] == constm1_rtx)
1526 pattern = "decl %0";
1528 pattern = "subl2 %2,%0";
1532 if (low[2] == constm1_rtx)
1533 pattern = "decl %0";
1534 else if (low[2] == const0_rtx)
1535 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1537 pattern = "subl3 %2,%1,%0";
1540 output_asm_insn (pattern, low);
/* High part: propagate the borrow with sbwc, copying operand 1 into
   place first when destination and source differ.  */
1543 if (!rtx_equal_p (operands[0], operands[1]))
1544 return "movl %1,%0;sbwc %2,%0";
1545 return "sbwc %2,%0";
1546 /* %0 = %2 - %1 - C */
1548 return get_insn_template (CODE_FOR_subsi3, insn);
1556 /* True if X is an rtx for a constant that is a valid address. */
/* NOTE(review): the return-value lines (and braces) are missing from
   this extract; from the visible tests, LABEL_REF/SYMBOL_REF/CONST_INT/
   HIGH are presumably accepted, non-CONST everything else rejected.  */
1559 legitimate_constant_address_p (rtx x)
1561 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1562 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1564 if (GET_CODE (x) != CONST)
1566 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* Under PIC, a CONST wrapping a non-local SYMBOL_REF may live in a
   sharable image and presumably cannot be used directly.  */
1568 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1569 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1575 /* True if the constant value X is a legitimate general operand.
1576 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
/* On the VAX every constant is legitimate; the body (not visible in
   this extract) presumably just returns true.  */
1579 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
1584 /* The other macros defined here are used only in legitimate_address_p (). */
1586 /* Nonzero if X is a hard reg that can be used as an index
1587 or, if not strict, if it is a pseudo reg. */
1588 #define INDEX_REGISTER_P(X, STRICT) \
1589 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_INDEX_P (REGNO (X))))
1591 /* Nonzero if X is a hard reg that can be used as a base reg
1592 or, if not strict, if it is a pseudo reg. */
1593 #define BASE_REGISTER_P(X, STRICT) \
1594 (REG_P (X) && (!(STRICT) || REGNO_OK_FOR_BASE_P (REGNO (X))))
1596 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1598 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1599 are no SYMBOL_REFs for external symbols present. */
/* INDIRECT says whether the address will be used indirectly; under PIC
   an external (non-local) symbol must not be indirected, since it may
   resolve into a sharable image.  */
1602 indirectable_constant_address_p (rtx x, bool indirect)
1604 if (GET_CODE (x) == SYMBOL_REF)
1605 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
/* For CONST (sym + offset), only the inner SYMBOL_REF matters.
   NOTE(review): intervening lines are missing from this extract.  */
1607 if (GET_CODE (x) == CONST)
1609 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1610 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1612 return CONSTANT_ADDRESS_P (x);
1615 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
/* Without the restriction, any constant address may be indirected.  */
1618 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1620 return CONSTANT_ADDRESS_P (x);
1623 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1625 /* True if X is an address which can be indirected. External symbols
1626 could be in a sharable image library, so we disallow those. */
/* Accepts an indirectable constant, a base register, or
   base-register-plus-constant (PIC restricts the constant to
   CONST_INT).  STRICT selects hard-reg-only checking.  */
1629 indirectable_address_p (rtx x, bool strict, bool indirect)
1631 if (indirectable_constant_address_p (x, indirect)
1632 || BASE_REGISTER_P (x, strict))
1634 if (GET_CODE (x) != PLUS
1635 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1636 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1638 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1641 /* Return true if x is a valid address not using indexing.
1642 (This much is the easy part.) */
1644 nonindexed_address_p (rtx x, bool strict)
/* During reload a pseudo may stand for its equivalent memory; accept
   the register if that equivalence (when known) is itself a valid
   non-indirect address.  */
1649 extern rtx *reg_equiv_mem;
1650 if (! reload_in_progress
1651 || reg_equiv_mem[REGNO (x)] == 0
1652 || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
1655 if (indirectable_constant_address_p (x, false))
1657 if (indirectable_address_p (x, strict, false))
/* (MEM addr) is deferred (indirect) addressing: addr itself must be
   indirectable.  */
1659 xfoo0 = XEXP (x, 0);
1660 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
/* Auto-decrement/auto-increment on a base register.  */
1662 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1663 && BASE_REGISTER_P (xfoo0, strict))
1668 /* True if PROD is either a reg times size of mode MODE and MODE is less
1669 than or equal 8 bytes, or just a reg if MODE is one byte. */
1672 index_term_p (rtx prod, enum machine_mode mode, bool strict)
/* Byte-sized access needs no scaling: a bare register suffices.  */
1676 if (GET_MODE_SIZE (mode) == 1)
1677 return BASE_REGISTER_P (prod, strict);
1679 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1682 xfoo0 = XEXP (prod, 0);
1683 xfoo1 = XEXP (prod, 1);
/* Accept (mult size reg) in either operand order; the scale factor
   must equal the operand size exactly.  */
1685 if (CONST_INT_P (xfoo0)
1686 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1687 && INDEX_REGISTER_P (xfoo1, strict))
1690 if (CONST_INT_P (xfoo1)
1691 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1692 && INDEX_REGISTER_P (xfoo0, strict))
1698 /* Return true if X is the sum of a register
1699 and a valid index term for mode MODE. */
1701 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1705 if (GET_CODE (x) != PLUS)
1708 xfoo0 = XEXP (x, 0);
1709 xfoo1 = XEXP (x, 1);
/* PLUS is commutative: check base+index in both orders.  */
1711 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1714 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1720 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
/* XFOO0 is the constant displacement, XFOO1 the register part:
   either a bare base register (non-PIC, or QImode under PIC) or a
   base-plus-index sum.  PIC symbolic displacements are rejected.  */
1722 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1724 if (!CONSTANT_ADDRESS_P (xfoo0))
1726 if (BASE_REGISTER_P (xfoo1, strict))
1727 return !flag_pic || mode == QImode;
1728 if (flag_pic && symbolic_operand (xfoo0, SImode))
1730 return reg_plus_index_p (xfoo1, mode, strict);
1733 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1734 that is a valid memory address for an instruction.
1735 The MODE argument is the machine mode for the MEM expression
1736 that wants to use this address. */
/* Implements TARGET_LEGITIMATE_ADDRESS_P (see the forward declaration
   and the target-hook table at the top of the file).  */
1738 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1742 if (nonindexed_address_p (x, strict))
1745 if (GET_CODE (x) != PLUS)
1748 /* Handle <address>[index] represented with index-sum outermost */
1750 xfoo0 = XEXP (x, 0);
1751 xfoo1 = XEXP (x, 1);
/* The index term and the non-indexed base may appear on either side
   of the PLUS.  */
1753 if (index_term_p (xfoo0, mode, strict)
1754 && nonindexed_address_p (xfoo1, strict))
1757 if (index_term_p (xfoo1, mode, strict)
1758 && nonindexed_address_p (xfoo0, strict))
1761 /* Handle offset(reg)[index] with offset added outermost */
1763 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1764 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1770 /* Return true if x (a legitimate address expression) has an effect that
1771 depends on the machine mode it is used for. On the VAX, the predecrement
1772 and postincrement address depend thus (the amount of decrement or
1773 increment being the length of the operand) and all indexed address depend
1774 thus (because the index scale factor is the length of the operand). */
1777 vax_mode_dependent_address_p (rtx x)
1781 /* Auto-increment cases are now dealt with generically in recog.c. */
1782 if (GET_CODE (x) != PLUS)
1785 xfoo0 = XEXP (x, 0);
1786 xfoo1 = XEXP (x, 1);
/* reg+constant displacement (either order) is presumably the
   mode-independent case; everything else is treated as indexed.
   NOTE(review): the return lines are missing from this extract.  */
1788 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1790 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1792 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1794 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
/* Rewrite memory operand X (mode MODE) whose address is illegal for a
   DImode add/sub into a MEM through a fresh address register,
   splitting off a CONST offset under PIC so only the symbol is loaded.
   Returns the (possibly replaced) operand.  */
1801 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1803 if (illegal_addsub_di_memory_operand (x, mode))
1805 rtx addr = XEXP (x, 0);
1806 rtx temp = gen_reg_rtx (Pmode);
1808 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
/* Under PIC, peel (const (plus sym offset)) apart: move the symbol
   into the temp register and re-add the offset afterwards.  */
1809 if (GET_CODE (addr) == CONST && flag_pic)
1811 offset = XEXP (XEXP (addr, 0), 1);
1812 addr = XEXP (XEXP (addr, 0), 0);
1815 emit_move_insn (temp, addr);
1817 temp = gen_rtx_PLUS (Pmode, temp, offset);
1818 x = gen_rtx_MEM (DImode, temp);
/* Expand a DImode add (CODE == PLUS) or subtract (CODE == MINUS) of
   OPERANDS, choosing between the legacy whole-DImode patterns and the
   SImode + add/sub-with-carry pairs, and massaging operands so the
   two-operand carry insns can be used.  NOTE(review): this extract is
   missing lines (braces, else arms, some emits); confirm the exact
   control flow against a pristine vax.c.  */
1824 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
/* Low word of the second source is zero: only the high word changes.  */
1826 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1829 rtx (*gen_old_insn)(rtx, rtx, rtx);
1830 rtx (*gen_si_insn)(rtx, rtx, rtx);
1831 rtx (*gen_insn)(rtx, rtx, rtx);
/* Select the generator triple for the requested operation.  */
1835 gen_old_insn = gen_adddi3_old;
1836 gen_si_insn = gen_addsi3;
1837 gen_insn = gen_adcdi3;
1839 else if (code == MINUS)
1841 gen_old_insn = gen_subdi3_old;
1842 gen_si_insn = gen_subsi3;
1843 gen_insn = gen_sbcdi3;
1848 /* If this is addition (thus operands are commutative) and if there is one
1849 addend that duplicates the desination, we want that addend to be the
1852 && rtx_equal_p (operands[0], operands[2])
1853 && !rtx_equal_p (operands[1], operands[2]))
1856 operands[2] = operands[1];
/* Legacy (non-QMATH) path: emit the old whole-DImode pattern.  */
1862 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
/* hi_only path: copy reg<-mem source first, then operate on the high
   SImode word alone.  */
1866 if (!rtx_equal_p (operands[0], operands[1])
1867 && (REG_P (operands[0]) && MEM_P (operands[1])))
1869 emit_move_insn (operands[0], operands[1]);
1870 operands[1] = operands[0];
1873 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1874 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1875 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1877 if (!rtx_equal_p (operands[0], operands[1]))
1878 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1879 operand_subword (operands[1], 0, 0, DImode));
1881 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1882 operand_subword (operands[1], 1, 0, DImode),
1883 operand_subword (operands[2], 1, 0, DImode)));
1887 /* If are adding the same value together, that's really a multiply by 2,
1888 and that's just a left shift of 1. */
1889 if (rtx_equal_p (operands[1], operands[2]))
1891 gcc_assert (code != MINUS);
1892 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1896 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1898 /* If an operand is the same as operand[0], use the operand[0] rtx
1899 because fixup will an equivalent rtx but not an equal one. */
1901 if (rtx_equal_p (operands[0], operands[1]))
1902 operands[1] = operands[0];
1904 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1906 if (rtx_equal_p (operands[0], operands[2]))
1907 operands[2] = operands[0];
1909 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1911 /* If we are subtracting not from ourselves [d = a - b], and because the
1912 carry ops are two operand only, we would need to do a move prior to
1913 the subtract. And if d == b, we would need a temp otherwise
1914 [d = a, d -= d] and we end up with 0. Instead we rewrite d = a - b
1915 into d = -b, d += a. Since -b can never overflow, even if b == d,
1918 If we are doing addition, since the carry ops are two operand, if
1919 we aren't adding to ourselves, move the first addend to the
1920 destination first. */
1922 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1923 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
/* d = const - b: negate b into d, then add the constant with carry.  */
1925 if (code == MINUS && CONSTANT_P (operands[1]))
1927 temp = gen_reg_rtx (DImode);
1928 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1930 gen_insn = gen_adcdi3;
1931 operands[2] = operands[1];
1932 operands[1] = operands[0];
1935 emit_move_insn (operands[0], operands[1]);
1938 /* Subtracting a constant will have been rewritten to an addition of the
1939 negative of that constant before we get here. */
1940 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1941 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
/* True if LO and HI together can form a single DImode-style operand:
   consecutive registers, small constants, or memory references MODE
   bytes apart.  NOTE(review): the function's closing lines are past
   this view and several case labels/returns are missing from the
   extract -- verify against pristine vax.c.  */
1946 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1948 HOST_WIDE_INT lo_offset;
1949 HOST_WIDE_INT hi_offset;
/* Mixed rtx kinds can never be adjacent.  */
1951 if (GET_CODE (lo) != GET_CODE (hi))
1955 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
/* Constant pair: hi must be zero and lo a 6-bit literal.  */
1956 if (CONST_INT_P (lo))
1957 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
/* NOTE(review): this second CONST_INT_P (lo) test is unreachable
   after the one above -- presumably a condition was lost or differs
   in the pristine source.  */
1958 if (CONST_INT_P (lo))
1959 return mode != SImode;
1964 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
/* A post-increment pair advances between accesses, so equal
   addresses are in fact adjacent.  */
1970 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
1971 return rtx_equal_p (lo, hi);
/* Extract the constant displacement from each address.  */
1973 switch (GET_CODE (lo))
1983 if (!CONST_INT_P (XEXP (lo, 1)))
1985 lo_offset = INTVAL (XEXP (lo, 1));
1992 switch (GET_CODE (hi))
2002 if (!CONST_INT_P (XEXP (hi, 1)))
2004 hi_offset = INTVAL (XEXP (hi, 1));
2011 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
/* Same base, displacements exactly one operand-size apart.  */
2014 return rtx_equal_p (lo, hi)
2015 && hi_offset - lo_offset == GET_MODE_SIZE (mode);