1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
/* NOTE(review): sampled listing — intermediate source lines (e.g. the matching
   #endif and the enum body) are missing; code below kept byte-identical.
   Compile-time trap: defining EXTRA_CONSTRAINT would steal the 'S' letter
   this port reserves for class SIREG. */
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
48 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
/* NOTE(review): sampled listing — the closing "};" of each initializer is
   missing from view; code kept byte-identical.
   Per-processor instruction cost tables, consumed (presumably via RTX_COSTS
   through ix86_cost — TODO confirm in i386.h) to steer rtl cost decisions. */
55 /* Processor costs (relative to an add) */
56 struct processor_costs i386_cost = { /* 386 specific costs */
57 1, /* cost of an add instruction (2 cycles) */
58 1, /* cost of a lea instruction */
59 3, /* variable shift costs */
60 2, /* constant shift costs */
61 6, /* cost of starting a multiply */
62 1, /* cost of multiply per each bit set */
63 23 /* cost of a divide/mod */
66 struct processor_costs i486_cost = { /* 486 specific costs */
67 1, /* cost of an add instruction */
68 1, /* cost of a lea instruction */
69 3, /* variable shift costs */
70 2, /* constant shift costs */
71 12, /* cost of starting a multiply */
72 1, /* cost of multiply per each bit set */
73 40 /* cost of a divide/mod */
76 struct processor_costs pentium_cost = {
77 1, /* cost of an add instruction */
78 1, /* cost of a lea instruction */
79 3, /* variable shift costs */
80 1, /* constant shift costs */
81 12, /* cost of starting a multiply */
82 1, /* cost of multiply per each bit set */
83 25 /* cost of a divide/mod */
/* Active cost table; defaults to Pentium costs and is reassigned from
   processor_target_table — presumably in override_options (TODO confirm;
   that assignment is not visible in this sampled view). */
86 struct processor_costs *ix86_cost = &pentium_cost;
/* NOTE(review): sampled listing; code kept byte-identical.
   AT_BP builds a MEM rtx addressed by the frame pointer (companion AT_SP is
   referenced later but its #define is not visible here).  The char* arrays
   map hard-register numbers to their printable names, initialized from
   macros defined in i386.h. */
88 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
90 extern FILE *asm_out_file;
91 extern char *strcat ();
/* Old-style (K&R) forward declarations for string-returning output helpers
   defined later in this file. */
93 char *singlemove_string ();
94 char *output_move_const_single ();
95 char *output_fp_cc0_set ();
97 char *hi_reg_name[] = HI_REGISTER_NAMES;
98 char *qi_reg_name[] = QI_REGISTER_NAMES;
99 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
/* NOTE(review): sampled listing — some initializer rows and the closing "};"
   are missing from view; code kept byte-identical. */
101 /* Array of the smallest class containing reg number REGNO, indexed by
102 REGNO. Used by REGNO_REG_CLASS in i386.h. */
104 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
/* ax, dx, cx, bx get their own singleton classes ... */
107 AREG, DREG, CREG, BREG,
/* ... si/di likewise; remaining integer regs fall into broader classes ... */
109 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
/* ... and the 387 stack: st(0) and st(1) are distinguished from the rest. */
111 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
112 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
/* NOTE(review): sampled listing; code kept byte-identical.  File-scope state
   shared between i386.md patterns and the option-handling code below. */
117 /* Test and compare insns in i386.md store the information needed to
118 generate branch and scc insns here. */
120 struct rtx_def *i386_compare_op0 = NULL_RTX;
121 struct rtx_def *i386_compare_op1 = NULL_RTX;
/* Generator-function pointers for the pending compare; presumably set by the
   cmp/test expanders in i386.md — TODO confirm (not visible here). */
122 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
124 /* which cpu are we scheduling for */
125 enum processor_type ix86_cpu;
/* The declaration of ix86_isa itself is sampled out of this view. */
127 /* which instruction set architecture to use. */
130 /* Strings to hold which cpu and instruction set architecture to use. */
131 char *ix86_cpu_string; /* for -mcpu=<xxx> */
132 char *ix86_isa_string; /* for -misa=<xxx> */
134 /* Register allocation order */
135 char *i386_reg_alloc_order;
/* Marks which hard regs the -mreg-alloc= string claimed; read later by
   order_regs_for_local_alloc to append the unmentioned registers. */
136 static char regs_allocated[FIRST_PSEUDO_REGISTER];
138 /* # of registers to use to pass arguments. */
139 char *i386_regparm_string; /* # registers to use to pass args */
140 int i386_regparm; /* i386_regparm_string as a number */
142 /* Alignment to use for loops and jumps */
143 char *i386_align_loops_string; /* power of two alignment for loops */
144 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
145 char *i386_align_funcs_string; /* power of two alignment for functions */
147 int i386_align_loops; /* power of two alignment for loops */
148 int i386_align_jumps; /* power of two alignment for non-loop jumps */
149 int i386_align_funcs; /* power of two alignment for functions */
/* NOTE(review): sampled listing — the "struct ptt {" opener for the table's
   element type is missing from view (only its fields and the closing brace
   survive); code kept byte-identical. */
151 /* Sometimes certain combinations of command options do not make
152 sense on a particular target machine. You can define a macro
153 `OVERRIDE_OPTIONS' to take account of this. This macro, if
154 defined, is executed once just after all the command options have
157 Don't use this macro to turn on various extra optimizations for
158 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* One row per recognized -mcpu=/-misa= name: canonical name, enum value,
   cost table, and target_flags bits to set/clear when selected. */
169 char *name; /* Canonical processor name. */
170 enum processor_type processor; /* Processor type enum value. */
171 struct processor_costs *cost; /* Processor costs */
172 int target_enable; /* Target flags to enable. */
173 int target_disable; /* Target flags to disable. */
174 } processor_target_table[]
175 = {{PROCESSOR_COMMON_STRING, PROCESSOR_COMMON, &i486_cost, 0, 0},
176 {PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
177 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
178 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
179 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
180 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
181 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
/* Row count of the table above, used as the loop bound in override_options. */
183 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
/* NOTE(review): interior of the OVERRIDE_OPTIONS worker (presumably
   override_options — its header, braces, and many statements are sampled out
   of this listing; code below kept byte-identical).  Visible work: validate
   -mreg-alloc=, resolve -misa=/-mcpu= against processor_target_table,
   validate -mregparm= and the three -malign-* options, then apply a few
   flag tweaks. */
185 #ifdef SUBTARGET_OVERRIDE_OPTIONS
186 SUBTARGET_OVERRIDE_OPTIONS;
189 /* Validate registers in register allocation order */
190 if (i386_reg_alloc_order)
192 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
/* Letter -> hard register number; duplicates are fatal below. */
196 case 'a': regno = 0; break;
197 case 'd': regno = 1; break;
198 case 'c': regno = 2; break;
199 case 'b': regno = 3; break;
200 case 'S': regno = 4; break;
201 case 'D': regno = 5; break;
202 case 'B': regno = 6; break;
204 default: fatal ("Register '%c' is unknown", ch);
207 if (regs_allocated[regno])
208 fatal ("Register '%c' was already specified in the allocation order", ch);
210 regs_allocated[regno] = 1;
214 /* Get the architectural level. */
215 if (ix86_isa_string == (char *)0)
216 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
218 for (i = 0; i < ptt_size; i++)
219 if (! strcmp (ix86_isa_string, processor_target_table[i].name))
221 ix86_isa = processor_target_table[i].processor;
/* If -mcpu= was not given, it defaults to the -misa= selection. */
222 if (ix86_cpu_string == (char *)0)
223 ix86_cpu_string = processor_target_table[i].name;
/* (fallthrough when no table entry matched — loop-exit test sampled out) */
229 error ("bad value (%s) for -misa= switch", ix86_isa_string);
230 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
231 ix86_isa = PROCESSOR_DEFAULT;
234 for (j = 0; j < ptt_size; j++)
235 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
237 ix86_cpu = processor_target_table[j].processor;
/* Reject scheduling for an older CPU than the PPro-level ISA requested. */
238 if (i > j && (int)ix86_isa >= (int)PROCESSOR_PENTIUMPRO)
239 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_isa_string);
241 target_flags |= processor_target_table[j].target_enable;
242 target_flags &= ~processor_target_table[j].target_disable;
248 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
249 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
250 ix86_cpu = PROCESSOR_DEFAULT;
253 /* Validate -mregparm= value */
254 if (i386_regparm_string)
256 i386_regparm = atoi (i386_regparm_string);
257 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
258 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
/* Default code alignment: 2 on a plain 386, 4 otherwise. */
261 def_align = (TARGET_386) ? 2 : 4;
263 /* Validate -malign-loops= value, or provide default */
264 if (i386_align_loops_string)
266 i386_align_loops = atoi (i386_align_loops_string);
267 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
268 fatal ("-malign-loops=%d is not between 0 and %d",
269 i386_align_loops, MAX_CODE_ALIGN);
272 i386_align_loops = 2;
274 /* Validate -malign-jumps= value, or provide default */
275 if (i386_align_jumps_string)
277 i386_align_jumps = atoi (i386_align_jumps_string);
278 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
279 fatal ("-malign-jumps=%d is not between 0 and %d",
280 i386_align_jumps, MAX_CODE_ALIGN);
283 i386_align_jumps = def_align;
285 /* Validate -malign-functions= value, or provide default */
286 if (i386_align_funcs_string)
288 i386_align_funcs = atoi (i386_align_funcs_string);
289 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
290 fatal ("-malign-functions=%d is not between 0 and %d",
291 i386_align_funcs, MAX_CODE_ALIGN);
294 i386_align_funcs = def_align;
296 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
297 flag_omit_frame_pointer = 1;
299 /* pic references don't explicitly mention pic_offset_table_rtx */
/* (the guarding condition, presumably flag_pic, is sampled out) */
301 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
/* NOTE(review): sampled listing — braces/switch framing are missing from
   view; code kept byte-identical.  Implements ORDER_REGS_FOR_LOCAL_ALLOC:
   honor the user's -mreg-alloc= letters first (using regs_allocated filled
   in by the option code above), then append every unmentioned register;
   with no user order, use natural 0..N-1 order. */
304 /* A C statement (sans semicolon) to choose the order in which to
305 allocate hard registers for pseudo-registers local to a basic
308 Store the desired register order in the array `reg_alloc_order'.
309 Element 0 should be the register to allocate first; element 1, the
310 next register; and so on.
312 The macro body should not assume anything about the contents of
313 `reg_alloc_order' before execution of the macro.
315 On most machines, it is not necessary to define this macro. */
318 order_regs_for_local_alloc ()
320 int i, ch, order, regno;
322 /* User specified the register allocation order */
323 if (i386_reg_alloc_order)
325 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
329 case 'a': regno = 0; break;
330 case 'd': regno = 1; break;
331 case 'c': regno = 2; break;
332 case 'b': regno = 3; break;
333 case 'S': regno = 4; break;
334 case 'D': regno = 5; break;
335 case 'B': regno = 6; break;
338 reg_alloc_order[order++] = regno;
/* Fill in the rest of the order with the registers not named by the user. */
341 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
343 if (!regs_allocated[i])
344 reg_alloc_order[order++] = i;
348 /* If users did not specify a register allocation order, use natural order */
351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
352 reg_alloc_order[i] = i;
/* NOTE(review): sampled listing — return type, braces, and the level test
   are missing from view; code kept byte-identical.  OPTIMIZATION_OPTIONS
   worker: disables first scheduling pass to ease register pressure. */
358 optimization_options (level)
361 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
362 make the problem with not enough registers even worse */
363 #ifdef INSN_SCHEDULING
365 flag_schedule_insns = 0;
/* NOTE(review): only the comment and K&R signature of this function survive
   in the sampled listing; its body is missing.  Kept byte-identical. */
369 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
370 attribute for DECL. The attributes in ATTRIBUTES have previously been
374 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
/* NOTE(review): sampled listing — return statements and braces between the
   visible conditions are missing; code kept byte-identical.  Validates the
   i386-specific type attributes: stdcall, cdecl (both argument-less) and
   regparm (one integer argument in [0, REGPARM_MAX]). */
383 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
384 attribute for TYPE. The attributes in ATTRIBUTES have previously been
388 i386_valid_type_attribute_p (type, attributes, identifier, args)
/* These attributes only make sense on function-ish types. */
394 if (TREE_CODE (type) != FUNCTION_TYPE
395 && TREE_CODE (type) != FIELD_DECL
396 && TREE_CODE (type) != TYPE_DECL)
399 /* Stdcall attribute says callee is responsible for popping arguments
400 if they are not variable. */
401 if (is_attribute_p ("stdcall", identifier))
402 return (args == NULL_TREE)
404 /* Cdecl attribute says the callee is a normal C declaration */
405 if (is_attribute_p ("cdecl", identifier))
406 return (args == NULL_TREE);
408 /* Regparm attribute specifies how many integer arguments are to be
409 passed in registers */
410 if (is_attribute_p ("regparm", identifier))
/* regparm must carry exactly one argument... */
414 if (!args || TREE_CODE (args) != TREE_LIST
415 || TREE_CHAIN (args) != NULL_TREE
416 || TREE_VALUE (args) == NULL_TREE)
419 cst = TREE_VALUE (args);
/* ...which must be an integer constant... */
420 if (TREE_CODE (cst) != INTEGER_CST)
/* ...within the register-count range supported by the ABI. */
423 if (TREE_INT_CST_HIGH (cst) != 0
424 || TREE_INT_CST_LOW (cst) < 0
425 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
/* NOTE(review): only the comment and K&R signature survive in the sampled
   listing; the body is missing.  Kept byte-identical. */
434 /* Return 0 if the attributes for two types are incompatible, 1 if they
435 are compatible, and 2 if they are nearly compatible (which causes a
436 warning to be generated). */
439 i386_comp_type_attributes (type1, type2)
/* NOTE(review): sampled listing — several returns, braces, and condition
   heads are missing; code kept byte-identical.  RETURN_POPS_ARGS worker. */
447 /* Value is the number of bytes of arguments automatically
448 popped when returning from a subroutine call.
449 FUNDECL is the declaration node of the function (as a tree),
450 FUNTYPE is the data type of the function (as a tree),
451 or for a library call it is an identifier node for the subroutine name.
452 SIZE is the number of bytes of arguments passed on the stack.
454 On the 80386, the RTD insn may be used to pop them if the number
455 of args is fixed, but if the number is variable then the caller
456 must pop them all. RTD can't be used for library calls now
457 because the library is compiled with the Unix compiler.
458 Use of RTD is a selectable option, since it is incompatible with
459 standard Unix calling sequences. If the option is not selected,
460 the caller must always pop the args.
462 The attribute stdcall is equivalent to RTD on a per module basis. */
465 i386_return_pops_args (fundecl, funtype, size)
470 int rtd = TARGET_RTD;
/* Library calls (identifier node instead of a FUNCTION_TYPE) never pop. */
472 if (TREE_CODE (funtype) == IDENTIFIER_NODE)
475 /* Cdecl functions override -mrtd, and never pop the stack */
476 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
478 /* Stdcall functions will pop the stack if not variable args */
479 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* Pop only when the arg list is fixed: empty, or terminated by void. */
483 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
484 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
488 /* Lose any fake structure return argument */
/* The hidden struct-return pointer is popped by the callee: one word. */
489 if (aggregate_value_p (TREE_TYPE (funtype)))
490 return GET_MODE_SIZE (Pmode);
496 /* Argument support functions. */
/* NOTE(review): sampled listing — braces, the zero_cum assignment, and the
   loop tail are missing from view; code kept byte-identical. */
498 /* Initialize a variable CUM of type CUMULATIVE_ARGS
499 for a call to a function whose data type is FNTYPE.
500 For a library call, FNTYPE is 0. */
503 init_cumulative_args (cum, fntype, libname)
504 CUMULATIVE_ARGS *cum; /* argument info to initialize */
505 tree fntype; /* tree ptr for function decl */
506 rtx libname; /* SYMBOL_REF of library name or 0 */
/* All-zero template; presumably copied into *cum (assignment sampled out). */
508 static CUMULATIVE_ARGS zero_cum;
509 tree param, next_param;
/* Optional -mdebug-arg style tracing of the incoming function type. */
511 if (TARGET_DEBUG_ARG)
513 fprintf (stderr, "\ninit_cumulative_args (");
516 tree ret_type = TREE_TYPE (fntype);
517 fprintf (stderr, "fntype code = %s, ret code = %s",
518 tree_code_name[ (int)TREE_CODE (fntype) ],
519 tree_code_name[ (int)TREE_CODE (ret_type) ]);
522 fprintf (stderr, "no fntype");
525 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
530 /* Set up the number of registers to use for passing arguments. */
531 cum->nregs = i386_regparm;
/* A regparm attribute on the function type overrides the -mregparm value. */
534 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
536 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
539 /* Determine if this function has variable arguments. This is
540 indicated by the last argument being 'void_type_mode' if there
541 are no variable arguments. If there are variable arguments, then
542 we won't pass anything in registers */
546 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
550 next_param = TREE_CHAIN (param);
/* Last entry not void => varargs; register passing is disabled
   (the cum->nregs = 0 statement is sampled out of this view). */
551 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
556 if (TARGET_DEBUG_ARG)
557 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
/* NOTE(review): sampled listing — the actual cum->words/cum->nregs update
   statements are missing from view; code kept byte-identical.
   FUNCTION_ARG_ADVANCE worker. */
562 /* Update the data in CUM to advance over an argument
563 of mode MODE and data type TYPE.
564 (TYPE is null for libcalls where that information may not be available.) */
567 function_arg_advance (cum, mode, type, named)
568 CUMULATIVE_ARGS *cum; /* current arg information */
569 enum machine_mode mode; /* current arg mode */
570 tree type; /* type of the argument or 0 if lib support */
571 int named; /* whether or not the argument was named */
/* BLKmode args get their size from the tree; others from the mode. */
573 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
574 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
576 if (TARGET_DEBUG_ARG)
578 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
579 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
/* NOTE(review): sampled listing — the switch head, integer-mode cases, and
   the return are missing from view; code kept byte-identical.
   FUNCTION_ARG worker: register (REG rtx) or stack (zero). */
594 /* Define where to put the arguments to a function.
595 Value is zero to push the argument on the stack,
596 or a hard register in which to store the argument.
598 MODE is the argument's machine mode.
599 TYPE is the data type of the argument (as a tree).
600 This is null for libcalls where that information may
602 CUM is a variable of type CUMULATIVE_ARGS which gives info about
603 the preceding args and about the function being called.
604 NAMED is nonzero if this argument is a named parameter
605 (otherwise it is an extra parameter matching an ellipsis). */
608 function_arg (cum, mode, type, named)
609 CUMULATIVE_ARGS *cum; /* current arg information */
610 enum machine_mode mode; /* current arg mode */
611 tree type; /* type of the argument or 0 if lib support */
612 int named; /* != 0 for normal args, == 0 for ... args */
615 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
616 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
620 default: /* for now, pass fp/complex values on the stack */
/* Use a register only when the whole argument fits in what is left. */
628 if (words <= cum->nregs)
629 ret = gen_rtx (REG, mode, cum->regno);
633 if (TARGET_DEBUG_ARG)
636 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
637 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
640 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
642 fprintf (stderr, ", stack");
644 fprintf (stderr, " )\n");
/* NOTE(review): only comment and signature survive in the sampled listing;
   the body is missing.  Kept byte-identical. */
650 /* For an arg passed partly in registers and partly in memory,
651 this is the number of registers used.
652 For args passed entirely in registers or entirely in memory, zero. */
655 function_arg_partial_nregs (cum, mode, type, named)
656 CUMULATIVE_ARGS *cum; /* current arg information */
657 enum machine_mode mode; /* current arg mode */
658 tree type; /* type of the argument or 0 if lib support */
659 int named; /* != 0 for normal args, == 0 for ... args */
/* NOTE(review): sampled listing — declarations of xops/high and braces are
   missing from view; code kept byte-identical.  Pushes SRC (word by word,
   high word first) onto the 386 stack, emits TEMPLATE against that stack
   slot, then pops the stack back with an add to %esp. */
665 /* Output an insn whose source is a 386 integer register. SRC is the
666 rtx for the register, and TEMPLATE is the op-code template. SRC may
667 be either SImode or DImode.
669 The template will be output with operands[0] as SRC, and operands[1]
670 as a pointer to the top of the 386 stack. So a call from floatsidf2
671 would look like this:
673 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
675 where %z0 corresponds to the caller's operands[1], and is used to
676 emit the proper size suffix.
678 ??? Extend this to handle HImode - a 387 can load and store HImode
682 output_op_from_reg (src, template)
687 int size = GET_MODE_SIZE (GET_MODE (src));
690 xops[1] = AT_SP (Pmode);
691 xops[2] = GEN_INT (size);
692 xops[3] = stack_pointer_rtx;
/* Multi-word value: push the higher-numbered (more significant on this
   little-endian target) words first so memory order is correct. */
694 if (size > UNITS_PER_WORD)
697 if (size > 2 * UNITS_PER_WORD)
699 high = gen_rtx (REG, SImode, REGNO (src) + 2);
700 output_asm_insn (AS1 (push%L0,%0), &high);
702 high = gen_rtx (REG, SImode, REGNO (src) + 1);
703 output_asm_insn (AS1 (push%L0,%0), &high);
705 output_asm_insn (AS1 (push%L0,%0), &src);
707 output_asm_insn (template, xops);
/* Deallocate the temporary stack slot. */
709 output_asm_insn (AS2 (add%L3,%2,%3), xops);
/* NOTE(review): sampled listing — xops declaration, several if/else heads
   and braces are missing from view; code kept byte-identical.  Stores the
   387 top-of-stack into a stack temporary (fist/fst family, popping when
   DIES), then pops the words into DEST. */
712 /* Output an insn to pop an value from the 387 top-of-stack to 386
713 register DEST. The 387 register stack is popped if DIES is true. If
714 the mode of DEST is an integer mode, a `fist' integer store is done,
715 otherwise a `fst' float store is done. */
718 output_to_reg (dest, dies)
723 int size = GET_MODE_SIZE (GET_MODE (dest));
725 xops[0] = AT_SP (Pmode);
726 xops[1] = stack_pointer_rtx;
727 xops[2] = GEN_INT (size);
/* Make room on the 386 stack for the value. */
730 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
732 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
735 output_asm_insn (AS1 (fistp%z3,%y0), xops);
737 output_asm_insn (AS1 (fist%z3,%y0), xops);
739 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
742 output_asm_insn (AS1 (fstp%z3,%y0), xops);
745 if (GET_MODE (dest) == XFmode)
/* No non-popping XFmode store exists on the 387: store-pop then reload. */
747 output_asm_insn (AS1 (fstp%z3,%y0), xops);
748 output_asm_insn (AS1 (fld%z3,%y0), xops);
751 output_asm_insn (AS1 (fst%z3,%y0), xops);
/* Pop the stored words into DEST, lowest-addressed word first. */
757 output_asm_insn (AS1 (pop%L0,%0), &dest);
759 if (size > UNITS_PER_WORD)
761 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
762 output_asm_insn (AS1 (pop%L0,%0), &dest);
763 if (size > 2 * UNITS_PER_WORD)
765 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
766 output_asm_insn (AS1 (pop%L0,%0), &dest);
/* NOTE(review): sampled listing — the function's comment, declarations, the
   abort branch, and the final return are missing from view; code kept
   byte-identical.  Returns the assembler template for one single-word move:
   push for a pre-decrement store to the stack, constant-load helper for
   CONST_DOUBLE, otherwise a plain mov. */
772 singlemove_string (operands)
776 if (GET_CODE (operands[0]) == MEM
777 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
/* Pre-decrement of anything but the stack pointer is not expressible. */
779 if (XEXP (x, 0) != stack_pointer_rtx)
783 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
785 return output_move_const_single (operands);
787 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
788 return AS2 (mov%L0,%1,%0);
789 else if (CONSTANT_P (operands[1]))
790 return AS2 (mov%L0,%1,%0);
793 output_asm_insn ("push%L1 %1", operands);
/* NOTE(review): this is find_addr_reg — its "find_addr_reg (addr)" header
   line, the abort arms, and the final return are sampled out of this
   listing; code kept byte-identical.  Walks a PLUS chain keeping the
   register (or non-constant) operand until a bare REG is reached. */
798 /* Return a REG that occurs in ADDR with coefficient 1.
799 ADDR can be effectively incremented by incrementing REG. */
805 while (GET_CODE (addr) == PLUS)
807 if (GET_CODE (XEXP (addr, 0)) == REG)
808 addr = XEXP (addr, 0);
809 else if (GET_CODE (XEXP (addr, 1)) == REG)
810 addr = XEXP (addr, 1);
/* If one side is constant, the register must be on the other side. */
811 else if (CONSTANT_P (XEXP (addr, 0)))
812 addr = XEXP (addr, 1);
813 else if (CONSTANT_P (XEXP (addr, 1)))
814 addr = XEXP (addr, 0);
818 if (GET_CODE (addr) == REG)
/* NOTE(review): this is the asm_add helper — its signature, the xops setup,
   and the n == 1/-1 tests are sampled out of this listing; code kept
   byte-identical.  Visible cases: inc/dec for +/-1, sub for negative N,
   add otherwise. */
824 /* Output an insn to add the constant N to the register X. */
835 output_asm_insn (AS1 (dec%L0,%0), xops);
837 output_asm_insn (AS1 (inc%L0,%0), xops);
840 xops[1] = GEN_INT (-n);
841 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
845 xops[1] = GEN_INT (n);
846 output_asm_insn (AS2 (add%L0,%1,%0), xops);
/* NOTE(review): sampled listing — return type, braces, the optype
   assignments, the middlehalf/latehalf/xops declarations, and several
   condition heads are missing from view; code kept byte-identical.
   Emits a 2- or 3-word move (DImode/DFmode/XFmode) as single-word moves,
   choosing word order to survive push/pop operands and register/address
   overlaps. */
851 /* Output assembler code to perform a doubleword move insn
852 with operands OPERANDS. */
855 output_move_double (operands)
858 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
862 rtx addreg0 = 0, addreg1 = 0;
863 int dest_overlapped_low = 0;
864 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
869 /* First classify both operands. */
871 if (REG_P (operands[0]))
873 else if (offsettable_memref_p (operands[0]))
875 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
877 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
879 else if (GET_CODE (operands[0]) == MEM)
884 if (REG_P (operands[1]))
886 else if (CONSTANT_P (operands[1]))
888 else if (offsettable_memref_p (operands[1]))
890 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
892 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
894 else if (GET_CODE (operands[1]) == MEM)
899 /* Check for the cases that the operand constraints are not
900 supposed to allow to happen. Abort if we get one,
901 because generating code for these cases is painful. */
903 if (optype0 == RNDOP || optype1 == RNDOP)
906 /* If one operand is decrementing and one is incrementing
907 decrement the former register explicitly
908 and change that operand into ordinary indexing. */
910 if (optype0 == PUSHOP && optype1 == POPOP)
912 /* ??? Can this ever happen on i386? */
913 operands[0] = XEXP (XEXP (operands[0], 0), 0);
914 asm_add (-size, operands[0]);
915 if (GET_MODE (operands[1]) == XFmode)
916 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
917 else if (GET_MODE (operands[0]) == DFmode)
918 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
920 operands[0] = gen_rtx (MEM, DImode, operands[0]);
924 if (optype0 == POPOP && optype1 == PUSHOP)
926 /* ??? Can this ever happen on i386? */
927 operands[1] = XEXP (XEXP (operands[1], 0), 0);
928 asm_add (-size, operands[1]);
929 if (GET_MODE (operands[1]) == XFmode)
930 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
931 else if (GET_MODE (operands[1]) == DFmode)
932 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
934 operands[1] = gen_rtx (MEM, DImode, operands[1]);
938 /* If an operand is an unoffsettable memory ref, find a register
939 we can increment temporarily to make it refer to the second word. */
941 if (optype0 == MEMOP)
942 addreg0 = find_addr_reg (XEXP (operands[0], 0));
944 if (optype1 == MEMOP)
945 addreg1 = find_addr_reg (XEXP (operands[1], 0));
947 /* Ok, we can do one word at a time.
948 Normally we do the low-numbered word first,
949 but if either operand is autodecrementing then we
950 do the high-numbered word first.
952 In either case, set up in LATEHALF the operands to use
953 for the high-numbered word and in some cases alter the
954 operands in OPERANDS to be suitable for the low-numbered word. */
/* (three-word case, presumably size == 12 / XFmode — guard sampled out) */
958 if (optype0 == REGOP)
960 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
961 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
963 else if (optype0 == OFFSOP)
965 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
966 latehalf[0] = adj_offsettable_operand (operands[0], 8);
970 middlehalf[0] = operands[0];
971 latehalf[0] = operands[0];
974 if (optype1 == REGOP)
976 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
977 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
979 else if (optype1 == OFFSOP)
981 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
982 latehalf[1] = adj_offsettable_operand (operands[1], 8);
984 else if (optype1 == CNSTOP)
/* Split a long-double constant into its three 32-bit target words. */
986 if (GET_CODE (operands[1]) == CONST_DOUBLE)
988 REAL_VALUE_TYPE r; long l[3];
990 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
991 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
992 operands[1] = GEN_INT (l[0]);
993 middlehalf[1] = GEN_INT (l[1]);
994 latehalf[1] = GEN_INT (l[2]);
996 else if (CONSTANT_P (operands[1]))
997 /* No non-CONST_DOUBLE constant should ever appear here. */
1002 middlehalf[1] = operands[1];
1003 latehalf[1] = operands[1];
1006 else /* size is not 12: */
1008 if (optype0 == REGOP)
1009 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1010 else if (optype0 == OFFSOP)
1011 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1013 latehalf[0] = operands[0];
1015 if (optype1 == REGOP)
1016 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1017 else if (optype1 == OFFSOP)
1018 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1019 else if (optype1 == CNSTOP)
1020 split_double (operands[1], &operands[1], &latehalf[1]);
1022 latehalf[1] = operands[1];
1025 /* If insn is effectively movd N (sp),-(sp) then we will do the
1026 high word first. We should use the adjusted operand 1
1027 (which is N+4 (sp) or N+8 (sp))
1028 for the low word and middle word as well,
1029 to compensate for the first decrement of sp. */
1030 if (optype0 == PUSHOP
1031 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1032 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1033 middlehalf[1] = operands[1] = latehalf[1];
1035 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1036 if the upper part of reg N does not appear in the MEM, arrange to
1037 emit the move late-half first. Otherwise, compute the MEM address
1038 into the upper part of N and use that as a pointer to the memory
1040 if (optype0 == REGOP
1041 && (optype1 == OFFSOP || optype1 == MEMOP))
1043 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1044 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1046 /* If both halves of dest are used in the src memory address,
1047 compute the address into latehalf of dest. */
1049 xops[0] = latehalf[0];
1050 xops[1] = XEXP (operands[1], 0);
1051 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1052 if( GET_MODE (operands[1]) == XFmode )
1055 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1056 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1057 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1061 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1062 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
/* (condition head sampled out: presumably size == 12 && ...) */
1066 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1068 /* Check for two regs used by both source and dest. */
1069 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1070 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1073 /* JRV says this can't happen: */
1074 if (addreg0 || addreg1)
1077 /* Only the middle reg conflicts; simply put it last. */
1078 output_asm_insn (singlemove_string (operands), operands);
1079 output_asm_insn (singlemove_string (latehalf), latehalf);
1080 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1083 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1084 /* If the low half of dest is mentioned in the source memory
1085 address, the arrange to emit the move late half first. */
1086 dest_overlapped_low = 1;
1089 /* If one or both operands autodecrementing,
1090 do the two words, high-numbered first. */
1092 /* Likewise, the first move would clobber the source of the second one,
1093 do them in the other order. This happens only for registers;
1094 such overlap can't happen in memory unless the user explicitly
1095 sets it up, and that is an undefined circumstance. */
1098 if (optype0 == PUSHOP || optype1 == PUSHOP
1099 || (optype0 == REGOP && optype1 == REGOP
1100 && REGNO (operands[0]) == REGNO (latehalf[1]))
1101 || dest_overlapped_low)
/* Second, stricter form of the same test (presumably the size == 12
   variant that also checks the middle half — guards sampled out). */
1103 if (optype0 == PUSHOP || optype1 == PUSHOP
1104 || (optype0 == REGOP && optype1 == REGOP
1105 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1106 || REGNO (operands[0]) == REGNO (latehalf[1])))
1107 || dest_overlapped_low)
1109 /* Make any unoffsettable addresses point at high-numbered word. */
1111 asm_add (size-4, addreg0);
1113 asm_add (size-4, addreg1);
1116 output_asm_insn (singlemove_string (latehalf), latehalf);
1118 /* Undo the adds we just did. */
1120 asm_add (-4, addreg0);
1122 asm_add (-4, addreg1);
1126 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1128 asm_add (-4, addreg0);
1130 asm_add (-4, addreg1);
1133 /* Do low-numbered word. */
1134 return singlemove_string (operands);
1137 /* Normal case: do the two words, low-numbered first. */
1139 output_asm_insn (singlemove_string (operands), operands);
1141 /* Do the middle one of the three words for long double */
1145 asm_add (4, addreg0);
1147 asm_add (4, addreg1);
1149 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1152 /* Make any unoffsettable addresses point at high-numbered word. */
1154 asm_add (4, addreg0);
1156 asm_add (4, addreg1);
1159 output_asm_insn (singlemove_string (latehalf), latehalf);
1161 /* Undo the adds we just did. */
1163 asm_add (4-size, addreg0);
1165 asm_add (4-size, addreg1);
1171 #define MAX_TMPS 2 /* max temporary registers used */
/* NOTE(review): sampled listing — the tmp_info struct opener, local
   declarations, several loop bodies, braces, and the function tail are
   missing from view; code kept byte-identical.  Pushes a memory operand
   onto the stack: direct pushes when the source does not involve %esp,
   otherwise load-into-temporaries then push (tracking stack_offset to
   compensate for the moving stack pointer). */
1173 /* Output the appropriate code to move push memory on the stack */
1176 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1188 } tmp_info[MAX_TMPS];
1190 rtx src = operands[1];
1193 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1194 int stack_offset = 0;
1198 if (!offsettable_memref_p (src))
1199 fatal_insn ("Source is not offsettable", insn);
1201 if ((length & 3) != 0)
1202 fatal_insn ("Pushing non-word aligned size", insn);
1204 /* Figure out which temporary registers we have available */
1205 for (i = tmp_start; i < n_operands; i++)
1207 if (GET_CODE (operands[i]) == REG)
/* A scratch that overlaps the source cannot be used as a temporary. */
1209 if (reg_overlap_mentioned_p (operands[i], src))
1212 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1213 if (max_tmps == MAX_TMPS)
/* Source independent of %esp: push each word directly, highest offset
   first so words land on the stack in the right order. */
1219 for (offset = length - 4; offset >= 0; offset -= 4)
1221 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1222 output_asm_insn (AS1(push%L0,%0), xops);
/* Source mentions %esp: batch up to MAX_TMPS loads before pushing, so the
   loads read the source before the pushes move the stack pointer. */
1228 for (offset = length - 4; offset >= 0; )
1230 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1232 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1233 tmp_info[num_tmps].push = AS1(push%L0,%1);
1234 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1238 for (i = 0; i < num_tmps; i++)
1239 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1241 for (i = 0; i < num_tmps; i++)
1242 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
/* The pushes just lowered %esp; bias subsequent source offsets. */
1245 stack_offset += 4*num_tmps;
1253 /* Output the appropriate code to move data between two memory locations */
/* NOTE(review): damaged listing -- line numbers fused in, brace/blank
   lines missing; code kept verbatim, comments only added.  */
/* Emit assembler for a LENGTH-byte memory-to-memory block move from
   operands[1] to operands[0], using scratch registers supplied in
   operands[tmp_start .. n_operands-1].  A push onto the stack
   (dest = (mem (pre_inc sp))) is delegated to output_move_pushmem.  */
1256 output_move_memory (operands, insn, length, tmp_start, n_operands)
/* Per-scratch bookkeeping: load/store templates plus operands
   (struct head missing from this view).  */
1267 } tmp_info[MAX_TMPS];
1269 rtx dest = operands[0];
1270 rtx src = operands[1];
/* qi_tmp: a byte-addressable (QImode-capable) scratch, needed only
   when LENGTH is odd and a trailing single byte must be moved.  */
1271 rtx qi_tmp = NULL_RTX;
1277 if (GET_CODE (dest) == MEM
1278 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1279 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1280 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
1282 if (!offsettable_memref_p (src))
1283 fatal_insn ("Source is not offsettable", insn);
1285 if (!offsettable_memref_p (dest))
1286 fatal_insn ("Destination is not offsettable", insn);
1288 /* Figure out which temporary registers we have available */
1289 for (i = tmp_start; i < n_operands; i++)
1291 if (GET_CODE (operands[i]) == REG)
1293 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1294 qi_tmp = operands[i];
/* Scratch registers must be disjoint from both addresses, since a
   load into an overlapping scratch would corrupt later addressing.  */
1296 if (reg_overlap_mentioned_p (operands[i], dest))
1297 fatal_insn ("Temporary register overlaps the destination", insn);
1299 if (reg_overlap_mentioned_p (operands[i], src))
1300 fatal_insn ("Temporary register overlaps the source", insn);
1302 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1303 if (max_tmps == MAX_TMPS)
1309 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1311 if ((length & 1) != 0)
1314 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
/* Main loop (head missing): build a batch of up to max_tmps moves,
   word-sized while length >= 4, then halfword while length >= 2.  */
1319 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1323 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1324 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1325 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1326 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1330 else if (length >= 2)
1332 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1333 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1334 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1335 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
/* Issue all loads of the batch, then all stores, to decouple the
   memory reads from the writes.  */
1343 for (i = 0; i < num_tmps; i++)
1344 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1346 for (i = 0; i < num_tmps; i++)
1347 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Trailing odd byte, moved through the byte-capable scratch qi_tmp
   (guard lines missing from this view).  */
1352 xops[0] = adj_offsettable_operand (dest, offset);
1353 xops[1] = adj_offsettable_operand (src, offset);
1355 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1356 output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Test whether CONST_DOUBLE X is one of the constants the 80387 can
   materialize with a dedicated instruction (+0.0 via fldz, 1.0 via
   fld1).  The comparison is guarded by a float-trap handler so a
   signalling value cannot crash the compiler.  Return lines are
   missing from this view -- presumably a small code, 0 if not special. */
1364 standard_80387_constant_p (x)
1367 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* setjmp returns nonzero when the float handler longjmps back after
   a trapping comparison; that path treats X as not special.  */
1372 if (setjmp (handler))
1375 set_float_handler (handler);
1376 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
/* fldz loads +0.0 only, so -0.0 must be excluded explicitly.  */
1377 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1378 is1 = REAL_VALUES_EQUAL (d, dconst1);
1379 set_float_handler (NULL_PTR);
1387 /* Note that on the 80387, other constants, such as pi,
1388 are much slower to load as standard constants
1389 than to load from doubles in memory! */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Return the assembler template for moving a single-word constant into
   operands[0].  FP-register destinations first try the special 387
   load instructions (fldz/fld1) via standard_80387_constant_p; other
   CONST_DOUBLEs are converted to their 32-bit target image and moved
   as an ordinary immediate.  */
1396 output_move_const_single (operands)
1399 if (FP_REG_P (operands[0]))
1401 int conval = standard_80387_constant_p (operands[1]);
/* (Missing lines presumably return "fldz"/"fld1" for conval 1/2 --
   TODO confirm against the full source.)  */
1409 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1411 REAL_VALUE_TYPE r; long l;
/* XFmode (80-bit) constants cannot be flattened to one word here.  */
1413 if (GET_MODE (operands[1]) == XFmode)
1416 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1417 REAL_VALUE_TO_TARGET_SINGLE (r, l);
/* Replace the CONST_DOUBLE by its raw 32-bit bit pattern so the
   generic single-word move logic can emit it.  */
1418 operands[1] = GEN_INT (l);
1420 return singlemove_string (operands);
1423 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1424 reference and a constant. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Predicate: nonzero iff OP is a symbol reference, a label reference,
   or a CONST wrapping (symbol-or-label + const_int).  MODE is unused,
   as is conventional for many predicates.  */
1427 symbolic_operand (op, mode)
1429 enum machine_mode mode;
1431 switch (GET_CODE (op))
/* Reached for the CONST case (case labels are on missing lines):
   accept only (plus (symbol_ref|label_ref) (const_int)).  */
1438 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1439 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1440 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1446 /* Test for a valid operand for a call instruction.
1447 Don't allow the arg pointer register or virtual regs
1448 since they may change into reg + const, which the patterns
1449 can't handle yet. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Predicate for a valid call target: a MEM whose address is either a
   constant address that is also a general operand (the extra check
   matters under PIC), or a hard/pseudo register that is neither the
   arg pointer nor one of the virtual registers (those may later turn
   into reg+const, which the call patterns cannot handle).  */
1452 call_insn_operand (op, mode)
1454 enum machine_mode mode;
1456 if (GET_CODE (op) == MEM
1457 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1458 /* This makes a difference for PIC. */
1459 && general_operand (XEXP (op, 0), Pmode))
1460 || (GET_CODE (XEXP (op, 0)) == REG
1461 && XEXP (op, 0) != arg_pointer_rtx
1462 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1463 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1468 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Like call_insn_operand, but without the general_operand check on a
   constant address, so a plain (mem (symbol_ref ...)) is accepted by
   the call expanders even when PIC would later reject it.  */
1472 expander_call_insn_operand (op, mode)
1474 enum machine_mode mode;
1476 if (GET_CODE (op) == MEM
1477 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1478 || (GET_CODE (XEXP (op, 0)) == REG
1479 && XEXP (op, 0) != arg_pointer_rtx
1480 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1481 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1486 /* Return 1 if OP is a comparison operator that can use the condition code
1487 generated by an arithmetic operation. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Predicate: OP is a comparison operator usable directly on the flags
   set by an arithmetic instruction.  GT and LE are excluded -- on the
   x86 they need both the sign/overflow and zero flags in a combination
   that plain arithmetic condition codes do not provide.  */
1490 arithmetic_comparison_operator (op, mode)
1492 enum machine_mode mode;
/* MODE of VOIDmode matches any operator mode.  */
1496 if (mode != VOIDmode && mode != GET_MODE (op))
1498 code = GET_CODE (op);
1499 if (GET_RTX_CLASS (code) != '<')
1502 return (code != GT && code != LE);
1505 /* Returns 1 if OP contains a symbol reference */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Recursively scan OP and return nonzero if any SYMBOL_REF or
   LABEL_REF appears anywhere inside it, walking both expression ('e')
   and vector ('E') operands via the rtx format string.  */
1508 symbolic_reference_mentioned_p (op)
1514 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1517 fmt = GET_RTX_FORMAT (GET_CODE (op));
1518 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
/* Vector operand: recurse into every element.  */
1524 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1525 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1528 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1535 /* Attempt to expand a binary operator. Make the expansion closer to the
1536 actual machine, then just general_operand, which will allow 3 separate
1537 memory references (one output, two input) in a single insn. Return
1538 whether the insn fails, or succeeds. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Massage the three operands of a binary operation CODE in MODE so the
   resulting insn matches the machine's constraints (only one memory
   operand, constant as the second source, etc.), forcing values into
   registers where needed.  Returns whether expansion succeeds.  */
1541 ix86_expand_binary_operator (code, mode, operands)
1543 enum machine_mode mode;
1550 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1551 if (GET_RTX_CLASS (code) == 'c'
1552 && (rtx_equal_p (operands[0], operands[2])
1553 || immediate_operand (operands[1], mode)))
/* Swap the two sources so the destination-matching (or register)
   operand comes first.  */
1555 rtx temp = operands[1];
1556 operands[1] = operands[2];
1560 /* If optimizing, copy to regs to improve CSE */
1561 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
1563 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1564 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1566 if (GET_CODE (operands[2]) == MEM)
1567 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* Non-commutative MINUS cannot keep a constant first operand:
   load it into a fresh register instead.  */
1569 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1571 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1572 emit_move_insn (temp, operands[1]);
1578 if (!ix86_binary_operator_ok (code, mode, operands))
1580 /* If not optimizing, try to make a valid insn (optimize code previously did
1581 this above to improve chances of CSE) */
1583 if ((!TARGET_PSEUDO || !optimize)
1584 && ((reload_in_progress | reload_completed) == 0)
1585 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
/* Same fix-ups as the optimizing path, applied lazily only when the
   operand combination was actually rejected.  */
1588 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1590 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1594 if (GET_CODE (operands[2]) == MEM)
1596 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1600 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1602 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1603 emit_move_insn (temp, operands[1]);
/* Re-check only if something was actually modified above.  */
1608 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1618 /* Return TRUE or FALSE depending on whether the binary operator meets the
1619 appropriate constraints. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* TRUE iff the operand combination is representable: at most one of
   the two sources in memory, and a constant first source only for
   commutative operators (where it can be swapped to second place).  */
1622 ix86_binary_operator_ok (code, mode, operands)
1624 enum machine_mode mode;
1627 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1628 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1631 /* Attempt to expand a unary operator. Make the expansion closer to the
1632 actual machine, then just general_operand, which will allow 2 separate
1633 memory references (one output, one input) in a single insn. Return
1634 whether the insn fails, or succeeds. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Counterpart of ix86_expand_binary_operator for unary operations:
   force a memory source into a register (eagerly when optimizing to
   help CSE, lazily otherwise) so the insn constraints are met.
   Returns whether expansion succeeds.  */
1637 ix86_expand_unary_operator (code, mode, operands)
1639 enum machine_mode mode;
1644 /* If optimizing, copy to regs to improve CSE */
1647 && ((reload_in_progress | reload_completed) == 0)
1648 && GET_CODE (operands[1]) == MEM)
1650 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1653 if (!ix86_unary_operator_ok (code, mode, operands))
/* Rejected: when the eager copy was skipped, retry once after
   forcing the memory source into a register.  */
1655 if ((!TARGET_PSEUDO || !optimize)
1656 && ((reload_in_progress | reload_completed) == 0)
1657 && GET_CODE (operands[1]) == MEM)
1659 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1660 if (!ix86_unary_operator_ok (code, mode, operands))
1670 /* Return TRUE or FALSE depending on whether the unary operator meets the
1671 appropriate constraints. */
/* NOTE(review): damaged listing -- only the header of this predicate
   survives; its body is missing from this view.  Presumably it always
   accepts (returns TRUE), mirroring ix86_binary_operator_ok's role --
   TODO confirm against the full source.  */
1674 ix86_unary_operator_ok (code, mode, operands)
1676 enum machine_mode mode;
1684 static rtx pic_label_rtx;
1686 /* This function generates code for -fpic that loads %ebx with
1687 with the return address of the caller and then returns. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Under -fpic with deep-branch-prediction targets, emit before the
   function a tiny thunk (labelled pic_label_rtx) that loads the return
   address from the stack top into the PIC register and returns.
   Calling this thunk, instead of a call-to-next-insn/pop pair, keeps
   the CPU's call/return prediction stack balanced.  */
1689 asm_output_function_prefix (file, name)
1694 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1695 || current_function_uses_const_pool);
1696 xops[0] = pic_offset_table_rtx;
1697 xops[1] = stack_pointer_rtx;
1699 /* deep branch prediction favors having a return for every call */
1700 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Lazily create the shared thunk label; the prologue emitters below
   reference the same pic_label_rtx.  */
1702 if (pic_label_rtx == 0)
1703 pic_label_rtx = (rtx) gen_label_rtx ();
1704 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (pic_label_rtx));
1705 output_asm_insn ("movl (%1),%0", xops);
1706 output_asm_insn ("ret", xops);
1710 /* Set up the stack and frame (if desired) for the function. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Text-mode prologue emitter: writes the function prologue directly to
   the assembler FILE (frame setup, callee-saved pushes, PIC register
   load).  Skipped entirely when TARGET_SCHEDULE_PROLOGUE, in which
   case ix86_expand_prologue emits RTL instead.  */
1713 function_prologue (file, size)
1720 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1721 || current_function_uses_const_pool);
1722 long tsize = get_frame_size ();
1724 /* pic references don't explicitly mention pic_offset_table_rtx */
1725 if (TARGET_SCHEDULE_PROLOGUE)
1728 xops[0] = stack_pointer_rtx;
1729 xops[1] = frame_pointer_rtx;
1730 xops[2] = GEN_INT (tsize);
1731 if (frame_pointer_needed)
/* push %ebp; movl %esp,%ebp -- standard frame link.  */
1733 output_asm_insn ("push%L1 %1", xops);
1734 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
/* Allocate the local frame (guard for tsize != 0 is on a missing
   line, presumably).  */
1738 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
1740 /* Note If use enter it is NOT reversed args.
1741 This one is not reversed from intel!!
1742 I think enter is slower. Also sdb doesn't like it.
1743 But if you want it the code is:
1745 xops[3] = const0_rtx;
1746 output_asm_insn ("enter %2,%3", xops);
/* Push the callee-saved registers (and the PIC reg when used),
   highest register number first.  */
1749 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1750 for (regno = limit - 1; regno >= 0; regno--)
1751 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1752 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1754 xops[0] = gen_rtx (REG, SImode, regno);
1755 output_asm_insn ("push%L0 %0", xops);
/* PIC register setup: call the shared get-PC thunk (emitted by
   asm_output_function_prefix) and add the GOT displacement.  */
1758 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1760 xops[0] = pic_offset_table_rtx;
1761 if (pic_label_rtx == 0)
1762 pic_label_rtx = (rtx) gen_label_rtx ();
1763 xops[1] = pic_label_rtx;
1765 output_asm_insn (AS1 (call,%P1), xops);
1766 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
/* Fallback PIC sequence: call next instruction, pop the return
   address, then add the label-relative GOT offset.  */
1768 else if (pic_reg_used)
1770 xops[0] = pic_offset_table_rtx;
1771 xops[1] = (rtx) gen_label_rtx ();
1773 output_asm_insn (AS1 (call,%P1), xops);
1774 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1775 output_asm_insn (AS1 (pop%L0,%0), xops);
1776 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1780 /* This function generates the assembly code for function entry.
1781 FILE is an stdio stream to output the code to.
1782 SIZE is an int: how many units of temporary storage to allocate. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* RTL-mode prologue emitter: the same frame setup, callee-saved
   pushes, and PIC register load as function_prologue, but emitted as
   insns so the scheduler can move them.  Active only when
   TARGET_SCHEDULE_PROLOGUE; otherwise it returns immediately.  */
1785 ix86_expand_prologue ()
1790 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1791 || current_function_uses_const_pool);
1792 long tsize = get_frame_size ();
1794 if (!TARGET_SCHEDULE_PROLOGUE)
1797 xops[0] = stack_pointer_rtx;
1798 xops[1] = frame_pointer_rtx;
1799 xops[2] = GEN_INT (tsize);
1800 if (frame_pointer_needed)
/* push %ebp expressed as a store through (pre_dec sp).  */
1802 emit_insn (gen_rtx (SET, 0,
1803 gen_rtx (MEM, SImode,
1804 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1805 frame_pointer_rtx));
1806 emit_move_insn (xops[1], xops[0]);
/* Allocate the local frame: sp = sp - tsize (inner operands on
   missing lines).  */
1810 emit_insn (gen_rtx (SET, SImode,
1812 gen_rtx (MINUS, SImode,
1816 /* Note If use enter it is NOT reversed args.
1817 This one is not reversed from intel!!
1818 I think enter is slower. Also sdb doesn't like it.
1819 But if you want it the code is:
1821 xops[3] = const0_rtx;
1822 output_asm_insn ("enter %2,%3", xops);
/* Push callee-saved registers (and the PIC reg when used).  */
1825 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1826 for (regno = limit - 1; regno >= 0; regno--)
1827 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1828 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1830 xops[0] = gen_rtx (REG, SImode, regno);
1831 emit_insn (gen_rtx (SET, 0,
1832 gen_rtx (MEM, SImode,
1833 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
/* PIC register setup via the get-PC thunk (deep branch prediction):
   dedicated prologue_get_pc/prologue_set_got patterns keep the
   sequence intact through scheduling.  */
1837 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1839 xops[0] = pic_offset_table_rtx;
1840 if (pic_label_rtx == 0)
1841 pic_label_rtx = (rtx) gen_label_rtx ();
1842 xops[1] = pic_label_rtx;
1844 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1845 emit_insn (gen_prologue_set_got (xops[0],
1846 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1847 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
/* Fallback PIC sequence: get PC, pop it, then add the GOT offset.  */
1849 else if (pic_reg_used)
1851 xops[0] = pic_offset_table_rtx;
1852 xops[1] = (rtx) gen_label_rtx ();
1854 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1855 emit_insn (gen_pop (xops[0]));
1856 emit_insn (gen_prologue_set_got (xops[0],
1857 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1858 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
1862 /* Restore function stack, frame, and registers. */
1865 function_epilogue (file, size)
1871 /* Return 1 if it is appropriate to emit `ret' instructions in the
1872 body of a function. Do this only if the epilogue is simple, needing a
1873 couple of insns. Prior to reloading, we can't tell how many registers
1874 must be saved, so return 0 then. Return 0 if there is no frame
1875 marker to de-allocate.
1877 If NON_SAVING_SETJMP is defined and true, then it is not possible
1878 for the epilogue to be simple, so return 0. This is a special case
1879 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1880 until final, but jump_optimize may need to know sooner if a
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Nonzero if a bare `ret' is a valid epilogue: only after reload
   (register usage is known), never under NON_SAVING_SETJMP with a
   setjmp call, and only when no callee-saved registers need popping
   or no frame marker needs de-allocating.  */
1884 ix86_can_use_return_insn_p ()
1888 int reglimit = (frame_pointer_needed
1889 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1890 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1891 || current_function_uses_const_pool);
1893 #ifdef NON_SAVING_SETJMP
1894 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
/* Before reload the saved-register set is unknown; be conservative. */
1898 if (! reload_completed)
/* Count registers the epilogue would have to restore (increment on a
   missing line, presumably nregs++).  */
1901 for (regno = reglimit - 1; regno >= 0; regno--)
1902 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1903 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1906 return nregs == 0 || ! frame_pointer_needed;
1910 /* This function generates the assembly code for function exit.
1911 FILE is an stdio stream to output the code to.
1912 SIZE is an int: how many units of temporary storage to deallocate. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.
   The commented-out output_asm_insn calls preserved below are the old
   text-mode emitter this RTL version replaced.  */
/* Emit the function epilogue as RTL: restore callee-saved registers,
   tear down the frame (leave, or mov+pop), release any frame-pointer-
   less frame, and emit the return (popping caller arguments when the
   calling convention requires it).  */
1915 ix86_expand_epilogue ()
1918 register int nregs, limit;
1921 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1922 || current_function_uses_const_pool);
1923 long tsize = get_frame_size ();
1925 /* Compute the number of registers to pop */
1927 limit = (frame_pointer_needed
1928 ? FRAME_POINTER_REGNUM
1929 : STACK_POINTER_REGNUM);
1933 for (regno = limit - 1; regno >= 0; regno--)
1934 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1935 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1938 /* sp is often unreliable so we must go off the frame pointer,
1941 /* In reality, we may not care if sp is unreliable, because we can
1942 restore the register relative to the frame pointer. In theory,
1943 since each move is the same speed as a pop, and we don't need the
1944 leal, this is faster. For now restore multiple registers the old
1947 offset = -tsize - (nregs * UNITS_PER_WORD);
1949 xops[2] = stack_pointer_rtx;
1951 if (nregs > 1 || ! frame_pointer_needed)
1953 if (frame_pointer_needed)
/* Point %esp at the bottom of the register-save area with lea so
   the pops below find the saved values.  */
1955 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
1956 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
1957 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
/* Pop path: restore saved registers in ascending regno order
   (reverse of the prologue's pushes).  */
1960 for (regno = 0; regno < limit; regno++)
1961 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1962 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1964 xops[0] = gen_rtx (REG, SImode, regno);
1965 emit_insn (gen_pop (xops[0]));
1966 /* output_asm_insn ("pop%L0 %0", xops);*/
/* Move path (single register, frame pointer available): restore by
   a frame-pointer-relative load instead of adjusting %esp.  */
1970 for (regno = 0; regno < limit; regno++)
1971 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1972 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1974 xops[0] = gen_rtx (REG, SImode, regno);
1975 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
1976 emit_move_insn (xops[0], xops[1]);
1977 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
1981 if (frame_pointer_needed)
1983 /* If not an i386, mov & pop is faster than "leave". */
1985 if (TARGET_USE_LEAVE)
1986 emit_insn (gen_leave());
1987 /* output_asm_insn ("leave", xops);*/
1990 xops[0] = frame_pointer_rtx;
1991 xops[1] = stack_pointer_rtx;
1992 emit_insn (gen_epilogue_set_stack_ptr());
1993 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
1994 emit_insn (gen_pop (xops[0]));
1995 /* output_asm_insn ("pop%L0 %0", xops);*/
2000 /* If there is no frame pointer, we must still release the frame. */
2002 xops[0] = GEN_INT (tsize);
2003 emit_insn (gen_rtx (SET, SImode,
2005 gen_rtx (PLUS, SImode,
2008 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2011 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2012 if (profile_block_flag == 2)
2014 FUNCTION_BLOCK_PROFILER_EXIT(file);
2018 if (current_function_pops_args && current_function_args_size)
2020 xops[1] = GEN_INT (current_function_pops_args);
2022 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2023 asked to pop more, pop return address, do explicit add, and jump
2024 indirectly to the caller. */
2026 if (current_function_pops_args >= 32768)
2028 /* ??? Which register to use here? */
2029 xops[0] = gen_rtx (REG, SImode, 2);
2030 emit_insn (gen_pop (xops[0]));
2031 /* output_asm_insn ("pop%L0 %0", xops);*/
2032 emit_insn (gen_rtx (SET, SImode,
2034 gen_rtx (PLUS, SImode,
2037 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2038 emit_jump_insn (xops[0]);
2039 /* output_asm_insn ("jmp %*%0", xops);*/
/* Small pop count: `ret $n' handles it directly.  */
2042 emit_jump_insn (gen_return_pop_internal (xops[1]));
2043 /* output_asm_insn ("ret %1", xops);*/
2046 /* output_asm_insn ("ret", xops);*/
2047 emit_jump_insn (gen_return_internal ());
2051 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2052 that is a valid memory address for an instruction.
2053 The MODE argument is the machine mode for the MEM expression
2054 that wants to use this address.
2056 On x86, legitimate addresses are:
2057 base movl (base),reg
2058 displacement movl disp,reg
2059 base + displacement movl disp(base),reg
2060 index + base movl (base,index),reg
2061 (index + base) + displacement movl disp(base,index),reg
2062 index*scale movl (,index,scale),reg
2063 index*scale + disp movl disp(,index,scale),reg
2064 index*scale + base movl (base,index,scale),reg
2065 (index*scale + base) + disp movl disp(base,index,scale),reg
2067 In each case, scale can be 1, 2, 4, 8. */
2069 /* This is exactly the same as print_operand_addr, except that
2070 it recognizes addresses instead of printing them.
2072 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2073 convert common non-canonical forms to canonical form so that they will
/* NOTE(review): damaged listing -- this backslash-continued macro is
   missing some continuation lines; kept verbatim (no comments may be
   inserted between its continuation lines).  Debug helper used by
   legitimate_address_p: under -mdebug-addr, print MSG and the
   offending rtx (the debug_rtx call is on a missing line).  */
2076 #define ADDR_INVALID(msg,insn) \
2078 if (TARGET_DEBUG_ADDR) \
2080 fprintf (stderr, msg); \
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Worker for GO_IF_LEGITIMATE_ADDRESS: decompose ADDR into the x86's
   base + index*scale + displacement form, then validate each part.
   STRICT selects hard-register checking (after reload) versus the
   permissive pseudo-allowing checks.  Returns TRUE/FALSE (the return
   lines themselves fall on missing lines).  */
2086 legitimate_address_p (mode, addr, strict)
2087 enum machine_mode mode;
2091 rtx base = NULL_RTX;
2092 rtx indx = NULL_RTX;
2093 rtx scale = NULL_RTX;
2094 rtx disp = NULL_RTX;
2096 if (TARGET_DEBUG_ADDR)
2099 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2100 GET_MODE_NAME (mode), strict);
/* --- Decomposition: only canonical shapes are recognized; the
   LEGITIMIZE_ADDRESS macro is expected to canonicalize first.  --- */
2105 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2106 base = addr; /* base reg */
2108 else if (GET_CODE (addr) == PLUS)
2110 rtx op0 = XEXP (addr, 0);
2111 rtx op1 = XEXP (addr, 1);
2112 enum rtx_code code0 = GET_CODE (op0);
2113 enum rtx_code code1 = GET_CODE (op1);
2115 if (code0 == REG || code0 == SUBREG)
2117 if (code1 == REG || code1 == SUBREG)
2119 indx = op0; /* index + base */
2125 base = op0; /* base + displacement */
2130 else if (code0 == MULT)
2132 indx = XEXP (op0, 0);
2133 scale = XEXP (op0, 1);
2135 if (code1 == REG || code1 == SUBREG)
2136 base = op1; /* index*scale + base */
2139 disp = op1; /* index*scale + disp */
2142 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2144 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2145 scale = XEXP (XEXP (op0, 0), 1);
2146 base = XEXP (op0, 1);
2150 else if (code0 == PLUS)
2152 indx = XEXP (op0, 0); /* index + base + disp */
2153 base = XEXP (op0, 1);
2159 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2164 else if (GET_CODE (addr) == MULT)
2166 indx = XEXP (addr, 0); /* index*scale */
2167 scale = XEXP (addr, 1);
2171 disp = addr; /* displacement */
2173 /* Allow arg pointer and stack pointer as index if there is not scaling */
/* %esp cannot be an index in x86 encoding; swap base and index so the
   special registers end up in the base slot (swap is on missing
   lines, presumably).  */
2174 if (base && indx && !scale
2175 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
2182 /* Validate base register */
2183 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2184 is one word out of a two word structure, which is represented internally
2188 if (GET_CODE (base) != REG)
2190 ADDR_INVALID ("Base is not a register.\n", base);
2194 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2195 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2197 ADDR_INVALID ("Base is not valid.\n", base);
2202 /* Validate index register */
2203 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2204 is one word out of a two word structure, which is represented internally
2208 if (GET_CODE (indx) != REG)
2210 ADDR_INVALID ("Index is not a register.\n", indx);
2214 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2215 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2217 ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale with no index means the decomposition above is broken --
   compiler bug, not bad user input.  */
2222 abort (); /* scale w/o index invalid */
2224 /* Validate scale factor */
2227 HOST_WIDE_INT value;
2229 if (GET_CODE (scale) != CONST_INT)
2231 ADDR_INVALID ("Scale is not valid.\n", scale);
2235 value = INTVAL (scale);
2236 if (value != 1 && value != 2 && value != 4 && value != 8)
2238 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2243 /* Validate displacement
2244 Constant pool addresses must be handled special. They are
2245 considered legitimate addresses, but only if not used with regs.
2246 When printed, the output routines know to print the reference with the
2247 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2250 if (GET_CODE (disp) == SYMBOL_REF
2251 && CONSTANT_POOL_ADDRESS_P (disp)
2256 else if (!CONSTANT_ADDRESS_P (disp))
2258 ADDR_INVALID ("Displacement is not valid.\n", disp);
2262 else if (GET_CODE (disp) == CONST_DOUBLE)
2264 ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under PIC, a symbolic displacement is legal only relative to the
   PIC register (as base, or as unscaled index).  */
2268 else if (flag_pic && SYMBOLIC_CONST (disp)
2269 && base != pic_offset_table_rtx
2270 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2272 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2276 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2277 && (base != NULL_RTX || indx != NULL_RTX))
2279 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2284 if (TARGET_DEBUG_ADDR)
2285 fprintf (stderr, "Address is valid.\n");
2287 /* Everything looks valid, return true */
2292 /* Return a legitimate reference for ORIG (an address) using the
2293 register REG. If REG is 0, a new pseudo is generated.
2295 There are three types of references that must be handled:
2297 1. Global data references must load the address from the GOT, via
2298 the PIC reg. An insn is emitted to do this load, and the reg is
2301 2. Static data references must compute the address as an offset
2302 from the GOT, whose base is in the PIC reg. An insn is emitted to
2303 compute the address into a reg, and the reg is returned. Static
2304 data objects have SYMBOL_REF_FLAG set to differentiate them from
2305 global data objects.
2307 3. Constant pool addresses must be handled special. They are
2308 considered legitimate addresses, but only if not used with regs.
2309 When printed, the output routines know to print the reference with the
2310 PIC reg, even though the PIC reg doesn't appear in the RTL.
2312 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2313 reg also appears in the address (except for constant pool references,
2316 "switch" statements also require special handling when generating
2317 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Convert symbolic address ORIG into a PIC-legitimate form, loading
   through the GOT where needed.  REG is the register to load into, or
   0 to allocate a fresh pseudo.  See the block comment above (in the
   surrounding file) for the three reference classes handled.  */
2320 legitimize_pic_address (orig, reg)
2327 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant-pool entries are PIC-legitimate as-is (printed with the
   PIC reg implicitly); the early-return lines are missing here.  */
2329 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2334 reg = gen_reg_rtx (Pmode);
/* Local/static data (SYMBOL_REF_FLAG) and labels: address is
   GOT-base + offset, computed directly...  */
2336 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2337 || GET_CODE (addr) == LABEL_REF)
2338 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
/* ...global data: the address itself must be loaded from the GOT.  */
2340 new = gen_rtx (MEM, Pmode,
2341 gen_rtx (PLUS, Pmode,
2342 pic_offset_table_rtx, orig));
2344 emit_move_insn (reg, new);
2346 current_function_uses_pic_offset_table = 1;
2349 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2353 if (GET_CODE (addr) == CONST)
2355 addr = XEXP (addr, 0);
2356 if (GET_CODE (addr) != PLUS)
/* Already PIC-relative: leave untouched (return on missing line).  */
2360 if (XEXP (addr, 0) == pic_offset_table_rtx)
2364 reg = gen_reg_rtx (Pmode);
/* Legitimize the two halves of the sum separately, reusing REG for
   the second half only if the first didn't claim it.  */
2366 base = legitimize_pic_address (XEXP (addr, 0), reg);
2367 addr = legitimize_pic_address (XEXP (addr, 1),
2368 base == reg ? NULL_RTX : reg);
2370 if (GET_CODE (addr) == CONST_INT)
2371 return plus_constant (base, INTVAL (addr));
/* Re-associate so the constant term ends up outermost.  */
2373 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2375 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2376 addr = XEXP (addr, 1);
2378 return gen_rtx (PLUS, Pmode, base, addr);
2384 /* Emit insns to move operands[1] into operands[0]. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Rewrite operands[1] of a move into PIC-legitimate form before the
   move is emitted.  During reload no new pseudos may be created, so
   the destination itself serves as the scratch register then.  */
2387 emit_pic_move (operands, mode)
2389 enum machine_mode mode;
2391 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* A symbolic source cannot be stored directly to memory; force it
   through a register first.  */
2393 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2394 operands[1] = (rtx) force_reg (SImode, operands[1]);
2396 operands[1] = legitimize_pic_address (operands[1], temp);
2400 /* Try machine-dependent ways of modifying an illegitimate address
2401 to be legitimate. If we find one, return the new, valid address.
2402 This macro is used in only one place: `memory_address' in explow.c.
2404 OLDX is the address as it was before break_out_memory_refs was called.
2405 In some cases it is useful to look at this to decide what needs to be done.
2407 MODE and WIN are passed so that this macro can use
2408 GO_IF_LEGITIMATE_ADDRESS.
2410 It is always safe for this macro to do nothing. It exists to recognize
2411 opportunities to optimize the output.
2413 For the 80386, we handle X+REG by loading X into a register R and
2414 using R+REG. R will go in a general reg and indexing will be used.
2415 However, if REG is a broken-out memory address or multiplication,
2416 nothing needs to be done because REG can certainly go in a general reg.
2418 When -fpic is used, special handling is needed for symbolic references.
2419 See comments by legitimize_pic_address in i386.c for details. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Worker for LEGITIMIZE_ADDRESS: try machine-dependent rewrites of an
   illegitimate address X into a form legitimate_address_p accepts,
   canonicalizing shifts into multiplies, re-associating sums, and
   finally forcing sub-expressions into registers.  OLDX is the
   pre-break_out_memory_refs form (unused here as far as visible).  */
2422 legitimize_address (x, oldx, mode)
2425 enum machine_mode mode;
2430 if (TARGET_DEBUG_ADDR)
2432 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
2436 if (flag_pic && SYMBOLIC_CONST (x))
2437 return legitimize_pic_address (x, 0);
2439 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2440 if (GET_CODE (x) == ASHIFT
2441 && GET_CODE (XEXP (x, 1)) == CONST_INT
2442 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2445 x = gen_rtx (MULT, Pmode,
2446 force_reg (Pmode, XEXP (x, 0)),
2447 GEN_INT (1 << log));
2450 if (GET_CODE (x) == PLUS)
2452 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2453 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2454 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2455 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2458 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2459 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2460 GEN_INT (1 << log));
2463 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2464 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2465 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2468 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2469 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2470 GEN_INT (1 << log));
2473 /* Put multiply first if it isn't already */
2474 if (GET_CODE (XEXP (x, 1)) == MULT)
2476 rtx tmp = XEXP (x, 0);
2477 XEXP (x, 0) = XEXP (x, 1);
2482 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2483 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2484 created by virtual register instantiation, register elimination, and
2485 similar optimizations. */
2486 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2489 x = gen_rtx (PLUS, Pmode,
2490 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2491 XEXP (XEXP (x, 1), 1));
2494 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2495 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2496 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2497 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2498 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2499 && CONSTANT_P (XEXP (x, 1)))
2501 rtx constant, other;
/* Exactly one of the two candidate constants must be a CONST_INT;
   fold it into the other term.  */
2503 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2505 constant = XEXP (x, 1);
2506 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2508 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2510 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2511 other = XEXP (x, 1);
2519 x = gen_rtx (PLUS, Pmode,
2520 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2521 XEXP (XEXP (XEXP (x, 0), 1), 0)),
2522 plus_constant (other, INTVAL (constant)));
2526 if (changed && legitimate_address_p (mode, x, FALSE))
/* Force remaining MULT halves into registers.  */
2529 if (GET_CODE (XEXP (x, 0)) == MULT)
2532 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2535 if (GET_CODE (XEXP (x, 1)) == MULT)
2538 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
/* (Condition head on missing line) reg+reg form reached: done,
   presumably after a legitimacy re-check.  */
2542 && GET_CODE (XEXP (x, 1)) == REG
2543 && GET_CODE (XEXP (x, 0)) == REG)
2546 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2549 x = legitimize_pic_address (x, 0);
2552 if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: compute one addend into a temporary register and use
   the reg+reg form.  */
2555 if (GET_CODE (XEXP (x, 0)) == REG)
2557 register rtx temp = gen_reg_rtx (Pmode);
2558 register rtx val = force_operand (XEXP (x, 1), temp);
2560 emit_move_insn (temp, val);
2566 else if (GET_CODE (XEXP (x, 1)) == REG)
2568 register rtx temp = gen_reg_rtx (Pmode);
2569 register rtx val = force_operand (XEXP (x, 0), temp);
2571 emit_move_insn (temp, val);
2582 /* Print an integer constant expression in assembler syntax. Addition
2583 and subtraction are the only arithmetic that may appear in these
2584 expressions. FILE is the stdio stream to write to, X is the rtx, and
2585 CODE is the operand print code from the output string. */
/* NOTE(review): damaged listing -- kept verbatim, comments only.  */
/* Print constant expression X to FILE in assembler syntax with the
   proper PIC decorations (@GOT, @GOTOFF, @PLT).  Only addition and
   subtraction may appear.  CODE is the operand print code; 'P'
   selects the @PLT suffix for call targets.  */
2588 output_pic_addr_const (file, x, code)
2595 switch (GET_CODE (x))
/* SYMBOL_REF / LABEL_REF case (case labels on missing lines).  */
2606 if (GET_CODE (x) == SYMBOL_REF)
2607 assemble_name (file, XSTR (x, 0));
2610 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2611 CODE_LABEL_NUMBER (XEXP (x, 0)));
2612 assemble_name (asm_out_file, buf);
/* Pick the PIC relocation suffix: constant-pool entries and labels
   are GOT-relative offsets; global symbols go through the GOT;
   'P' requests a PLT call.  */
2615 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2616 fprintf (file, "@GOTOFF(%%ebx)");
2617 else if (code == 'P')
2618 fprintf (file, "@PLT");
2619 else if (GET_CODE (x) == LABEL_REF)
2620 fprintf (file, "@GOTOFF");
2621 else if (! SYMBOL_REF_FLAG (x))
2622 fprintf (file, "@GOT");
2624 fprintf (file, "@GOTOFF");
/* CODE_LABEL case.  */
2629 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2630 assemble_name (asm_out_file, buf);
/* CONST_INT case.  */
2634 fprintf (file, "%d", INTVAL (x));
/* CONST case.  */
2638 /* This used to output parentheses around the expression,
2639 but that does not work on the 386 (either ATT or BSD assembler). */
2640 output_pic_addr_const (file, XEXP (x, 0), code);
/* CONST_DOUBLE case: only integer-valued (VOIDmode) ones printable. */
2644 if (GET_MODE (x) == VOIDmode)
2646 /* We can use %d if the number is <32 bits and positive. */
2647 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2648 fprintf (file, "0x%x%08x",
2649 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x))
2651 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2654 /* We can't handle floating point constants;
2655 PRINT_OPERAND must handle them. */
2656 output_operand_lossage ("floating constant misused");
/* PLUS case.  */
2660 /* Some assemblers need integer constants to appear last (eg masm). */
2661 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2663 output_pic_addr_const (file, XEXP (x, 1), code);
2664 if (INTVAL (XEXP (x, 0)) >= 0)
2665 fprintf (file, "+");
2666 output_pic_addr_const (file, XEXP (x, 0), code);
2670 output_pic_addr_const (file, XEXP (x, 0), code);
2671 if (INTVAL (XEXP (x, 1)) >= 0)
2672 fprintf (file, "+");
2673 output_pic_addr_const (file, XEXP (x, 1), code);
/* MINUS case.  */
2678 output_pic_addr_const (file, XEXP (x, 0), code);
2679 fprintf (file, "-");
2680 output_pic_addr_const (file, XEXP (x, 1), code);
2684 output_operand_lossage ("invalid expression as operand");
2688 /* Append the correct conditional move suffix which corresponds to CODE */
/* NOTE(review): elided listing -- the `case GT:`/`case GE:` etc. labels that
   pair with each fputs below, and the surrounding braces, are missing from
   this extract.  The integer branch consults cc_prev_status.flags because
   some compares leave Z in the carry-complement position (CC_Z_IN_NOT_C). */
2691 put_condition_code (code, mode, file)
2693 enum mode_class mode;
2696 if (mode == MODE_INT)
2700 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2706 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2712 fputs ("ge", file); return;
2714 fputs ("g", file); return;
2716 fputs ("le", file); return;
2718 fputs ("l", file); return;
2720 fputs ("ae", file); return;
2722 fputs ("a", file); return;
2724 fputs ("be", file); return;
2726 fputs ("b", file); return;
2727 default: output_operand_lossage ("Invalid %%C operand");
/* Float compares set C0/C2/C3 in the FPU status word; only the unsigned-style
   suffixes (b/be/nb/nbe/e/ne) are meaningful after fnstsw/sahf. */
2729 else if (mode == MODE_FLOAT)
2733 fputs ("ne", file); return;
2735 fputs ("e", file); return;
2737 fputs ("nb", file); return;
2739 fputs ("nbe", file); return;
2741 fputs ("be", file); return;
2743 fputs ("b", file); return;
2745 fputs ("nb", file); return;
2747 fputs ("nbe", file); return;
2749 fputs ("be", file); return;
2751 fputs ("b", file); return;
2752 default: output_operand_lossage ("Invalid %%C operand");
/* Print operand X (an rtx) in assembler syntax to file FILE, using the
   operand-code letters documented below.  (Elided listing: the comment
   opener, the switch skeleton, and the case labels are missing here.) */
2757 L,W,B,Q,S,T -- print the opcode suffix for specified size of operand.
2758 C -- print opcode suffix for set/cmov insn.
2759 c -- like C, but print reversed condition
2760 F -- print opcode suffix for fcmov insn.
2761 f -- like C, but print reversed condition
2762 R -- print the prefix for register names.
2763 z -- print the opcode suffix for the size of the current operand.
2764 * -- print a star (in certain assembler syntax)
2765 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2766 c -- don't print special prefixes before constant operands.
2767 J -- print the appropriate jump operand.
2768 s -- print a shift double count, followed by the assemblers argument
2770 b -- print the QImode name of the register for the indicated operand.
2771 %b0 would print %al if operands[0] is reg 0.
2772 w -- likewise, print the HImode name of the register.
2773 k -- likewise, print the SImode name of the register.
2774 h -- print the QImode name for a "high" register, either ah, bh, ch or dh.
2775 y -- print "st(0)" instead of "st" as a register.
2776 P -- print as a PIC constant
2780 print_operand (file, x, code)
2795 PUT_OP_SIZE (code, 'l', file);
2799 PUT_OP_SIZE (code, 'w', file);
2803 PUT_OP_SIZE (code, 'b', file);
2807 PUT_OP_SIZE (code, 'l', file);
2811 PUT_OP_SIZE (code, 's', file);
2815 PUT_OP_SIZE (code, 't', file);
2819 /* 387 opcodes don't get size suffixes if the operands are
2822 if (STACK_REG_P (x))
2825 /* this is the size of op from size of operand */
2826 switch (GET_MODE_SIZE (GET_MODE (x)))
2829 PUT_OP_SIZE ('B', 'b', file);
2833 PUT_OP_SIZE ('W', 'w', file);
2837 if (GET_MODE (x) == SFmode)
2839 PUT_OP_SIZE ('S', 's', file);
2843 PUT_OP_SIZE ('L', 'l', file);
2847 PUT_OP_SIZE ('T', 't', file);
2851 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
2853 #ifdef GAS_MNEMONICS
2854 PUT_OP_SIZE ('Q', 'q', file);
2857 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
2861 PUT_OP_SIZE ('Q', 'l', file);
/* %J: emit a jump mnemonic for the comparison code in X, assuming the flags
   came from an arithmetic result rather than a compare (see note below). */
2874 switch (GET_CODE (x))
2876 /* These conditions are appropriate for testing the result
2877 of an arithmetic operation, not for a compare operation.
2878 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2879 CC_Z_IN_NOT_C false and not floating point. */
2880 case NE: fputs ("jne", file); return;
2881 case EQ: fputs ("je", file); return;
2882 case GE: fputs ("jns", file); return;
2883 case LT: fputs ("js", file); return;
2884 case GEU: fputs ("jmp", file); return;
2885 case GTU: fputs ("jne", file); return;
2886 case LEU: fputs ("je", file); return;
2887 case LTU: fputs ("#branch never", file); return;
2889 /* no matching branches for GT nor LE */
2894 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
2896 PRINT_OPERAND (file, x, 0);
2897 fputs (AS2C (,) + 1, file);
2901 /* This is used by the conditional move instructions. */
2903 put_condition_code (GET_CODE (x), MODE_INT, file);
2906 /* like above, but reverse condition */
2908 put_condition_code (reverse_condition (GET_CODE (x)), MODE_INT, file);
2912 put_condition_code (GET_CODE (x), MODE_FLOAT, file);
2915 /* like above, but reverse condition */
2917 put_condition_code (reverse_condition (GET_CODE (x)),
2925 sprintf (str, "invalid operand code `%c'", code);
2926 output_operand_lossage (str);
/* No (or handled) operand code: print the operand itself -- register,
   memory, float constant, or address constant. */
2930 if (GET_CODE (x) == REG)
2932 PRINT_REG (x, code, file);
2934 else if (GET_CODE (x) == MEM)
2936 PRINT_PTR (x, file);
2937 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2940 output_pic_addr_const (file, XEXP (x, 0), code);
2942 output_addr_const (file, XEXP (x, 0));
2945 output_address (XEXP (x, 0));
2947 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
2949 REAL_VALUE_TYPE r; long l;
2950 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2951 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2952 PRINT_IMMED_PREFIX (file);
2953 fprintf (file, "0x%x", l);
2955 /* These float cases don't actually occur as immediate operands. */
2956 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
2958 REAL_VALUE_TYPE r; char dstr[30];
2959 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2960 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2961 fprintf (file, "%s", dstr);
2963 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
2965 REAL_VALUE_TYPE r; char dstr[30];
2966 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2967 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2968 fprintf (file, "%s", dstr);
2974 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2975 PRINT_IMMED_PREFIX (file);
2976 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
2977 || GET_CODE (x) == LABEL_REF)
2978 PRINT_OFFSET_PREFIX (file);
2981 output_pic_addr_const (file, x, code);
2983 output_addr_const (file, x);
2987 /* Print a memory operand whose address is ADDR. */
/* NOTE(review): elided listing -- switch-case labels, braces and several
   assignments (e.g. where breg/ireg are set from reg1/reg2) are missing
   between the numbered lines.  The PLUS case decomposes ADDR into an
   optional constant offset, a base register and an index (possibly a
   MULT giving reg*scale), then emits offset + base/index/scale syntax. */
2990 print_operand_address (file, addr)
2994 register rtx reg1, reg2, breg, ireg;
2997 switch (GET_CODE (addr))
3001 fprintf (file, "%se", RP);
3002 fputs (hi_reg_name[REGNO (addr)], file);
/* PLUS: peel off a constant term first, from either side. */
3012 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
3014 offset = XEXP (addr, 0);
3015 addr = XEXP (addr, 1);
3017 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
3019 offset = XEXP (addr, 1);
3020 addr = XEXP (addr, 0);
3022 if (GET_CODE (addr) != PLUS) ;
3023 else if (GET_CODE (XEXP (addr, 0)) == MULT)
3025 reg1 = XEXP (addr, 0);
3026 addr = XEXP (addr, 1);
3028 else if (GET_CODE (XEXP (addr, 1)) == MULT)
3030 reg1 = XEXP (addr, 1);
3031 addr = XEXP (addr, 0);
3033 else if (GET_CODE (XEXP (addr, 0)) == REG)
3035 reg1 = XEXP (addr, 0);
3036 addr = XEXP (addr, 1);
3038 else if (GET_CODE (XEXP (addr, 1)) == REG)
3040 reg1 = XEXP (addr, 1);
3041 addr = XEXP (addr, 0);
3043 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3045 if (reg1 == 0) reg1 = addr;
3051 if (addr != 0) abort ();
/* Decide which register is base and which is index: a MULT must be the
   index; otherwise prefer a register valid as a base. */
3054 if ((reg1 && GET_CODE (reg1) == MULT)
3055 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3060 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3066 if (ireg != 0 || breg != 0)
3073 output_pic_addr_const (file, addr, 0);
3075 else if (GET_CODE (addr) == LABEL_REF)
3076 output_asm_label (addr);
3079 output_addr_const (file, addr);
3082 if (ireg != 0 && GET_CODE (ireg) == MULT)
3084 scale = INTVAL (XEXP (ireg, 1));
3085 ireg = XEXP (ireg, 0);
3088 /* The stack pointer can only appear as a base register,
3089 never an index register, so exchange the regs if it is wrong. */
3091 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3100 /* output breg+ireg*scale */
3101 PRINT_B_I_S (breg, ireg, scale, file);
/* MULT at top level: bare index*scale with a zero displacement. */
3108 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3110 scale = INTVAL (XEXP (addr, 0));
3111 ireg = XEXP (addr, 1);
3115 scale = INTVAL (XEXP (addr, 1));
3116 ireg = XEXP (addr, 0);
3118 output_addr_const (file, const0_rtx);
3119 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
/* Default: an absolute address.  Small literal integers are printed
   directly so the assembler does not sign-extend 0x8000..0xffff. */
3124 if (GET_CODE (addr) == CONST_INT
3125 && INTVAL (addr) < 0x8000
3126 && INTVAL (addr) >= -0x8000)
3127 fprintf (file, "%d", INTVAL (addr));
3131 output_pic_addr_const (file, addr, 0);
3133 output_addr_const (file, addr);
3138 /* Set the cc_status for the results of an insn whose pattern is EXP.
3139 On the 80386, we assume that only test and compare insns, as well
3140 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3141 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3142 Also, we assume that jumps, moves and sCOND don't affect the condition
3143 codes. All else clobbers the condition codes, by assumption.
3145 We assume that ALL integer add, minus, etc. instructions effect the
3146 condition codes. This MUST be consistent with i386.md.
3148 We don't record any float test or compare - the redundant test &
3149 compare check in final.c does not handle stack-like regs correctly. */
/* NOTE(review): elided listing -- the CC_STATUS_INIT calls, returns and
   braces that accompany each branch are missing between the numbered
   lines. */
3152 notice_update_cc (exp)
3155 if (GET_CODE (exp) == SET)
3157 /* Jumps do not alter the cc's. */
3158 if (SET_DEST (exp) == pc_rtx)
3160 #ifdef IS_STACK_MODE
3161 /* Moving into a memory of stack_mode may have been moved
3162 in between the use and set of cc0 by loop_spl(). So
3163 old value of cc.status must be retained */
3164 if(GET_CODE(SET_DEST(exp))==MEM
3165 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3170 /* Moving register or memory into a register:
3171 it doesn't alter the cc's, but it might invalidate
3172 the RTX's which we remember the cc's came from.
3173 (Note that moving a constant 0 or 1 MAY set the cc's). */
3174 if (REG_P (SET_DEST (exp))
3175 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3176 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3178 if (cc_status.value1
3179 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3180 cc_status.value1 = 0;
3181 if (cc_status.value2
3182 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3183 cc_status.value2 = 0;
3186 /* Moving register into memory doesn't alter the cc's.
3187 It may invalidate the RTX's which we remember the cc's came from. */
3188 if (GET_CODE (SET_DEST (exp)) == MEM
3189 && (REG_P (SET_SRC (exp))
3190 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
/* NOTE(review): the conditions below read `A && B || C` -- && binds tighter
   than ||, so reg_mentioned_p can be reached with cc_status.value1/2 == 0.
   Looks like `A && (B || C)` was intended; verify against later i386.c
   revisions before relying on this. */
3192 if (cc_status.value1 && GET_CODE (cc_status.value1) == MEM
3193 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3194 cc_status.value1 = 0;
3195 if (cc_status.value2 && GET_CODE (cc_status.value2) == MEM
3196 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3197 cc_status.value2 = 0;
3200 /* Function calls clobber the cc's. */
3201 else if (GET_CODE (SET_SRC (exp)) == CALL)
3206 /* Tests and compares set the cc's in predictable ways. */
3207 else if (SET_DEST (exp) == cc0_rtx)
3210 cc_status.value1 = SET_SRC (exp);
3213 /* Certain instructions effect the condition codes. */
3214 else if (GET_MODE (SET_SRC (exp)) == SImode
3215 || GET_MODE (SET_SRC (exp)) == HImode
3216 || GET_MODE (SET_SRC (exp)) == QImode)
3217 switch (GET_CODE (SET_SRC (exp)))
3219 case ASHIFTRT: case LSHIFTRT:
3221 /* Shifts on the 386 don't set the condition codes if the
3222 shift count is zero. */
3223 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3228 /* We assume that the CONST_INT is non-zero (this rtx would
3229 have been deleted if it were zero. */
3231 case PLUS: case MINUS: case NEG:
3232 case AND: case IOR: case XOR:
3233 cc_status.flags = CC_NO_OVERFLOW;
3234 cc_status.value1 = SET_SRC (exp);
3235 cc_status.value2 = SET_DEST (exp);
/* PARALLEL: look only at the first SET; an FP source marks the cc as
   living in the 80387 status word. */
3246 else if (GET_CODE (exp) == PARALLEL
3247 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3249 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3251 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3254 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3255 cc_status.flags |= CC_IN_80387;
3257 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3268 /* Split one or more DImode RTL references into pairs of SImode
3269 references. The RTL can be REG, offsettable MEM, integer constant, or
3270 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3271 split and "num" is its length. lo_half and hi_half are output arrays
3272 that parallel "operands". */
/* NOTE(review): elided listing -- the loop over `num` and the abort() for
   unsplittable operands are missing here.  A REG splits into REGNO and
   REGNO+1 (assumes the DI value occupies two consecutive hard regs);
   an offsettable MEM's high half is the same MEM at byte offset +4. */
3275 split_di (operands, num, lo_half, hi_half)
3278 rtx lo_half[], hi_half[];
3282 if (GET_CODE (operands[num]) == REG)
3284 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3285 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3287 else if (CONSTANT_P (operands[num]))
3289 split_double (operands[num], &lo_half[num], &hi_half[num]);
3291 else if (offsettable_memref_p (operands[num]))
3293 lo_half[num] = operands[num];
3294 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3301 /* Return 1 if this is a valid binary operation on a 387.
3302 OP is the expression matched, and MODE is its mode. */
/* NOTE(review): elided listing -- the case labels (presumably PLUS/MINUS/
   MULT/DIV) preceding the return at 3318 are missing from this extract. */
3305 binary_387_op (op, mode)
3307 enum machine_mode mode;
3309 if (mode != VOIDmode && mode != GET_MODE (op))
3312 switch (GET_CODE (op))
3318 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3326 /* Return 1 if this is a valid shift or rotate operation on a 386.
3327 OP is the expression matched, and MODE is its mode. */
/* NOTE(review): the function-name line is elided from this listing
   (presumably `shift_op`); only the parameter declaration survives. */
3332 enum machine_mode mode;
3334 rtx operand = XEXP (op, 0);
3336 if (mode != VOIDmode && mode != GET_MODE (op))
/* The shifted operand must share the op's (integer) mode. */
3339 if (GET_MODE (operand) != GET_MODE (op)
3340 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
3343 return (GET_CODE (op) == ASHIFT
3344 || GET_CODE (op) == ASHIFTRT
3345 || GET_CODE (op) == LSHIFTRT
3346 || GET_CODE (op) == ROTATE
3347 || GET_CODE (op) == ROTATERT);
3350 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3351 MODE is not used. */
3354 VOIDmode_compare_op (op, mode)
3356 enum machine_mode mode;
3358 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3361 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3362 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3363 is the expression of the binary operation. The output may either be
3364 emitted here, or returned to the caller, like all output_* functions.
3366 There is no guarantee that the operands are the same mode, as they
3367 might be within FLOAT or FLOAT_EXTEND expressions. */
/* NOTE(review): elided listing -- the `base_op` assignments ("fadd",
   "fsub", "fi..." variants) selected by the first switch, plus case labels
   and braces, are missing between the numbered lines. */
3370 output_387_binary_op (insn, operands)
3376 static char buf[100];
3378 switch (GET_CODE (operands[3]))
3381 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3382 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3389 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3390 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3397 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3398 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3405 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3406 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3416 strcpy (buf, base_op);
3418 switch (GET_CODE (operands[3]))
/* Commutative ops (PLUS/MULT): if the destination matches operand 2,
   swap so the template can always treat operand 2 as the source. */
3422 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3425 operands[2] = operands[1];
3429 if (GET_CODE (operands[2]) == MEM)
3430 return strcat (buf, AS1 (%z2,%2));
3432 if (NON_STACK_REG_P (operands[1]))
3434 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3437 else if (NON_STACK_REG_P (operands[2]))
3439 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
/* "p" suffix pops the 387 stack when operand 2 dies in this insn. */
3443 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3444 return strcat (buf, AS2 (p,%2,%0));
3446 if (STACK_TOP_P (operands[0]))
3447 return strcat (buf, AS2C (%y2,%0));
3449 return strcat (buf, AS2C (%2,%0));
/* Non-commutative ops (MINUS/DIV): may need the reversed ("r") form. */
3453 if (GET_CODE (operands[1]) == MEM)
3454 return strcat (buf, AS1 (r%z1,%1));
3456 if (GET_CODE (operands[2]) == MEM)
3457 return strcat (buf, AS1 (%z2,%2));
3459 if (NON_STACK_REG_P (operands[1]))
3461 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3464 else if (NON_STACK_REG_P (operands[2]))
3466 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3470 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
3473 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3474 return strcat (buf, AS2 (rp,%2,%0));
3476 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3477 return strcat (buf, AS2 (p,%1,%0));
3479 if (STACK_TOP_P (operands[0]))
3481 if (STACK_TOP_P (operands[1]))
3482 return strcat (buf, AS2C (%y2,%0));
3484 return strcat (buf, AS2 (r,%y1,%0));
3486 else if (STACK_TOP_P (operands[1]))
3487 return strcat (buf, AS2C (%1,%0));
3489 return strcat (buf, AS2 (r,%2,%0));
3496 /* Output code for INSN to convert a float to a signed int. OPERANDS
3497 are the insn operands. The output may be SFmode or DFmode and the
3498 input operand may be SImode or DImode. As a special case, make sure
3499 that the 387 stack top dies if the output mode is DImode, because the
3500 hardware requires this. */
/* NOTE(review): elided listing.  The visible sequence saves the 387
   control word, forces round-toward-zero (truncation) by rewriting the
   rounding-control bits, does fist/fistp, then restores the original
   control word via the returned template. */
3503 output_fix_trunc (insn, operands)
3507 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3510 if (! STACK_TOP_P (operands[1]) ||
3511 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
/* 12 = 0b1100: the RC field value selecting truncation -- presumably
   stored into the high byte of the control-word copy below; confirm
   against the 387 control-word layout. */
3514 xops[0] = GEN_INT (12);
3515 xops[1] = operands[4];
3517 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3518 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3519 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3520 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3521 output_asm_insn (AS1 (fldc%W3,%3), operands);
3523 if (NON_STACK_REG_P (operands[0]))
3524 output_to_reg (operands[0], stack_top_dies);
3525 else if (GET_CODE (operands[0]) == MEM)
3528 output_asm_insn (AS1 (fistp%z0,%0), operands);
3530 output_asm_insn (AS1 (fist%z0,%0), operands);
3535 return AS1 (fldc%W2,%2);
3538 /* Output code for INSN to compare OPERANDS. The two operands might
3539 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3540 expression. If the compare is in mode CCFPEQmode, use an opcode that
3541 will not fault if a qNaN is present. */
/* NOTE(review): elided listing -- braces, an abort() after the second
   STACK_TOP_P check, and the pop-suffix handling near 3594 are missing. */
3544 output_float_compare (insn, operands)
3549 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3550 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
/* The 387 can only compare against the stack top; if operand 0 is not
   %st(0), swap the operands and remember that the condition is reversed. */
3553 if (! STACK_TOP_P (operands[0]))
3556 operands[0] = operands[1];
3558 cc_status.flags |= CC_REVERSED;
3561 if (! STACK_TOP_P (operands[0]))
3564 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3566 if (STACK_REG_P (operands[1])
3568 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3569 && REGNO (operands[1]) != FIRST_STACK_REG)
3571 /* If both the top of the 387 stack dies, and the other operand
3572 is also a stack register that dies, then this must be a
3573 `fcompp' float compare */
3575 if (unordered_compare)
3576 output_asm_insn ("fucompp", operands);
3578 output_asm_insn ("fcompp", operands);
3582 static char buf[100];
3584 /* Decide if this is the integer or float compare opcode, or the
3585 unordered float compare. */
3587 if (unordered_compare)
3588 strcpy (buf, "fucom");
3589 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3590 strcpy (buf, "fcom");
3592 strcpy (buf, "ficom");
3594 /* Modify the opcode if the 387 stack is to be popped. */
3599 if (NON_STACK_REG_P (operands[1]))
3600 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3602 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3605 /* Now retrieve the condition code. */
3607 return output_fp_cc0_set (insn);
3610 /* Output opcodes to transfer the results of FP compare or test INSN
3611 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3612 result of the compare or test is unordered, no comparison operator
3613 succeeds except NE. Return an output template, if any. */
/* NOTE(review): elided listing -- case labels for the big switch near the
   end (GT/LT/GE/LE/EQ/NE), several returns/aborts, and the "sahf"
   fallback template are missing between the numbered lines. */
3616 output_fp_cc0_set (insn)
3620 rtx unordered_label;
/* fnstsw stores the 387 status word into %ax (REG 0 in HImode). */
3624 xops[0] = gen_rtx (REG, HImode, 0);
3625 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3627 if (! TARGET_IEEE_FP)
3629 if (!(cc_status.flags & CC_REVERSED))
3631 next = next_cc0_user (insn);
3633 if (GET_CODE (next) == JUMP_INSN
3634 && GET_CODE (PATTERN (next)) == SET
3635 && SET_DEST (PATTERN (next)) == pc_rtx
3636 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3638 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3640 else if (GET_CODE (PATTERN (next)) == SET)
3642 code = GET_CODE (SET_SRC (PATTERN (next)));
3648 if (code == GT || code == LT || code == EQ || code == NE
3649 || code == LE || code == GE)
3650 { /* We will test eax directly */
3651 cc_status.flags |= CC_TEST_AX;
/* IEEE path: inspect the cc0 user to know which comparison follows, then
   mask/compare the C0/C2/C3 bits in %ah so unordered results only satisfy
   NE.  0x45 = C0|C2|C3, 0x05 = C0|C2, 0x40 = C3, 0x44 = C2|C3. */
3658 next = next_cc0_user (insn);
3659 if (next == NULL_RTX)
3662 if (GET_CODE (next) == JUMP_INSN
3663 && GET_CODE (PATTERN (next)) == SET
3664 && SET_DEST (PATTERN (next)) == pc_rtx
3665 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3667 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3669 else if (GET_CODE (PATTERN (next)) == SET)
3671 if (GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3672 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3673 else code = GET_CODE (SET_SRC (PATTERN (next)));
3678 xops[0] = gen_rtx (REG, QImode, 0);
3683 xops[1] = GEN_INT (0x45);
3684 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3689 xops[1] = GEN_INT (0x45);
3690 xops[2] = GEN_INT (0x01);
3691 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3692 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3697 xops[1] = GEN_INT (0x05);
3698 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3703 xops[1] = GEN_INT (0x45);
3704 xops[2] = GEN_INT (0x40);
3705 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3706 output_asm_insn (AS1 (dec%B0,%h0), xops);
3707 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3712 xops[1] = GEN_INT (0x45);
3713 xops[2] = GEN_INT (0x40);
3714 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3715 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3720 xops[1] = GEN_INT (0x44);
3721 xops[2] = GEN_INT (0x40);
3722 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3723 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
/* Per-function stack-slot cache: one remembered slot per (mode, n) pair,
   saved/restored across nested-function compilation via the machine field
   of struct function. */
3737 #define MAX_386_STACK_LOCALS 2
3739 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3741 /* Define the structure for the machine field in struct function. */
3742 struct machine_function
3744 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3747 /* Functions to save and restore i386_stack_locals.
3748 These will be called, via pointer variables,
3749 from push_function_context and pop_function_context. */
/* Copy the current i386_stack_locals into freshly allocated storage hung
   off P->machine, so a nested function can be compiled without clobbering
   the outer function's slot cache.  (Return type / param decl elided in
   this listing.) */
3752 save_386_machine_status (p)
3755 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3756 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3757 sizeof i386_stack_locals);
/* Inverse of save_386_machine_status: reload i386_stack_locals from the
   saved copy in P->machine when popping a function context. */
3761 restore_386_machine_status (p)
3764 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3765 sizeof i386_stack_locals);
3769 /* Clear stack slot assignments remembered from previous functions.
3770 This is called from INIT_EXPANDERS once before RTL is emitted for each
/* (Comment continuation elided in this listing.) */
3774 clear_386_stack_locals ()
3776 enum machine_mode mode;
/* Reset every (mode, slot) entry so stale rtx from a prior function is
   never reused. */
3779 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3780 mode = (enum machine_mode) ((int) mode + 1))
3781 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3782 i386_stack_locals[(int) mode][n] = NULL_RTX;
3784 /* Arrange to save and restore i386_stack_locals around nested functions. */
3785 save_machine_status = save_386_machine_status;
3786 restore_machine_status = restore_386_machine_status;
3789 /* Return a MEM corresponding to a stack slot with mode MODE.
3790 Allocate a new slot if necessary.
3792 The RTL for a function can have several slots available: N is
3793 which slot to use. */
/* Lazily allocates and memoizes the slot; the abort for an out-of-range N
   is elided from this listing after the bounds check. */
3796 assign_386_stack_local (mode, n)
3797 enum machine_mode mode;
3800 if (n < 0 || n >= MAX_386_STACK_LOCALS)
3803 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3804 i386_stack_locals[(int) mode][n]
3805 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3807 return i386_stack_locals[(int) mode][n];
/* Two small rtx predicates; their name lines are elided from this listing.
   First: nonzero iff OP is a MULT rtx.  Second: nonzero iff OP is a DIV
   rtx.  MODE is accepted but unused in the visible code. */
3813 enum machine_mode mode;
3815 return (GET_CODE (op) == MULT);
3820 enum machine_mode mode;
3822 return (GET_CODE (op) == DIV);
3827 /* Create a new copy of an rtx.
3828 Recursively copies the operands of the rtx,
3829 except for those few rtx codes that are sharable.
3830 Doesn't share CONST */
/* NOTE(review): elided listing -- the function header (presumably
   `copy_all_rtx`), the early-return case labels for sharable codes (REG,
   CONST_INT, SYMBOL_REF, ...), and the final `return copy;` are missing
   between the numbered lines. */
3838 register RTX_CODE code;
3839 register char *format_ptr;
3841 code = GET_CODE (orig);
3854 /* SCRATCH must be shared because they represent distinct values. */
3859 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3860 a LABEL_REF, it isn't sharable. */
3861 if (GET_CODE (XEXP (orig, 0)) == PLUS
3862 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3863 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3867 /* A MEM with a constant address is not sharable. The problem is that
3868 the constant address may need to be reloaded. If the mem is shared,
3869 then reloading one copy of this mem will cause all copies to appear
3870 to have been reloaded. */
/* Allocate the copy and duplicate the per-rtx flag bits. */
3873 copy = rtx_alloc (code);
3874 PUT_MODE (copy, GET_MODE (orig));
3875 copy->in_struct = orig->in_struct;
3876 copy->volatil = orig->volatil;
3877 copy->unchanging = orig->unchanging;
3878 copy->integrated = orig->integrated;
3880 copy->is_spill_rtx = orig->is_spill_rtx;
/* Walk the rtx format string, recursing on 'e' operands and 'E' vectors;
   scalar fields ('w', 'i', 's') are copied directly. */
3882 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3884 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3886 switch (*format_ptr++)
3889 XEXP (copy, i) = XEXP (orig, i);
3890 if (XEXP (orig, i) != NULL)
3891 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
3896 XEXP (copy, i) = XEXP (orig, i);
3901 XVEC (copy, i) = XVEC (orig, i);
3902 if (XVEC (orig, i) != NULL)
3904 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3905 for (j = 0; j < XVECLEN (copy, i); j++)
3906 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
3911 XWINT (copy, i) = XWINT (orig, i);
3915 XINT (copy, i) = XINT (orig, i);
3920 XSTR (copy, i) = XSTR (orig, i);
3931 /* try to rewrite a memory address to make it valid */
3933 rewrite_address (mem_rtx)
3936 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
3938 int offset_adjust = 0;
3939 int was_only_offset = 0;
3940 rtx mem_addr = XEXP (mem_rtx, 0);
3941 char *storage = (char *) oballoc (0);
3943 int is_spill_rtx = 0;
3945 in_struct = MEM_IN_STRUCT_P (mem_rtx);
3946 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
3948 if (GET_CODE (mem_addr) == PLUS &&
3949 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
3950 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
3951 { /* this part is utilized by the combiner */
3953 gen_rtx (PLUS, GET_MODE (mem_addr),
3954 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
3956 XEXP (XEXP (mem_addr, 1), 0)),
3957 XEXP (XEXP (mem_addr, 1), 1));
3958 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
3960 XEXP (mem_rtx, 0) = ret_rtx;
3961 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
3967 /* this part is utilized by loop.c */
3968 /* If the address contains PLUS (reg,const) and this pattern is invalid
3969 in this case - try to rewrite the address to make it valid intel1
3971 storage = (char *) oballoc (0);
3972 index_rtx = base_rtx = offset_rtx = NULL;
3973 /* find the base index and offset elements of the memory address */
3974 if (GET_CODE (mem_addr) == PLUS)
3976 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
3978 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
3980 base_rtx = XEXP (mem_addr, 1);
3981 index_rtx = XEXP (mem_addr, 0);
3985 base_rtx = XEXP (mem_addr, 0);
3986 offset_rtx = XEXP (mem_addr, 1);
3989 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
3991 index_rtx = XEXP (mem_addr, 0);
3992 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
3994 base_rtx = XEXP (mem_addr, 1);
3998 offset_rtx = XEXP (mem_addr, 1);
4001 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
4004 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
4005 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
4006 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
4007 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
4008 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
4009 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
4010 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF)
4012 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
4013 offset_rtx = XEXP (mem_addr, 1);
4014 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4015 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
4019 offset_rtx = XEXP (mem_addr, 1);
4020 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
4021 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
4024 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT)
4026 was_only_offset = 1;
4029 offset_rtx = XEXP (mem_addr, 1);
4030 offset_adjust = INTVAL (XEXP (mem_addr, 0));
4031 if (offset_adjust == 0)
4033 XEXP (mem_rtx, 0) = offset_rtx;
4034 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4044 else if (GET_CODE (mem_addr) == MULT)
4046 index_rtx = mem_addr;
4053 if (index_rtx && GET_CODE (index_rtx) == MULT)
4055 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4060 scale_rtx = XEXP (index_rtx, 1);
4061 scale = INTVAL (scale_rtx);
4062 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* NOTE(review): fragment of an address-rewriting routine -- the enclosing
   function's header, opening braces and a number of interior lines are not
   visible in this chunk, so the commentary below describes only what the
   visible statements themselves show.  */
4064 /* now find which of the elements are invalid and try to fix them */
/* A constant "index" with no base is really a displacement; fold it,
   scaled, into offset_adjust.  */
4065 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4067 offset_adjust = INTVAL (index_rtx) * scale;
/* offset of the form (const (plus (symbol_ref) (const_int))): fold
   offset_adjust into the CONST_INT part, working on a copy so shared
   rtl is not clobbered.  */
4068 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4069 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4071 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4072 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4074 offset_rtx = copy_all_rtx (offset_rtx);
4075 XEXP (XEXP (offset_rtx, 0), 1) =
4076 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4077 if (!CONSTANT_P (offset_rtx))
/* A bare SYMBOL_REF offset: rewrap it as (const (plus symbol adjust)).  */
4084 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4087 gen_rtx (CONST, GET_MODE (offset_rtx),
4088 gen_rtx (PLUS, GET_MODE (offset_rtx),
4090 gen_rtx (CONST_INT, 0, offset_adjust)));
4091 if (!CONSTANT_P (offset_rtx))
/* A plain CONST_INT offset: just add the accumulated adjustment.  */
4097 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4099 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
/* No offset at all: use a zero displacement.  */
4101 else if (!offset_rtx)
4103 offset_rtx = gen_rtx (CONST_INT, 0, 0);
/* Install the offset as the whole address, preserving the spill marking.  */
4105 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4106 XEXP (mem_rtx, 0) = offset_rtx;
/* Base of the form (plus reg const_int): move the constant into
   offset_adjust and keep only the register (again on a copy).  */
4109 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4110 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4111 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4113 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4114 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4116 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4118 offset_adjust += INTVAL (base_rtx);
/* Same simplification for the index, remembering the scale factor.  */
4121 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4122 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4123 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4125 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4126 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* %esp is not a valid index register; it is tolerated here only as an
   unscaled index standing in for a missing base.  */
4130 if (!LEGITIMATE_INDEX_P (index_rtx)
4131 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4139 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
/* If any displacement was accumulated above, merge it back into
   offset_rtx, handling the same three shapes as earlier.  */
4145 if (offset_adjust != 0)
4149 if (GET_CODE (offset_rtx) == CONST &&
4150 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4152 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4153 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4155 offset_rtx = copy_all_rtx (offset_rtx);
4156 XEXP (XEXP (offset_rtx, 0), 1) =
4157 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4158 if (!CONSTANT_P (offset_rtx))
4165 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4168 gen_rtx (CONST, GET_MODE (offset_rtx),
4169 gen_rtx (PLUS, GET_MODE (offset_rtx),
4171 gen_rtx (CONST_INT, 0, offset_adjust)));
4172 if (!CONSTANT_P (offset_rtx))
4178 else if (GET_CODE (offset_rtx) == CONST_INT)
4180 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4190 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
/* Rebuild the address tree from the (possibly simplified) base, index,
   scale and offset.  A zero offset is omitted to keep the rtl minimal.
   Case: base + scaled index.  */
4198 if (GET_CODE (offset_rtx) == CONST_INT &&
4199 INTVAL (offset_rtx) == 0)
4201 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4202 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4208 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4209 gen_rtx (PLUS, GET_MODE (base_rtx),
4210 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
/* Case: base + unscaled index (scale == 1).  */
4218 if (GET_CODE (offset_rtx) == CONST_INT &&
4219 INTVAL (offset_rtx) == 0)
4221 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4225 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4226 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
/* Case: scaled index with no base.  */
4236 if (GET_CODE (offset_rtx) == CONST_INT &&
4237 INTVAL (offset_rtx) == 0)
4239 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4244 gen_rtx (PLUS, GET_MODE (offset_rtx),
4245 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
/* Case: unscaled index with no base.  */
4252 if (GET_CODE (offset_rtx) == CONST_INT &&
4253 INTVAL (offset_rtx) == 0)
4255 ret_rtx = index_rtx;
4259 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
/* Case: base only.  */
4268 if (GET_CODE (offset_rtx) == CONST_INT &&
4269 INTVAL (offset_rtx) == 0)
4275 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
/* Case: the address was nothing but a constant offset.  */
4278 else if (was_only_offset)
4280 ret_rtx = offset_rtx;
/* Install the rebuilt address, preserving the spill-slot marking.  */
4288 XEXP (mem_rtx, 0) = ret_rtx;
4289 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4301 /* return 1 if the first insn to set cc before insn also sets the register
4302 reg_rtx - otherwise return 0 */
/* NOTE(review): the parameter declarations, braces and return statements of
   this function are not visible in this chunk; the comments below describe
   only the visible backward-scan loop.  */
4304 last_to_set_cc (reg_rtx, insn)
/* Walk backwards from INSN toward the nearest insn that sets cc.  */
4307 rtx prev_insn = PREV_INSN (insn);
/* Notes carry no code; skip over them.  */
4311 if (GET_CODE (prev_insn) == NOTE)
4314 else if (GET_CODE (prev_insn) == INSN)
/* Anything other than a single SET ends the search.  */
4316 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* A SET of reg_rtx: success only if its source also sets cc.  */
4319 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4321 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* A SET of some other destination whose source may clobber cc also
   ends the search.  */
4327 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4334 prev_insn = PREV_INSN (prev_insn);
/* NOTE(review): fragment -- the switch cases and return statements of this
   predicate are not visible in this chunk.  From the caller in
   last_to_set_cc it is used as "rtx code PAT is known not to affect the
   condition code"; confirm against the full source.  */
4342 doesnt_set_condition_code (pat)
4345 switch (GET_CODE (pat))
/* NOTE(review): fragment -- the switch cases and return statements of this
   predicate are not visible in this chunk.  From the caller in
   last_to_set_cc it is used as "rtx code PAT sets the condition code as a
   side effect"; confirm against the full source.  */
4359 sets_condition_code (pat)
4362 switch (GET_CODE (pat))
/* NOTE(review): fragment -- the header comment, remaining parameter
   declarations and return statements are not visible in this chunk.  The
   visible test accepts a CONST_INT in the inclusive range 0..32.  */
4386 str_immediate_operand (op, mode)
4388 enum machine_mode mode;
4390 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* NOTE(review): fragment of a predicate whose function header is not
   visible in this chunk.  The visible condition matches a single-SET insn
   whose destination has a floating mode (DFmode, SFmode or XFmode).  */
4402 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4403 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4404 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4405 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4414 Return 1 if the mode of the SET_DEST of insn is floating point
4415 and it is not an fld or a move from memory to memory.
4416 Otherwise return 0 */
/* NOTE(review): fragment -- the function header and return statements are
   not visible in this chunk.  The visible condition matches a single-SET
   insn storing into a float-mode FP stack register from a non-MEM source.  */
4421 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4422 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4423 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4424 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4425 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4426 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
/* Bug fix: SET_SRC must be applied to the SET pattern, not to the insn
   itself -- every other accessor in this chain uses PATTERN (insn).
   Applying SET_SRC directly to the insn reads the wrong rtx operand.  */
4427 && GET_CODE (SET_SRC (PATTERN (insn))) != MEM)
4436 Return 1 if the mode of the SET_DEST is floating point and is memory
4437 and the source is a register.
/* NOTE(review): fragment -- the function header, remaining comment text and
   return statements are not visible in this chunk.  The visible condition
   matches a register-to-memory store in a floating mode.  */
4443 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4444 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4445 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4446 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4447 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4448 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4458 Return 1 if dep_insn sets a register which insn uses as a base
4459 or index to reference memory.
4460 otherwise return 0 */
/* NOTE(review): the parameter declarations and braces of this function are
   not all visible in this chunk.  */
4463 agi_dependent (insn, dep_insn)
/* dep_insn writes a register: an AGI (address-generation-interlock) stall
   occurs if insn then uses that register inside a memory address.  */
4466 if (GET_CODE (dep_insn) == INSN
4467 && GET_CODE (PATTERN (dep_insn)) == SET
4468 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4470 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
/* A push modifies the stack pointer implicitly, so a later address that
   uses %esp stalls as well.  */
4473 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4474 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4475 && push_operand (SET_DEST (PATTERN (dep_insn)),
4476 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4478 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4486 Return 1 if reg is used in rtl as a base or index for a memory ref
4487 otherwise return 0. */
/* NOTE(review): several declarations, braces and return statements of this
   function are not visible in this chunk.  */
4490 reg_mentioned_in_mem (reg, rtl)
4495 register enum rtx_code code;
4500 code = GET_CODE (rtl);
/* Direct hit: REG appears anywhere inside a MEM's address.  */
4518 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Otherwise recurse over every sub-rtx (vectors via 'E', single
   expressions via 'e') looking for such a MEM.  */
4521 fmt = GET_RTX_FORMAT (code);
4522 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4527 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4529 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4534 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4541 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4543 operands[0] = result, initialized with the startaddress
4544 operands[1] = alignment of the address.
4545 operands[2] = scratch register, initialized with the startaddress when
4546 not aligned, otherwise undefined
4548 This is just the body. It needs the initialisations mentioned above and
4549 some address computing at the end. These things are done in i386.md. */
/* NOTE(review): the declarations, braces and the tail of this function are
   not visible in this chunk; comments below annotate only what is shown.  */
4552 output_strlen_unroll (operands)
/* Operand table handed to output_asm_insn: constants, the result and
   scratch registers, and the labels used by the generated code.  */
4557 xops[0] = operands[0]; /* Result */
4558 /* operands[1]; * Alignment */
4559 xops[1] = operands[2]; /* Scratch */
4560 xops[2] = GEN_INT (0);
4561 xops[3] = GEN_INT (2);
4562 xops[4] = GEN_INT (3);
4563 xops[5] = GEN_INT (4);
4564 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4565 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4566 xops[8] = gen_label_rtx (); /* label of main loop */
4567 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4568 xops[9] = gen_label_rtx (); /* pentium optimisation */
4569 xops[10] = gen_label_rtx (); /* end label 2 */
4570 xops[11] = gen_label_rtx (); /* end label 1 */
4571 xops[12] = gen_label_rtx (); /* end label */
4572 /* xops[13] * Temporary used */
/* Per-byte masks of an SImode word, used by the main loop below.  */
4573 xops[14] = GEN_INT (0xff);
4574 xops[15] = GEN_INT (0xff00);
4575 xops[16] = GEN_INT (0xff0000);
4576 xops[17] = GEN_INT (0xff000000);
4578 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4580 /* is there a known alignment and is it less than 4 */
4581 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4583 /* is there a known alignment and is it not 2 */
4584 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4586 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4587 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4589 /* leave just the 3 lower bits */
4590 /* if this is a q-register, then the high part is used later */
4591 /* therefore use andl rather than andb */
4592 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4593 /* is aligned to 4-byte address when zero */
4594 output_asm_insn (AS1 (je,%l8), xops);
4595 /* side-effect even Parity when %eax == 3 */
4596 output_asm_insn (AS1 (jp,%6), xops);
4598 /* is it aligned to 2 bytes ? */
4599 if (QI_REG_P (xops[1]))
4600 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4602 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4603 output_asm_insn (AS1 (je,%7), xops);
4607 /* since the alignment is 2, we have to check 2 or 0 bytes */
4609 /* check if is aligned to 4 - byte */
4610 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4611 /* is aligned to 4-byte address when zero */
4612 output_asm_insn (AS1 (je,%l8), xops);
/* xops[13] is a QImode memory ref through the result pointer, used for
   the byte-at-a-time head of the scan.  */
4615 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4616 /* now, compare the bytes */
4617 /* compare with the high part of a q-reg gives shorter code */
4618 if (QI_REG_P (xops[1]))
4620 /* compare the first n unaligned byte on a byte per byte basis */
4621 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4622 /* when zero we reached the end */
4623 output_asm_insn (AS1 (je,%l12), xops);
4624 /* increment the address */
4625 output_asm_insn (AS1 (inc%L0,%0), xops);
4627 /* not needed with an alignment of 2 */
4628 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4630 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4631 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4632 output_asm_insn (AS1 (je,%l12), xops);
4633 output_asm_insn (AS1 (inc%L0,%0), xops);
4635 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4637 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
/* Non-q-register variant: compare each head byte against the constant
   zero (xops[2]) instead of the scratch's high byte.  */
4641 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4642 output_asm_insn (AS1 (je,%l12), xops);
4643 output_asm_insn (AS1 (inc%L0,%0), xops);
4645 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4646 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4647 output_asm_insn (AS1 (je,%l12), xops);
4648 output_asm_insn (AS1 (inc%L0,%0), xops);
4650 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4651 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4653 output_asm_insn (AS1 (je,%l12), xops);
4654 output_asm_insn (AS1 (inc%L0,%0), xops);
4657 /* Generate loop to check 4 bytes at a time */
4658 /* IMHO it is not a good idea to align this loop. It gives only */
4659 /* huge programs, but does not help to speed up */
4660 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4661 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
/* Reuse xops[13] as an SImode load of the next aligned word.  */
4663 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4664 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4666 if (QI_REG_P (xops[1]))
4668 /* On i586 it is faster to combine the hi- and lo- part as
4669 a kind of lookahead. If anding both yields zero, then one
4670 of both *could* be zero, otherwise none of both is zero;
4671 this saves one instruction, on i486 this is slower
4672 tested with P-90, i486DX2-66, AMD486DX2-66 */
4675 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4676 output_asm_insn (AS1 (jne,%l9), xops);
4679 /* check first byte */
4680 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4681 output_asm_insn (AS1 (je,%l12), xops);
4683 /* check second byte */
4684 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4685 output_asm_insn (AS1 (je,%l11), xops);
4688 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
/* Non-q-register variant: test the word against the per-byte masks.  */
4692 /* check first byte */
4693 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4694 output_asm_insn (AS1 (je,%l12), xops);
4696 /* check second byte */
4697 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4698 output_asm_insn (AS1 (je,%l11), xops);
4701 /* check third byte */
4702 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4703 output_asm_insn (AS1 (je,%l10), xops);
4705 /* check fourth byte and increment address */
4706 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4707 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4708 output_asm_insn (AS1 (jne,%l8), xops);
4710 /* now generate fixups when the compare stops within a 4-byte word */
4711 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
4713 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4714 output_asm_insn (AS1 (inc%L0,%0), xops);
4716 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4717 output_asm_insn (AS1 (inc%L0,%0), xops);
4719 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));