1 /* Subroutines for insn-output.c for Intel X86.
2 Copyright (C) 1988, 1992, 1994, 1995, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
39 #ifdef EXTRA_CONSTRAINT
40 /* If EXTRA_CONSTRAINT is defined, then the 'S'
41 constraint in REG_CLASS_FROM_LETTER will no longer work, and various
42 asm statements that need 'S' for class SIREG will break. */
/* Deliberately a bare (invalid) token rather than #error: it only trips
   the compiler when this #ifdef arm is actually taken.  */
43 error EXTRA_CONSTRAINT conflicts with S constraint letter
44 /* The previous line used to be #error, but some compilers barf
45 even if the conditional was untrue. */
/* Operand classification used by ix86_binary_operator_ok and
   ix86_unary_operator_ok (enumerators not visible in this chunk).  */
48 enum reg_mem /* Type of an operand for ix86_{binary,unary}_operator_ok */
/* Per-processor instruction cost tables.  Each entry is a relative cost
   in units of one integer add (see slot comments below).  */
55 /* Processor costs (relative to an add) */
56 struct processor_costs i386_cost = { /* 386 specific costs */
57 1, /* cost of an add instruction (2 cycles) */
58 1, /* cost of a lea instruction */
59 3, /* variable shift costs */
60 2, /* constant shift costs */
61 6, /* cost of starting a multiply */
62 1, /* cost of multiply per each bit set */
63 23 /* cost of a divide/mod */
66 struct processor_costs i486_cost = { /* 486 specific costs */
67 1, /* cost of an add instruction */
68 1, /* cost of a lea instruction */
69 3, /* variable shift costs */
70 2, /* constant shift costs */
71 12, /* cost of starting a multiply */
72 1, /* cost of multiply per each bit set */
73 40 /* cost of a divide/mod */
76 struct processor_costs pentium_cost = {
77 1, /* cost of an add instruction */
78 1, /* cost of a lea instruction */
79 3, /* variable shift costs */
80 1, /* constant shift costs */
81 12, /* cost of starting a multiply */
82 1, /* cost of multiply per each bit set */
83 25 /* cost of a divide/mod */
/* Cost table currently in effect; defaults to the Pentium table.
   NOTE(review): presumably repointed by option processing using the
   `cost' field of processor_target_table — the assignment is not
   visible in this chunk; confirm.  */
86 struct processor_costs *ix86_cost = &pentium_cost;
/* Build a MEM rtx of MODE addressed by the frame pointer.  */
88 #define AT_BP(mode) (gen_rtx (MEM, (mode), frame_pointer_rtx))
90 extern FILE *asm_out_file;
91 extern char *strcat ();
/* Forward declarations for assembler-output helpers defined below
   (pre-prototype K&R style).  */
93 char *singlemove_string ();
94 char *output_move_const_single ();
95 char *output_fp_cc0_set ();
/* Assembler register-name tables; the initializer macros come from
   i386.h.  NOTE(review): presumably indexed by hard register number —
   confirm against HI_REGISTER_NAMES.  */
97 char *hi_reg_name[] = HI_REGISTER_NAMES;
98 char *qi_reg_name[] = QI_REGISTER_NAMES;
99 char *qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
101 /* Array of the smallest class containing reg number REGNO, indexed by
102 REGNO. Used by REGNO_REG_CLASS in i386.h. */
104 enum reg_class regclass_map[FIRST_PSEUDO_REGISTER] =
107 AREG, DREG, CREG, BREG,
109 SIREG, DIREG, INDEX_REGS, GENERAL_REGS,
111 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
112 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
117 /* Test and compare insns in i386.md store the information needed to
118 generate branch and scc insns here. */
/* Operands of the pending compare, and the generator functions used to
   emit the eventual branch/scc (the _eq variant for equality tests).  */
120 struct rtx_def *i386_compare_op0 = NULL_RTX;
121 struct rtx_def *i386_compare_op1 = NULL_RTX;
122 struct rtx_def *(*i386_compare_gen)(), *(*i386_compare_gen_eq)();
124 /* which cpu are we scheduling for */
125 enum processor_type ix86_cpu;
127 /* which instruction set architecture to use. */
130 /* Strings to hold which cpu and instruction set architecture to use. */
131 char *ix86_cpu_string; /* for -mcpu=<xxx> */
132 char *ix86_isa_string; /* for -misa=<xxx> */
134 /* Register allocation order */
/* User-supplied -mreg-alloc= string; regs_allocated records which hard
   registers it has already claimed, to detect duplicates.  */
135 char *i386_reg_alloc_order;
136 static char regs_allocated[FIRST_PSEUDO_REGISTER];
138 /* # of registers to use to pass arguments. */
139 char *i386_regparm_string; /* # registers to use to pass args */
140 int i386_regparm; /* i386_regparm_string as a number */
142 /* Alignment to use for loops and jumps */
/* Raw option strings and their parsed power-of-two values; the ints are
   filled in during option override processing below.  */
143 char *i386_align_loops_string; /* power of two alignment for loops */
144 char *i386_align_jumps_string; /* power of two alignment for non-loop jumps */
145 char *i386_align_funcs_string; /* power of two alignment for functions */
147 int i386_align_loops; /* power of two alignment for loops */
148 int i386_align_jumps; /* power of two alignment for non-loop jumps */
149 int i386_align_funcs; /* power of two alignment for functions */
151 /* Sometimes certain combinations of command options do not make
152 sense on a particular target machine. You can define a macro
153 `OVERRIDE_OPTIONS' to take account of this. This macro, if
154 defined, is executed once just after all the command options have
157 Don't use this macro to turn on various extra optimizations for
158 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
/* One row per recognized -mcpu=/-misa= name; searched linearly by
   override_options.  */
169 char *name; /* Canonical processor name. */
170 enum processor_type processor; /* Processor type enum value. */
171 struct processor_costs *cost; /* Processor costs */
172 int target_enable; /* Target flags to enable. */
173 int target_disable; /* Target flags to disable. */
174 } processor_target_table[]
175 = {{PROCESSOR_COMMON_STRING, PROCESSOR_COMMON, &i486_cost, 0, 0},
176 {PROCESSOR_I386_STRING, PROCESSOR_I386, &i386_cost, 0, 0},
177 {PROCESSOR_I486_STRING, PROCESSOR_I486, &i486_cost, 0, 0},
178 {PROCESSOR_I586_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
179 {PROCESSOR_PENTIUM_STRING, PROCESSOR_PENTIUM, &pentium_cost, 0, 0},
/* NOTE(review): the PentiumPro rows reuse pentium_cost — looks like a
   placeholder until real PPro costs exist; confirm intent.  */
180 {PROCESSOR_I686_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0},
181 {PROCESSOR_PENTIUMPRO_STRING, PROCESSOR_PENTIUMPRO, &pentium_cost, 0, 0}};
/* Number of rows in processor_target_table.  */
183 int ptt_size = sizeof (processor_target_table) / sizeof (struct ptt);
/* Body of the OVERRIDE_OPTIONS hook: validates -mreg-alloc=, resolves
   -misa=/-mcpu= against processor_target_table, and parses the regparm
   and alignment options.  Runs once after all options are read.  */
185 #ifdef SUBTARGET_OVERRIDE_OPTIONS
186 SUBTARGET_OVERRIDE_OPTIONS;
189 /* Validate registers in register allocation order */
190 if (i386_reg_alloc_order)
192 for (i = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
196 case 'a': regno = 0; break;
197 case 'd': regno = 1; break;
198 case 'c': regno = 2; break;
199 case 'b': regno = 3; break;
200 case 'S': regno = 4; break;
201 case 'D': regno = 5; break;
202 case 'B': regno = 6; break;
204 default: fatal ("Register '%c' is unknown", ch);
/* Each register letter may appear at most once in the order string.  */
207 if (regs_allocated[regno])
208 fatal ("Register '%c' was already specified in the allocation order", ch);
210 regs_allocated[regno] = 1;
214 /* Get the architectural level. */
215 if (ix86_isa_string == (char *)0)
216 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
/* Look the ISA name up in the processor table; when -mcpu= was not
   given, the ISA name doubles as the scheduling CPU name.  */
218 for (i = 0; i < ptt_size; i++)
219 if (! strcmp (ix86_isa_string, processor_target_table[i].name))
221 ix86_isa = processor_target_table[i].processor;
222 if (ix86_cpu_string == (char *)0)
223 ix86_cpu_string = processor_target_table[i].name;
229 error ("bad value (%s) for -misa= switch", ix86_isa_string);
230 ix86_isa_string = PROCESSOR_DEFAULT_STRING;
231 ix86_isa = PROCESSOR_DEFAULT;
/* Resolve the scheduling CPU; it must not be older in the table than a
   PentiumPro-or-newer ISA selection.  */
234 for (j = 0; j < ptt_size; j++)
235 if (! strcmp (ix86_cpu_string, processor_target_table[j].name))
237 ix86_cpu = processor_target_table[j].processor;
238 if (i > j && (int)ix86_isa >= (int)PROCESSOR_PENTIUMPRO)
239 error ("-mcpu=%s does not support -march=%s", ix86_cpu_string, ix86_isa_string);
241 target_flags |= processor_target_table[j].target_enable;
242 target_flags &= ~processor_target_table[j].target_disable;
248 error ("bad value (%s) for -mcpu= switch", ix86_cpu_string);
249 ix86_cpu_string = PROCESSOR_DEFAULT_STRING;
250 ix86_cpu = PROCESSOR_DEFAULT;
253 /* Validate -mregparm= value */
254 if (i386_regparm_string)
256 i386_regparm = atoi (i386_regparm_string);
257 if (i386_regparm < 0 || i386_regparm > REGPARM_MAX)
258 fatal ("-mregparm=%d is not between 0 and %d", i386_regparm, REGPARM_MAX);
/* Default code alignment: 2 (4-byte) on a 386, 4 (16-byte) otherwise.  */
261 def_align = (TARGET_386) ? 2 : 4;
263 /* Validate -malign-loops= value, or provide default */
264 if (i386_align_loops_string)
266 i386_align_loops = atoi (i386_align_loops_string);
267 if (i386_align_loops < 0 || i386_align_loops > MAX_CODE_ALIGN)
268 fatal ("-malign-loops=%d is not between 0 and %d",
269 i386_align_loops, MAX_CODE_ALIGN);
272 i386_align_loops = 2;
274 /* Validate -malign-jumps= value, or provide default */
275 if (i386_align_jumps_string)
277 i386_align_jumps = atoi (i386_align_jumps_string);
278 if (i386_align_jumps < 0 || i386_align_jumps > MAX_CODE_ALIGN)
279 fatal ("-malign-jumps=%d is not between 0 and %d",
280 i386_align_jumps, MAX_CODE_ALIGN);
283 i386_align_jumps = def_align;
285 /* Validate -malign-functions= value, or provide default */
286 if (i386_align_funcs_string)
288 i386_align_funcs = atoi (i386_align_funcs_string);
289 if (i386_align_funcs < 0 || i386_align_funcs > MAX_CODE_ALIGN)
290 fatal ("-malign-functions=%d is not between 0 and %d",
291 i386_align_funcs, MAX_CODE_ALIGN);
294 i386_align_funcs = def_align;
296 if (TARGET_OMIT_LEAF_FRAME_POINTER) /* keep nonleaf frame pointers */
297 flag_omit_frame_pointer = 1;
299 /* pic references don't explicitly mention pic_offset_table_rtx */
301 target_flags &= ~MASK_SCHEDULE_PROLOGUE;
304 /* A C statement (sans semicolon) to choose the order in which to
305 allocate hard registers for pseudo-registers local to a basic
308 Store the desired register order in the array `reg_alloc_order'.
309 Element 0 should be the register to allocate first; element 1, the
310 next register; and so on.
312 The macro body should not assume anything about the contents of
313 `reg_alloc_order' before execution of the macro.
315 On most machines, it is not necessary to define this macro. */
/* Fill reg_alloc_order from the -mreg-alloc= string (already validated
   by option processing), then append every remaining hard register in
   natural order; with no user string, use plain natural order.  */
318 order_regs_for_local_alloc ()
320 int i, ch, order, regno;
322 /* User specified the register allocation order */
323 if (i386_reg_alloc_order)
325 for (i = order = 0; (ch = i386_reg_alloc_order[i]) != '\0'; i++)
329 case 'a': regno = 0; break;
330 case 'd': regno = 1; break;
331 case 'c': regno = 2; break;
332 case 'b': regno = 3; break;
333 case 'S': regno = 4; break;
334 case 'D': regno = 5; break;
335 case 'B': regno = 6; break;
338 reg_alloc_order[order++] = regno;
/* Registers not named by the user come after, in numeric order.  */
341 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
343 if (!regs_allocated[i])
344 reg_alloc_order[order++] = i;
348 /* If users did not specify a register allocation order, use natural order */
351 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
352 reg_alloc_order[i] = i;
/* OPTIMIZATION_OPTIONS hook: adjust optimization flags for level LEVEL.  */
358 optimization_options (level)
361 /* For -O2, and beyond, turn off -fschedule-insns by default. It tends to
362 make the problem with not enough registers even worse */
363 #ifdef INSN_SCHEDULING
365 flag_schedule_insns = 0;
369 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
370 attribute for DECL. The attributes in ATTRIBUTES have previously been
374 i386_valid_decl_attribute_p (decl, attributes, identifier, args)
383 /* Return nonzero if IDENTIFIER with arguments ARGS is a valid machine specific
384 attribute for TYPE. The attributes in ATTRIBUTES have previously been
388 i386_valid_type_attribute_p (type, attributes, identifier, args)
/* Only function-related tree nodes can carry these attributes.  */
394 if (TREE_CODE (type) != FUNCTION_TYPE
395 && TREE_CODE (type) != FIELD_DECL
396 && TREE_CODE (type) != TYPE_DECL)
399 /* Stdcall attribute says callee is responsible for popping arguments
400 if they are not variable. */
401 if (is_attribute_p ("stdcall", identifier))
402 return (args == NULL_TREE)
404 /* Cdecl attribute says the callee is a normal C declaration */
405 if (is_attribute_p ("cdecl", identifier))
406 return (args == NULL_TREE);
408 /* Regparm attribute specifies how many integer arguments are to be
409 passed in registers */
410 if (is_attribute_p ("regparm", identifier))
/* regparm requires exactly one argument, an integer constant in the
   range 0..REGPARM_MAX.  */
414 if (!args || TREE_CODE (args) != TREE_LIST
415 || TREE_CHAIN (args) != NULL_TREE
416 || TREE_VALUE (args) == NULL_TREE)
419 cst = TREE_VALUE (args);
420 if (TREE_CODE (cst) != INTEGER_CST)
423 if (TREE_INT_CST_HIGH (cst) != 0
424 || TREE_INT_CST_LOW (cst) < 0
425 || TREE_INT_CST_LOW (cst) > REGPARM_MAX)
434 /* Return 0 if the attributes for two types are incompatible, 1 if they
435 are compatible, and 2 if they are nearly compatible (which causes a
436 warning to be generated). */
439 i386_comp_type_attributes (type1, type2)
447 /* Value is the number of bytes of arguments automatically
448 popped when returning from a subroutine call.
449 FUNDECL is the declaration node of the function (as a tree),
450 FUNTYPE is the data type of the function (as a tree),
451 or for a library call it is an identifier node for the subroutine name.
452 SIZE is the number of bytes of arguments passed on the stack.
454 On the 80386, the RTD insn may be used to pop them if the number
455 of args is fixed, but if the number is variable then the caller
456 must pop them all. RTD can't be used for library calls now
457 because the library is compiled with the Unix compiler.
458 Use of RTD is a selectable option, since it is incompatible with
459 standard Unix calling sequences. If the option is not selected,
460 the caller must always pop the args.
462 The attribute stdcall is equivalent to RTD on a per module basis. */
465 i386_return_pops_args (fundecl, funtype, size)
470 int rtd = TARGET_RTD;
/* Library calls pass an IDENTIFIER_NODE and have no attributes.  */
472 if (TREE_CODE (funtype) == IDENTIFIER_NODE)
475 /* Cdecl functions override -mrtd, and never pop the stack */
476 if (!lookup_attribute ("cdecl", TYPE_ATTRIBUTES (funtype))) {
478 /* Stdcall functions will pop the stack if not variable args */
479 if (lookup_attribute ("stdcall", TYPE_ATTRIBUTES (funtype)))
/* A fixed arg list ends with void_type_node; only then may the callee pop.  */
483 && (TYPE_ARG_TYPES (funtype) == NULL_TREE
484 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (funtype))) == void_type_node)))
488 /* Lose any fake structure return argument */
489 if (aggregate_value_p (TREE_TYPE (funtype)))
490 return GET_MODE_SIZE (Pmode);
496 /* Argument support functions. */
498 /* Initialize a variable CUM of type CUMULATIVE_ARGS
499 for a call to a function whose data type is FNTYPE.
500 For a library call, FNTYPE is 0. */
503 init_cumulative_args (cum, fntype, libname)
504 CUMULATIVE_ARGS *cum; /* argument info to initialize */
505 tree fntype; /* tree ptr for function decl */
506 rtx libname; /* SYMBOL_REF of library name or 0 */
508 static CUMULATIVE_ARGS zero_cum;
509 tree param, next_param;
/* Under -mdebug-arg, trace the call being set up to stderr.  */
511 if (TARGET_DEBUG_ARG)
513 fprintf (stderr, "\ninit_cumulative_args (");
516 tree ret_type = TREE_TYPE (fntype);
517 fprintf (stderr, "fntype code = %s, ret code = %s",
518 tree_code_name[ (int)TREE_CODE (fntype) ],
519 tree_code_name[ (int)TREE_CODE (ret_type) ]);
522 fprintf (stderr, "no fntype");
525 fprintf (stderr, ", libname = %s", XSTR (libname, 0));
530 /* Set up the number of registers to use for passing arguments. */
/* Start from the global -mregparm value; a regparm attribute on the
   function type overrides it.  */
531 cum->nregs = i386_regparm;
534 tree attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (fntype));
536 cum->nregs = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
539 /* Determine if this function has variable arguments. This is
540 indicated by the last argument being 'void_type_mode' if there
541 are no variable arguments. If there are variable arguments, then
542 we won't pass anything in registers */
546 for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
550 next_param = TREE_CHAIN (param);
551 if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
556 if (TARGET_DEBUG_ARG)
557 fprintf (stderr, ", nregs=%d )\n", cum->nregs);
562 /* Update the data in CUM to advance over an argument
563 of mode MODE and data type TYPE.
564 (TYPE is null for libcalls where that information may not be available.) */
567 function_arg_advance (cum, mode, type, named)
568 CUMULATIVE_ARGS *cum; /* current arg information */
569 enum machine_mode mode; /* current arg mode */
570 tree type; /* type of the argument or 0 if lib support */
571 int named; /* whether or not the argument was named */
/* Size of the argument in bytes, rounded up to whole words; BLKmode
   arguments take their size from the type instead of the mode.  */
573 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
574 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
576 if (TARGET_DEBUG_ARG)
578 "function_adv( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d )\n\n",
579 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
594 /* Define where to put the arguments to a function.
595 Value is zero to push the argument on the stack,
596 or a hard register in which to store the argument.
598 MODE is the argument's machine mode.
599 TYPE is the data type of the argument (as a tree).
600 This is null for libcalls where that information may
602 CUM is a variable of type CUMULATIVE_ARGS which gives info about
603 the preceding args and about the function being called.
604 NAMED is nonzero if this argument is a named parameter
605 (otherwise it is an extra parameter matching an ellipsis). */
608 function_arg (cum, mode, type, named)
609 CUMULATIVE_ARGS *cum; /* current arg information */
610 enum machine_mode mode; /* current arg mode */
611 tree type; /* type of the argument or 0 if lib support */
612 int named; /* != 0 for normal args, == 0 for ... args */
/* Word-rounded size, as in function_arg_advance.  */
615 int bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
616 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
620 default: /* for now, pass fp/complex values on the stack */
/* Use a register only when the whole argument fits in the remaining
   regparm registers; otherwise it goes on the stack (ret stays 0).  */
628 if (words <= cum->nregs)
629 ret = gen_rtx (REG, mode, cum->regno);
633 if (TARGET_DEBUG_ARG)
636 "function_arg( size=%d, words=%2d, nregs=%d, mode=%4s, named=%d",
637 words, cum->words, cum->nregs, GET_MODE_NAME (mode), named);
640 fprintf (stderr, ", reg=%%e%s", reg_names[ REGNO(ret) ]);
642 fprintf (stderr, ", stack");
644 fprintf (stderr, " )\n");
650 /* For an arg passed partly in registers and partly in memory,
651 this is the number of registers used.
652 For args passed entirely in registers or entirely in memory, zero. */
655 function_arg_partial_nregs (cum, mode, type, named)
656 CUMULATIVE_ARGS *cum; /* current arg information */
657 enum machine_mode mode; /* current arg mode */
658 tree type; /* type of the argument or 0 if lib support */
659 int named; /* != 0 for normal args, == 0 for ... args */
665 /* Output an insn whose source is a 386 integer register. SRC is the
666 rtx for the register, and TEMPLATE is the op-code template. SRC may
667 be either SImode or DImode.
669 The template will be output with operands[0] as SRC, and operands[1]
670 as a pointer to the top of the 386 stack. So a call from floatsidf2
671 would look like this:
673 output_op_from_reg (operands[1], AS1 (fild%z0,%1));
675 where %z0 corresponds to the caller's operands[1], and is used to
676 emit the proper size suffix.
678 ??? Extend this to handle HImode - a 387 can load and store HImode
682 output_op_from_reg (src, template)
687 int size = GET_MODE_SIZE (GET_MODE (src));
690 xops[1] = AT_SP (Pmode);
691 xops[2] = GEN_INT (size);
692 xops[3] = stack_pointer_rtx;
/* Push SRC onto the 386 stack word by word, high word first, so the
   value sits in memory for the 387 template to consume.  */
694 if (size > UNITS_PER_WORD)
697 if (size > 2 * UNITS_PER_WORD)
699 high = gen_rtx (REG, SImode, REGNO (src) + 2);
700 output_asm_insn (AS1 (push%L0,%0), &high);
702 high = gen_rtx (REG, SImode, REGNO (src) + 1);
703 output_asm_insn (AS1 (push%L0,%0), &high);
705 output_asm_insn (AS1 (push%L0,%0), &src);
707 output_asm_insn (template, xops);
/* Pop the temporary back off by bumping the stack pointer.  */
709 output_asm_insn (AS2 (add%L3,%2,%3), xops);
712 /* Output an insn to pop an value from the 387 top-of-stack to 386
713 register DEST. The 387 register stack is popped if DIES is true. If
714 the mode of DEST is an integer mode, a `fist' integer store is done,
715 otherwise a `fst' float store is done. */
718 output_to_reg (dest, dies)
723 int size = GET_MODE_SIZE (GET_MODE (dest));
725 xops[0] = AT_SP (Pmode);
726 xops[1] = stack_pointer_rtx;
727 xops[2] = GEN_INT (size);
/* Reserve stack space, store the 387 value there, then pop it into
   the destination register(s).  */
730 output_asm_insn (AS2 (sub%L1,%2,%1), xops);
732 if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_INT)
735 output_asm_insn (AS1 (fistp%z3,%y0), xops);
737 output_asm_insn (AS1 (fist%z3,%y0), xops);
739 else if (GET_MODE_CLASS (GET_MODE (dest)) == MODE_FLOAT)
742 output_asm_insn (AS1 (fstp%z3,%y0), xops);
/* There is no non-popping XFmode store; store-with-pop then reload.  */
745 if (GET_MODE (dest) == XFmode)
747 output_asm_insn (AS1 (fstp%z3,%y0), xops);
748 output_asm_insn (AS1 (fld%z3,%y0), xops);
751 output_asm_insn (AS1 (fst%z3,%y0), xops);
757 output_asm_insn (AS1 (pop%L0,%0), &dest);
/* Multi-word destinations pop into consecutive registers.  */
759 if (size > UNITS_PER_WORD)
761 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
762 output_asm_insn (AS1 (pop%L0,%0), &dest);
763 if (size > 2 * UNITS_PER_WORD)
765 dest = gen_rtx (REG, SImode, REGNO (dest) + 1);
766 output_asm_insn (AS1 (pop%L0,%0), &dest);
/* Return the assembler template for a one-word move described by
   OPERANDS[0] (dest) and OPERANDS[1] (src); a push through a PRE_DEC
   of the stack pointer is emitted directly.  */
772 singlemove_string (operands)
776 if (GET_CODE (operands[0]) == MEM
777 && GET_CODE (x = XEXP (operands[0], 0)) == PRE_DEC)
/* Only predecrement of the stack pointer (i.e. a push) is supported.  */
779 if (XEXP (x, 0) != stack_pointer_rtx)
783 else if (GET_CODE (operands[1]) == CONST_DOUBLE)
785 return output_move_const_single (operands);
787 else if (GET_CODE (operands[0]) == REG || GET_CODE (operands[1]) == REG)
788 return AS2 (mov%L0,%1,%0);
789 else if (CONSTANT_P (operands[1]))
790 return AS2 (mov%L0,%1,%0);
793 output_asm_insn ("push%L1 %1", operands);
798 /* Return a REG that occurs in ADDR with coefficient 1.
799 ADDR can be effectively incremented by incrementing REG. */
/* Walk down PLUS chains, discarding constant terms, until a bare
   register is found.  */
805 while (GET_CODE (addr) == PLUS)
807 if (GET_CODE (XEXP (addr, 0)) == REG)
808 addr = XEXP (addr, 0);
809 else if (GET_CODE (XEXP (addr, 1)) == REG)
810 addr = XEXP (addr, 1);
811 else if (CONSTANT_P (XEXP (addr, 0)))
812 addr = XEXP (addr, 1);
813 else if (CONSTANT_P (XEXP (addr, 1)))
814 addr = XEXP (addr, 0);
818 if (GET_CODE (addr) == REG)
824 /* Output an insn to add the constant N to the register X. */
/* Use inc/dec for +/-1, sub for other negative N, add otherwise.  */
835 output_asm_insn (AS1 (dec%L0,%0), xops);
837 output_asm_insn (AS1 (inc%L0,%0), xops);
840 xops[1] = GEN_INT (-n);
841 output_asm_insn (AS2 (sub%L0,%1,%0), xops);
845 xops[1] = GEN_INT (n);
846 output_asm_insn (AS2 (add%L0,%1,%0), xops);
851 /* Output assembler code to perform a doubleword move insn
852 with operands OPERANDS. */
855 output_move_double (operands)
858 enum {REGOP, OFFSOP, MEMOP, PUSHOP, POPOP, CNSTOP, RNDOP } optype0, optype1;
862 rtx addreg0 = 0, addreg1 = 0;
863 int dest_overlapped_low = 0;
864 int size = GET_MODE_SIZE (GET_MODE (operands[0]));
869 /* First classify both operands. */
871 if (REG_P (operands[0]))
873 else if (offsettable_memref_p (operands[0]))
875 else if (GET_CODE (XEXP (operands[0], 0)) == POST_INC)
877 else if (GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
879 else if (GET_CODE (operands[0]) == MEM
884 if (REG_P (operands[1]))
886 else if (CONSTANT_P (operands[1]))
888 else if (offsettable_memref_p (operands[1]))
890 else if (GET_CODE (XEXP (operands[1], 0)) == POST_INC)
892 else if (GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
894 else if (GET_CODE (operands[1]) == MEM)
899 /* Check for the cases that the operand constraints are not
900 supposed to allow to happen. Abort if we get one,
901 because generating code for these cases is painful. */
903 if (optype0 == RNDOP || optype1 == RNDOP)
906 /* If one operand is decrementing and one is incrementing
907 decrement the former register explicitly
908 and change that operand into ordinary indexing. */
910 if (optype0 == PUSHOP && optype1 == POPOP)
912 /* ??? Can this ever happen on i386? */
913 operands[0] = XEXP (XEXP (operands[0], 0), 0);
914 asm_add (-size, operands[0]);
915 if (GET_MODE (operands[1]) == XFmode)
916 operands[0] = gen_rtx (MEM, XFmode, operands[0]);
917 else if (GET_MODE (operands[0]) == DFmode)
918 operands[0] = gen_rtx (MEM, DFmode, operands[0]);
920 operands[0] = gen_rtx (MEM, DImode, operands[0]);
924 if (optype0 == POPOP && optype1 == PUSHOP)
926 /* ??? Can this ever happen on i386? */
927 operands[1] = XEXP (XEXP (operands[1], 0), 0);
928 asm_add (-size, operands[1]);
929 if (GET_MODE (operands[1]) == XFmode)
930 operands[1] = gen_rtx (MEM, XFmode, operands[1]);
931 else if (GET_MODE (operands[1]) == DFmode)
932 operands[1] = gen_rtx (MEM, DFmode, operands[1]);
934 operands[1] = gen_rtx (MEM, DImode, operands[1]);
938 /* If an operand is an unoffsettable memory ref, find a register
939 we can increment temporarily to make it refer to the second word. */
941 if (optype0 == MEMOP)
942 addreg0 = find_addr_reg (XEXP (operands[0], 0));
944 if (optype1 == MEMOP)
945 addreg1 = find_addr_reg (XEXP (operands[1], 0));
947 /* Ok, we can do one word at a time.
948 Normally we do the low-numbered word first,
949 but if either operand is autodecrementing then we
950 do the high-numbered word first.
952 In either case, set up in LATEHALF the operands to use
953 for the high-numbered word and in some cases alter the
954 operands in OPERANDS to be suitable for the low-numbered word. */
/* Three-word (XFmode, size 12) moves also need a MIDDLEHALF.  */
958 if (optype0 == REGOP)
960 middlehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
961 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 2);
963 else if (optype0 == OFFSOP)
965 middlehalf[0] = adj_offsettable_operand (operands[0], 4);
966 latehalf[0] = adj_offsettable_operand (operands[0], 8);
970 middlehalf[0] = operands[0];
971 latehalf[0] = operands[0];
974 if (optype1 == REGOP)
976 middlehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
977 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 2);
979 else if (optype1 == OFFSOP)
981 middlehalf[1] = adj_offsettable_operand (operands[1], 4);
982 latehalf[1] = adj_offsettable_operand (operands[1], 8);
984 else if (optype1 == CNSTOP)
986 if (GET_CODE (operands[1]) == CONST_DOUBLE)
988 REAL_VALUE_TYPE r; long l[3];
/* Split a long-double constant into its three 32-bit words.  */
990 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
991 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, l);
992 operands[1] = GEN_INT (l[0]);
993 middlehalf[1] = GEN_INT (l[1]);
994 latehalf[1] = GEN_INT (l[2]);
996 else if (CONSTANT_P (operands[1]))
997 /* No non-CONST_DOUBLE constant should ever appear here. */
1002 middlehalf[1] = operands[1];
1003 latehalf[1] = operands[1];
1006 else /* size is not 12: */
1008 if (optype0 == REGOP)
1009 latehalf[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
1010 else if (optype0 == OFFSOP)
1011 latehalf[0] = adj_offsettable_operand (operands[0], 4);
1013 latehalf[0] = operands[0];
1015 if (optype1 == REGOP)
1016 latehalf[1] = gen_rtx (REG, SImode, REGNO (operands[1]) + 1);
1017 else if (optype1 == OFFSOP)
1018 latehalf[1] = adj_offsettable_operand (operands[1], 4);
1019 else if (optype1 == CNSTOP)
1020 split_double (operands[1], &operands[1], &latehalf[1]);
1022 latehalf[1] = operands[1];
1025 /* If insn is effectively movd N (sp),-(sp) then we will do the
1026 high word first. We should use the adjusted operand 1
1027 (which is N+4 (sp) or N+8 (sp))
1028 for the low word and middle word as well,
1029 to compensate for the first decrement of sp. */
1030 if (optype0 == PUSHOP
1031 && REGNO (XEXP (XEXP (operands[0], 0), 0)) == STACK_POINTER_REGNUM
1032 && reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
1033 middlehalf[1] = operands[1] = latehalf[1];
1035 /* For (set (reg:DI N) (mem:DI ... (reg:SI N) ...)),
1036 if the upper part of reg N does not appear in the MEM, arrange to
1037 emit the move late-half first. Otherwise, compute the MEM address
1038 into the upper part of N and use that as a pointer to the memory
1040 if (optype0 == REGOP
1041 && (optype1 == OFFSOP || optype1 == MEMOP))
1043 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1044 && reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1046 /* If both halves of dest are used in the src memory address,
1047 compute the address into latehalf of dest. */
1049 xops[0] = latehalf[0];
1050 xops[1] = XEXP (operands[1], 0);
1051 output_asm_insn (AS2 (lea%L0,%a1,%0), xops);
1052 if( GET_MODE (operands[1]) == XFmode )
1055 operands[1] = gen_rtx (MEM, XFmode, latehalf[0]);
1056 middlehalf[1] = adj_offsettable_operand (operands[1], size-8);
1057 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1061 operands[1] = gen_rtx (MEM, DImode, latehalf[0]);
1062 latehalf[1] = adj_offsettable_operand (operands[1], size-4);
1066 && reg_mentioned_p (middlehalf[0], XEXP (operands[1], 0)))
1068 /* Check for two regs used by both source and dest. */
1069 if (reg_mentioned_p (operands[0], XEXP (operands[1], 0))
1070 || reg_mentioned_p (latehalf[0], XEXP (operands[1], 0)))
1073 /* JRV says this can't happen: */
1074 if (addreg0 || addreg1)
1077 /* Only the middle reg conflicts; simply put it last. */
1078 output_asm_insn (singlemove_string (operands), operands);
1079 output_asm_insn (singlemove_string (latehalf), latehalf);
1080 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1083 else if (reg_mentioned_p (operands[0], XEXP (operands[1], 0)))
1084 /* If the low half of dest is mentioned in the source memory
1085 address, the arrange to emit the move late half first. */
1086 dest_overlapped_low = 1;
1089 /* If one or both operands autodecrementing,
1090 do the two words, high-numbered first. */
1092 /* Likewise, the first move would clobber the source of the second one,
1093 do them in the other order. This happens only for registers;
1094 such overlap can't happen in memory unless the user explicitly
1095 sets it up, and that is an undefined circumstance. */
1098 if (optype0 == PUSHOP || optype1 == PUSHOP
1099 || (optype0 == REGOP && optype1 == REGOP
1100 && REGNO (operands[0]) == REGNO (latehalf[1]))
1101 || dest_overlapped_low)
1103 if (optype0 == PUSHOP || optype1 == PUSHOP
1104 || (optype0 == REGOP && optype1 == REGOP
1105 && ((middlehalf[1] && REGNO (operands[0]) == REGNO (middlehalf[1]))
1106 || REGNO (operands[0]) == REGNO (latehalf[1])))
1107 || dest_overlapped_low)
1109 /* Make any unoffsettable addresses point at high-numbered word. */
1111 asm_add (size-4, addreg0);
1113 asm_add (size-4, addreg1);
/* Emit the words highest-addressed first, stepping the temporary
   address registers back down 4 bytes between words.  */
1116 output_asm_insn (singlemove_string (latehalf), latehalf);
1118 /* Undo the adds we just did. */
1120 asm_add (-4, addreg0);
1122 asm_add (-4, addreg1);
1126 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1128 asm_add (-4, addreg0);
1130 asm_add (-4, addreg1);
1133 /* Do low-numbered word. */
1134 return singlemove_string (operands);
1137 /* Normal case: do the two words, low-numbered first. */
1139 output_asm_insn (singlemove_string (operands), operands);
1141 /* Do the middle one of the three words for long double */
1145 asm_add (4, addreg0);
1147 asm_add (4, addreg1);
1149 output_asm_insn (singlemove_string (middlehalf), middlehalf);
1152 /* Make any unoffsettable addresses point at high-numbered word. */
1154 asm_add (4, addreg0);
1156 asm_add (4, addreg1);
1159 output_asm_insn (singlemove_string (latehalf), latehalf);
1161 /* Undo the adds we just did. */
1163 asm_add (4-size, addreg0);
1165 asm_add (4-size, addreg1);
1171 #define MAX_TMPS 2 /* max temporary registers used */
1173 /* Output the appropriate code to move push memory on the stack */
/* Push LENGTH bytes at SRC (an offsettable MEM) onto the stack.
   Operands TMP_START..N_OPERANDS-1 supply scratch registers when the
   source overlaps the stack pointer; otherwise plain pushes suffice.  */
1176 output_move_pushmem (operands, insn, length, tmp_start, n_operands)
1188 } tmp_info[MAX_TMPS];
1190 rtx src = operands[1];
1193 int stack_p = reg_overlap_mentioned_p (stack_pointer_rtx, src);
1194 int stack_offset = 0;
1198 if (!offsettable_memref_p (src))
1199 fatal_insn ("Source is not offsettable", insn);
1201 if ((length & 3) != 0)
1202 fatal_insn ("Pushing non-word aligned size", insn);
1204 /* Figure out which temporary registers we have available */
1205 for (i = tmp_start; i < n_operands; i++)
1207 if (GET_CODE (operands[i]) == REG)
1209 if (reg_overlap_mentioned_p (operands[i], src))
1212 tmp_info[ max_tmps++ ].xops[1] = operands[i];
1213 if (max_tmps == MAX_TMPS)
/* No scratch needed: push the words directly, highest offset first.  */
1219 for (offset = length - 4; offset >= 0; offset -= 4)
1221 xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1222 output_asm_insn (AS1(push%L0,%0), xops);
/* Otherwise load up to MAX_TMPS words into scratch registers, then
   push them, tracking how far the stack pointer has moved.  */
1228 for (offset = length - 4; offset >= 0; )
1230 for (num_tmps = 0; num_tmps < max_tmps && offset >= 0; num_tmps++)
1232 tmp_info[num_tmps].load = AS2(mov%L0,%0,%1);
1233 tmp_info[num_tmps].push = AS1(push%L0,%1);
1234 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (src, offset + stack_offset);
1238 for (i = 0; i < num_tmps; i++)
1239 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1241 for (i = 0; i < num_tmps; i++)
1242 output_asm_insn (tmp_info[i].push, tmp_info[i].xops);
1245 stack_offset += 4*num_tmps;
1253 /* Output the appropriate code to move data between two memory locations */
/* Emit assembly for a LENGTH-byte memory-to-memory copy from SRC
   (operands[1]) to DEST (operands[0]) using scratch registers found in
   operands[tmp_start..n_operands-1].  Copies 4 bytes at a time, then 2,
   then a final odd byte via a byte-capable (QI) register.
   NOTE(review): this listing is elided; missing lines are not documented.  */
1256 output_move_memory (operands, insn, length, tmp_start, n_operands)
1267 } tmp_info[MAX_TMPS];
1269 rtx dest = operands[0];
1270 rtx src = operands[1];
/* qi_tmp will hold a register usable in byte mode, needed only when
   LENGTH is odd.  */
1271 rtx qi_tmp = NULL_RTX;
/* A store through (pre_inc %esp) is really a push; delegate.  */
1277 if (GET_CODE (dest) == MEM
1278 && GET_CODE (XEXP (dest, 0)) == PRE_INC
1279 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx)
1280 return output_move_pushmem (operands, insn, length, tmp_start, n_operands);
/* Both addresses must tolerate added constant offsets.  */
1282 if (!offsettable_memref_p (src))
1283 fatal_insn ("Source is not offsettable", insn);
1285 if (!offsettable_memref_p (dest))
1286 fatal_insn ("Destination is not offsettable", insn);
1288 /* Figure out which temporary registers we have available */
1289 for (i = tmp_start; i < n_operands; i++)
1291 if (GET_CODE (operands[i]) == REG)
/* Remember the first byte-addressable scratch for the odd tail byte.  */
1293 if ((length & 1) != 0 && !qi_tmp && QI_REG_P (operands[i]))
1294 qi_tmp = operands[i];
/* A scratch overlapping either memory operand would corrupt the copy.  */
1296 if (reg_overlap_mentioned_p (operands[i], dest))
1297 fatal_insn ("Temporary register overlaps the destination", insn);
1299 if (reg_overlap_mentioned_p (operands[i], src))
1300 fatal_insn ("Temporary register overlaps the source", insn);
1302 tmp_info[ max_tmps++ ].xops[2] = operands[i];
1303 if (max_tmps == MAX_TMPS)
1309 fatal_insn ("No scratch registers were found to do memory->memory moves", insn);
1311 if ((length & 1) != 0)
1314 fatal_insn ("No byte register found when moving odd # of bytes.", insn);
/* Main loop: fill up to max_tmps load/store pairs — word-sized (%L)
   while >= 4 bytes remain, halfword (%W) while >= 2 remain.  */
1319 for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
1323 tmp_info[num_tmps].load = AS2(mov%L0,%1,%2);
1324 tmp_info[num_tmps].store = AS2(mov%L0,%2,%0);
1325 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1326 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
1330 else if (length >= 2)
1332 tmp_info[num_tmps].load = AS2(mov%W0,%1,%2);
1333 tmp_info[num_tmps].store = AS2(mov%W0,%2,%0);
1334 tmp_info[num_tmps].xops[0] = adj_offsettable_operand (dest, offset);
1335 tmp_info[num_tmps].xops[1] = adj_offsettable_operand (src, offset);
/* Emit all loads before all stores for each batch.  */
1343 for (i = 0; i < num_tmps; i++)
1344 output_asm_insn (tmp_info[i].load, tmp_info[i].xops);
1346 for (i = 0; i < num_tmps; i++)
1347 output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
/* Trailing odd byte: copy through the QI scratch (%B = byte suffix).  */
1352 xops[0] = adj_offsettable_operand (dest, offset);
1353 xops[1] = adj_offsettable_operand (src, offset);
1355 output_asm_insn (AS2(mov%B0,%1,%2), xops);
1356 output_asm_insn (AS2(mov%B0,%2,%0), xops);
/* Test whether CONST_DOUBLE X is an 80387 built-in constant (+0.0 or
   +1.0, loadable with fldz/fld1).  Float-trap protection via
   setjmp/set_float_handler guards the conversion from the constant.
   NOTE(review): the return-value selection based on is0/is1 is elided
   from this listing.  */
1364 standard_80387_constant_p (x)
1367 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* If the REAL_VALUE conversion traps, control returns here.  */
1372 if (setjmp (handler))
1375 set_float_handler (handler);
1376 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
/* +0.0 only: -0.0 has a different bit pattern than fldz produces.  */
1377 is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
1378 is1 = REAL_VALUES_EQUAL (d, dconst1);
/* Restore default trap handling.  */
1379 set_float_handler (NULL_PTR);
1387 /* Note that on the 80387, other constants, such as pi,
1388 are much slower to load as standard constants
1389 than to load from doubles in memory! */
/* Return the assembler template for moving a floating constant
   (operands[1]) into operands[0].  FP-stack destinations may use the
   80387 built-in constants; otherwise the constant is converted to its
   32-bit target image and moved as an integer.
   NOTE(review): listing is elided (e.g. the fldz/fld1 emission and the
   XFmode branch body are missing here).  */
1396 output_move_const_single (operands)
1399 if (FP_REG_P (operands[0]))
/* conval identifies a standard 387 constant (see
   standard_80387_constant_p).  */
1401 int conval = standard_80387_constant_p (operands[1]);
1409 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1411 REAL_VALUE_TYPE r; long l;
1413 if (GET_MODE (operands[1]) == XFmode)
/* Reduce the constant to its single-precision target bit pattern and
   substitute it as an integer immediate.  */
1416 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
1417 REAL_VALUE_TO_TARGET_SINGLE (r, l);
1418 operands[1] = GEN_INT (l);
1420 return singlemove_string (operands);
1423 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
1424 reference and a constant. */
/* Predicate: nonzero if OP is a SYMBOL_REF/LABEL_REF, or (per the
   visible PLUS case, presumably inside a CONST) symbol-or-label plus a
   CONST_INT.  MODE is unused in the visible code.
   NOTE(review): listing is elided; the switch cases for the plain
   symbol/label codes are missing here.  */
1427 symbolic_operand (op, mode)
1429 enum machine_mode mode;
1431 switch (GET_CODE (op))
/* symbol/label + integer offset.  */
1438 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1439 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1440 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1446 /* Test for a valid operand for a call instruction.
1447 Don't allow the arg pointer register or virtual regs
1448 since they may change into reg + const, which the patterns
1449 can't handle yet. */
/* Predicate for a valid call target: a MEM whose address is either a
   constant address that is also a general_operand (the extra check
   matters for PIC), or a hard/pseudo register that is neither the arg
   pointer nor a virtual register (those may later become reg+const,
   which the call patterns cannot handle).  */
1452 call_insn_operand (op, mode)
1454 enum machine_mode mode;
1456 if (GET_CODE (op) == MEM
1457 && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
1458 /* This makes a difference for PIC. */
1459 && general_operand (XEXP (op, 0), Pmode))
1460 || (GET_CODE (XEXP (op, 0)) == REG
1461 && XEXP (op, 0) != arg_pointer_rtx
/* Exclude the virtual register range (FIRST_PSEUDO_REGISTER ..
   LAST_VIRTUAL_REGISTER).  */
1462 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1463 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1468 /* Like call_insn_operand but allow (mem (symbol_ref ...))
/* Like call_insn_operand, but without the general_operand restriction
   on constant addresses — so (mem (symbol_ref ...)) is accepted even
   when PIC would reject it in call_insn_operand.  */
1472 expander_call_insn_operand (op, mode)
1474 enum machine_mode mode;
1476 if (GET_CODE (op) == MEM
1477 && (CONSTANT_ADDRESS_P (XEXP (op, 0))
1478 || (GET_CODE (XEXP (op, 0)) == REG
1479 && XEXP (op, 0) != arg_pointer_rtx
/* Reject virtual registers, as in call_insn_operand.  */
1480 && !(REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
1481 && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
1486 /* Return 1 if OP is a comparison operator that can use the condition code
1487 generated by an arithmetic operation. */
/* Predicate: OP is a comparison operator whose result can be taken
   directly from the flags set by an arithmetic instruction.  GT and LE
   are excluded; all other comparison codes are accepted.  */
1490 arithmetic_comparison_operator (op, mode)
1492 enum machine_mode mode;
/* Mode must match unless the caller passed VOIDmode (wildcard).  */
1496 if (mode != VOIDmode && mode != GET_MODE (op))
1498 code = GET_CODE (op);
/* Must be an RTX of comparison class.  */
1499 if (GET_RTX_CLASS (code) != '<')
1502 return (code != GT && code != LE);
1505 /* Returns 1 if OP contains a symbol reference */
/* Return nonzero if OP contains a SYMBOL_REF or LABEL_REF anywhere in
   its RTL tree.  Recurses through 'e' (expression) and 'E' (vector)
   operands using the RTX format string.  */
1508 symbolic_reference_mentioned_p (op)
/* Direct hit at this node.  */
1514 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1517 fmt = GET_RTX_FORMAT (GET_CODE (op));
1518 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
/* 'E': vector operand — scan each element.  */
1524 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1525 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
/* 'e': single sub-expression.  */
1528 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1535 /* Attempt to expand a binary operator. Make the expansion closer to the
1536 actual machine, then just general_operand, which will allow 3 separate
1537 memory references (one output, two input) in a single insn. Return
1538 whether the insn fails, or succeeds. */
/* Expand a binary operator CODE in MODE over OPERANDS, massaging the
   operands (commutation, forcing memory/constants into registers) so
   the resulting insn satisfies ix86_binary_operator_ok.  Per the
   preceding file comment, returns whether the expansion succeeds.
   NOTE(review): listing is elided; several assignments (e.g. the swap
   completion and `modified` updates) are missing here.  */
1541 ix86_expand_binary_operator (code, mode, operands)
1543 enum machine_mode mode;
1550 /* Recognize <var1> = <value> <op> <var1> for commutative operators */
1551 if (GET_RTX_CLASS (code) == 'c'
1552 && (rtx_equal_p (operands[0], operands[2])
1553 || immediate_operand (operands[1], mode)))
/* Swap operands[1] and operands[2] (swap completion elided).  */
1555 rtx temp = operands[1];
1556 operands[1] = operands[2];
1560 /* If optimizing, copy to regs to improve CSE */
1561 if (TARGET_PSEUDO && optimize && ((reload_in_progress | reload_completed) == 0))
/* Only legal to create pseudos before reload.  */
1563 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1564 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1566 if (GET_CODE (operands[2]) == MEM)
1567 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
/* const - x cannot be swapped (MINUS is not commutative); move the
   constant into a fresh register instead.  */
1569 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1571 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1572 emit_move_insn (temp, operands[1]);
1578 if (!ix86_binary_operator_ok (code, mode, operands))
1580 /* If not optimizing, try to make a valid insn (optimize code previously did
1581 this above to improve chances of CSE) */
1583 if ((!TARGET_PSEUDO || !optimize)
1584 && ((reload_in_progress | reload_completed) == 0)
1585 && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
/* Same fix-ups as the optimizing path above.  */
1588 if (GET_CODE (operands[1]) == MEM && !rtx_equal_p (operands[0], operands[1]))
1590 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1594 if (GET_CODE (operands[2]) == MEM)
1596 operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
1600 if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
1602 rtx temp = gen_reg_rtx (GET_MODE (operands[0]));
1603 emit_move_insn (temp, operands[1]);
/* After modification, the operands must now be acceptable.  */
1608 if (modified && !ix86_binary_operator_ok (code, mode, operands))
1618 /* Return TRUE or FALSE depending on whether the binary operator meets the
1619 appropriate constraints. */
/* Return TRUE if OPERANDS satisfy the constraints for binary operator
   CODE: at most one of the two inputs may be a MEM, and a CONST_INT
   first input is only allowed for commutative codes (class 'c'), since
   it could then be swapped into the second position.  */
1622 ix86_binary_operator_ok (code, mode, operands)
1624 enum machine_mode mode;
1627 return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
1628 && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
1631 /* Attempt to expand a unary operator. Make the expansion closer to the
1632 actual machine, then just general_operand, which will allow 2 separate
1633 memory references (one output, one input) in a single insn. Return
1634 whether the insn fails, or succeeds. */
/* Expand a unary operator CODE in MODE over OPERANDS, forcing a MEM
   input into a register (before reload only) so the insn satisfies
   ix86_unary_operator_ok.  Per the preceding file comment, returns
   whether the expansion succeeds.
   NOTE(review): listing is elided; the condition opening at 1647 and
   the return paths are incomplete here.  */
1637 ix86_expand_unary_operator (code, mode, operands)
1639 enum machine_mode mode;
1644 /* If optimizing, copy to regs to improve CSE */
1647 && ((reload_in_progress | reload_completed) == 0)
1648 && GET_CODE (operands[1]) == MEM
1650 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1653 if (!ix86_unary_operator_ok (code, mode, operands))
/* Non-optimizing fallback, mirroring ix86_expand_binary_operator.  */
1655 if ((!TARGET_PSEUDO || !optimize)
1656 && ((reload_in_progress | reload_completed) == 0)
1657 && GET_CODE (operands[1]) == MEM
1659 operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
1660 if (!ix86_unary_operator_ok (code, mode, operands))
1670 /* Return TRUE or FALSE depending on whether the unary operator meets the
1671 appropriate constraints. */
/* Return TRUE/FALSE for whether a unary operator's operands meet the
   insn constraints (see the preceding file comment).
   NOTE(review): the body is entirely elided from this listing.  */
1674 ix86_unary_operator_ok (code, mode, operands)
1676 enum machine_mode mode;
1684 static rtx pic_label_rtx;
1686 /* This function generates code for -fpic that loads %ebx with
1687 with the return address of the caller and then returns. */
/* Before a function's body, emit the PIC thunk used with
   TARGET_DEEP_BRANCH_PREDICTION: a label whose code loads the return
   address (top of stack) into the PIC register and returns.  Per the
   preceding file comment, this loads %ebx with the caller's return
   address.  */
1689 asm_output_function_prefix (file, name)
1694 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1695 || current_function_uses_const_pool);
1696 xops[0] = pic_offset_table_rtx;
1697 xops[1] = stack_pointer_rtx;
1699 /* deep branch prediction favors having a return for every call */
1700 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
/* Lazily create the shared thunk label; function_prologue emits the
   matching call.  */
1702 if (pic_label_rtx == 0)
1703 pic_label_rtx = (rtx) gen_label_rtx ();
1704 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (pic_label_rtx));
/* movl (%esp),%pic_reg — fetch the return address pushed by the call.  */
1705 output_asm_insn ("movl (%1),%0", xops);
1706 output_asm_insn ("ret", xops);
1710 /* Set up the stack and frame (if desired) for the function. */
/* Text-mode prologue emitter (used when TARGET_SCHEDULE_PROLOGUE is
   off — see the early-return check below; the RTL version is
   ix86_expand_prologue).  Sets up the frame pointer, allocates the
   frame, saves call-saved registers, and materializes the PIC register.
   NOTE(review): listing is elided; some intermediate lines are missing.  */
1713 function_prologue (file, size)
1720 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1721 || current_function_uses_const_pool);
1722 long tsize = get_frame_size ();
1724 /* pic references don't explicitly mention pic_offset_table_rtx */
/* When the RTL prologue is in use, this text emitter does nothing.  */
1725 if (TARGET_SCHEDULE_PROLOGUE)
1728 xops[0] = stack_pointer_rtx;
1729 xops[1] = frame_pointer_rtx;
1730 xops[2] = GEN_INT (tsize);
1731 if (frame_pointer_needed)
/* push %ebp; movl %esp,%ebp  */
1733 output_asm_insn ("push%L1 %1", xops);
1734 output_asm_insn (AS2 (mov%L0,%0,%1), xops);
/* subl $tsize,%esp — allocate the local frame.  */
1738 output_asm_insn (AS2 (sub%L0,%2,%0), xops);
1740 /* Note If use enter it is NOT reversed args.
1741 This one is not reversed from intel!!
1742 I think enter is slower. Also sdb doesn't like it.
1743 But if you want it the code is:
1745 xops[3] = const0_rtx;
1746 output_asm_insn ("enter %2,%3", xops);
/* Push call-saved registers that are live, plus the PIC register when
   needed.  */
1749 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1750 for (regno = limit - 1; regno >= 0; regno--)
1751 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1752 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1754 xops[0] = gen_rtx (REG, SImode, regno);
1755 output_asm_insn ("push%L0 %0", xops);
/* PIC setup, deep-branch-prediction variant: call the shared thunk
   (emitted by asm_output_function_prefix) then add the GOT offset.  */
1758 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1760 xops[0] = pic_offset_table_rtx;
1761 if (pic_label_rtx == 0)
1762 pic_label_rtx = (rtx) gen_label_rtx ();
1763 xops[1] = pic_label_rtx;
1765 output_asm_insn (AS1 (call,%P1), xops);
1766 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_,%0", xops);
/* Plain PIC setup: call next label / pop to get PC, then add the
   label-relative GOT displacement.  */
1768 else if (pic_reg_used)
1770 xops[0] = pic_offset_table_rtx;
1771 xops[1] = (rtx) gen_label_rtx ();
1773 output_asm_insn (AS1 (call,%P1), xops);
1774 ASM_OUTPUT_INTERNAL_LABEL (file, "L", CODE_LABEL_NUMBER (xops[1]));
1775 output_asm_insn (AS1 (pop%L0,%0), xops);
1776 output_asm_insn ("addl $_GLOBAL_OFFSET_TABLE_+[.-%P1],%0", xops);
1780 /* This function generates the assembly code for function entry.
1781 FILE is an stdio stream to output the code to.
1782 SIZE is an int: how many units of temporary storage to allocate. */
/* RTL prologue emitter: same logic as function_prologue but emitting
   insns rather than text, so the scheduler can move them (active only
   when TARGET_SCHEDULE_PROLOGUE — see the early-return below).
   NOTE(review): listing is elided; several gen_rtx argument lines are
   missing.  */
1785 ix86_expand_prologue ()
1790 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1791 || current_function_uses_const_pool);
1792 long tsize = get_frame_size ();
/* The text-mode prologue handles the !TARGET_SCHEDULE_PROLOGUE case.  */
1794 if (!TARGET_SCHEDULE_PROLOGUE)
1797 xops[0] = stack_pointer_rtx;
1798 xops[1] = frame_pointer_rtx;
1799 xops[2] = GEN_INT (tsize);
1800 if (frame_pointer_needed)
/* push %ebp via a store through (pre_dec %esp), then movl %esp,%ebp.  */
1802 emit_insn (gen_rtx (SET, 0,
1803 gen_rtx (MEM, SImode,
1804 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
1805 frame_pointer_rtx));
1806 emit_move_insn (xops[1], xops[0]);
/* %esp -= tsize, as an explicit SET of a MINUS.  */
1810 emit_insn (gen_rtx (SET, SImode,
1812 gen_rtx (MINUS, SImode,
1816 /* Note If use enter it is NOT reversed args.
1817 This one is not reversed from intel!!
1818 I think enter is slower. Also sdb doesn't like it.
1819 But if you want it the code is:
1821 xops[3] = const0_rtx;
1822 output_asm_insn ("enter %2,%3", xops);
/* Save live call-saved registers (and the PIC reg if used) by pushing.  */
1825 limit = (frame_pointer_needed ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1826 for (regno = limit - 1; regno >= 0; regno--)
1827 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1828 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1830 xops[0] = gen_rtx (REG, SImode, regno)
1831 emit_insn (gen_rtx (SET, 0,
1832 gen_rtx (MEM, SImode,
1833 gen_rtx (PRE_DEC, SImode, stack_pointer_rtx)),
/* PIC setup via the named patterns prologue_get_pc/prologue_set_got
   (defined in i386.md), deep-branch-prediction variant first.  */
1837 if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
1839 xops[0] = pic_offset_table_rtx;
1840 if (pic_label_rtx == 0)
1841 pic_label_rtx = (rtx) gen_label_rtx ();
1842 xops[1] = pic_label_rtx;
1844 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1845 emit_insn (gen_prologue_set_got (xops[0],
1846 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1847 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
/* Plain PIC variant: get PC, pop it, then compute the GOT address.  */
1849 else if (pic_reg_used)
1851 xops[0] = pic_offset_table_rtx;
1852 xops[1] = (rtx) gen_label_rtx ();
1854 emit_insn (gen_prologue_get_pc (xops[0], gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER(xops[1]))));
1855 emit_insn (gen_pop (xops[0]));
1856 emit_insn (gen_prologue_set_got (xops[0],
1857 gen_rtx (SYMBOL_REF, Pmode, "$_GLOBAL_OFFSET_TABLE_"),
1858 gen_rtx (CONST_INT, Pmode, CODE_LABEL_NUMBER (xops[1]))));
1862 /* Restore function stack, frame, and registers. */
/* Restore function stack, frame, and registers (see comment above).
   NOTE(review): the body is entirely elided from this listing; the RTL
   counterpart is ix86_expand_epilogue below.  */
1865 function_epilogue (file, size)
1871 /* Return 1 if it is appropriate to emit `ret' instructions in the
1872 body of a function. Do this only if the epilogue is simple, needing a
1873 couple of insns. Prior to reloading, we can't tell how many registers
1874 must be saved, so return 0 then. Return 0 if there is no frame
1875 marker to de-allocate.
1877 If NON_SAVING_SETJMP is defined and true, then it is not possible
1878 for the epilogue to be simple, so return 0. This is a special case
1879 since NON_SAVING_SETJMP will not cause regs_ever_live to change
1880 until final, but jump_optimize may need to know sooner if a
/* Decide whether a bare `ret` may be used: only after reload (register
   usage is known), not with NON_SAVING_SETJMP + setjmp, and only when
   either no call-saved registers need restoring or no frame pointer is
   needed (see the fuller rationale in the comment above).  */
1884 ix86_can_use_return_insn_p ()
1888 int reglimit = (frame_pointer_needed
1889 ? FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM);
1890 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1891 || current_function_uses_const_pool);
1893 #ifdef NON_SAVING_SETJMP
1894 if (NON_SAVING_SETJMP && current_function_calls_setjmp)
/* Before reload we cannot count saved registers; be conservative.  */
1898 if (! reload_completed)
/* Count registers the epilogue would have to restore (nregs update is
   elided in this listing).  */
1901 for (regno = reglimit - 1; regno >= 0; regno--)
1902 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1903 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1906 return nregs == 0 || ! frame_pointer_needed;
1910 /* This function generates the assembly code for function exit.
1911 FILE is an stdio stream to output the code to.
1912 SIZE is an int: how many units of temporary storage to deallocate. */
/* RTL epilogue emitter: restore saved registers, tear down the frame,
   and emit the return (possibly popping caller arguments).  The
   commented-out output_asm_insn lines show the text each emitted insn
   corresponds to.
   NOTE(review): listing is elided; some branch/brace lines are missing.  */
1915 ix86_expand_epilogue ()
1918 register int nregs, limit;
1921 int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
1922 || current_function_uses_const_pool);
1923 long tsize = get_frame_size ();
1925 /* Compute the number of registers to pop */
1927 limit = (frame_pointer_needed
1928 ? FRAME_POINTER_REGNUM
1929 : STACK_POINTER_REGNUM);
1933 for (regno = limit - 1; regno >= 0; regno--)
1934 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1935 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1938 /* sp is often unreliable so we must go off the frame pointer,
1941 /* In reality, we may not care if sp is unreliable, because we can
1942 restore the register relative to the frame pointer. In theory,
1943 since each move is the same speed as a pop, and we don't need the
1944 leal, this is faster. For now restore multiple registers the old
/* Offset from %ebp down to the bottom of the saved-register area.  */
1947 offset = -tsize - (nregs * UNITS_PER_WORD);
1949 xops[2] = stack_pointer_rtx;
1951 if (nregs > 1 || ! frame_pointer_needed)
/* Point %esp at the saved registers with an lea off %ebp, then pop.  */
1953 if (frame_pointer_needed)
1955 xops[0] = adj_offsettable_operand (AT_BP (QImode), offset);
1956 emit_insn (gen_movsi_lea (xops[2], XEXP (xops[0], 0)));
1957 /* output_asm_insn (AS2 (lea%L2,%0,%2), xops);*/
/* Pop saved registers in ascending regno order (reverse of the
   prologue's pushes).  */
1960 for (regno = 0; regno < limit; regno++)
1961 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1962 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1964 xops[0] = gen_rtx (REG, SImode, regno);
1965 emit_insn (gen_pop (xops[0]));
1966 /* output_asm_insn ("pop%L0 %0", xops);*/
/* Alternative path: restore each register with a move relative to the
   frame pointer instead of popping.  */
1970 for (regno = 0; regno < limit; regno++)
1971 if ((regs_ever_live[regno] && ! call_used_regs[regno])
1972 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1974 xops[0] = gen_rtx (REG, SImode, regno);
1975 xops[1] = adj_offsettable_operand (AT_BP (Pmode), offset);
1976 emit_move_insn (xops[0], xops[1]);
1977 /* output_asm_insn (AS2 (mov%L0,%1,%0), xops);*/
1981 if (frame_pointer_needed)
1983 /* If not an i386, mov & pop is faster than "leave". */
1985 if (TARGET_USE_LEAVE)
1986 emit_insn (gen_leave());
1987 /* output_asm_insn ("leave", xops);*/
/* Manual leave: movl %ebp,%esp; pop %ebp.  */
1990 xops[0] = frame_pointer_rtx;
1991 xops[1] = stack_pointer_rtx;
1992 emit_insn (gen_epilogue_set_stack_ptr());
1993 /* output_asm_insn (AS2 (mov%L2,%0,%2), xops);*/
1994 emit_insn (gen_pop (xops[0]));
1995 /* output_asm_insn ("pop%L0 %0", xops);*/
2000 /* If there is no frame pointer, we must still release the frame. */
/* %esp += tsize.  */
2002 xops[0] = GEN_INT (tsize);
2003 emit_insn (gen_rtx (SET, SImode,
2005 gen_rtx (PLUS, SImode,
2008 /* output_asm_insn (AS2 (add%L2,%0,%2), xops);*/
2011 #ifdef FUNCTION_BLOCK_PROFILER_EXIT
2012 if (profile_block_flag == 2)
2014 FUNCTION_BLOCK_PROFILER_EXIT(file);
/* Callee-pops-arguments return (e.g. stdcall-style).  */
2018 if (current_function_pops_args && current_function_args_size)
2020 xops[1] = GEN_INT (current_function_pops_args);
2022 /* i386 can only pop 32K bytes (maybe 64K? Is it signed?). If
2023 asked to pop more, pop return address, do explicit add, and jump
2024 indirectly to the caller. */
2026 if (current_function_pops_args >= 32768)
2028 /* ??? Which register to use here? */
2029 xops[0] = gen_rtx (REG, SImode, 2);
2030 emit_insn (gen_pop (xops[0]));
2031 /* output_asm_insn ("pop%L0 %0", xops);*/
2032 emit_insn (gen_rtx (SET, SImode,
2034 gen_rtx (PLUS, SImode,
2037 /* output_asm_insn (AS2 (add%L2,%1,%2), xops);*/
2038 emit_jump_insn (xops[0]);
2039 /* output_asm_insn ("jmp %*%0", xops);*/
2042 emit_jump_insn (gen_return_pop_internal (xops[1]));
2043 /* output_asm_insn ("ret %1", xops);*/
/* Plain return.  */
2046 /* output_asm_insn ("ret", xops);*/
2047 emit_jump_insn (gen_return_internal ());
2051 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2052 that is a valid memory address for an instruction.
2053 The MODE argument is the machine mode for the MEM expression
2054 that wants to use this address.
2056 On x86, legitimate addresses are:
2057 base movl (base),reg
2058 displacement movl disp,reg
2059 base + displacement movl disp(base),reg
2060 index + base movl (base,index),reg
2061 (index + base) + displacement movl disp(base,index),reg
2062 index*scale movl (,index,scale),reg
2063 index*scale + disp movl disp(,index,scale),reg
2064 index*scale + base movl (base,index,scale),reg
2065 (index*scale + base) + disp movl disp(base,index,scale),reg
2067 In each case, scale can be 1, 2, 4, 8. */
2069 /* This is exactly the same as print_operand_addr, except that
2070 it recognizes addresses instead of printing them.
2072 It only recognizes address in canonical form. LEGITIMIZE_ADDRESS should
2073 convert common non-canonical forms to canonical form so that they will
2076 #define ADDR_INVALID(msg,insn) \
2078 if (TARGET_DEBUG_ADDR) \
2080 fprintf (stderr, msg); \
/* Recognizer behind GO_IF_LEGITIMATE_ADDRESS: decompose ADDR into
   base + index*scale + displacement (canonical form only — see the
   comment block above) and validate each part.  STRICT selects hard-reg
   checking (*_STRICT_P) vs. pseudo-allowing checks.
   NOTE(review): listing is elided; success/failure returns and some
   assignments are missing here.  */
2086 legitimate_address_p (mode, addr, strict)
2087 enum machine_mode mode;
/* The four components, filled in by the decomposition below.  */
2091 rtx base = NULL_RTX;
2092 rtx indx = NULL_RTX;
2093 rtx scale = NULL_RTX;
2094 rtx disp = NULL_RTX;
2096 if (TARGET_DEBUG_ADDR)
2099 "\n==========\nGO_IF_LEGITIMATE_ADDRESS, mode = %s, strict = %d\n",
2100 GET_MODE_NAME (mode), strict);
/* --- Decomposition ---  */
2105 if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
2106 base = addr; /* base reg */
2108 else if (GET_CODE (addr) == PLUS)
2110 rtx op0 = XEXP (addr, 0);
2111 rtx op1 = XEXP (addr, 1);
2112 enum rtx_code code0 = GET_CODE (op0);
2113 enum rtx_code code1 = GET_CODE (op1);
2115 if (code0 == REG || code0 == SUBREG)
2117 if (code1 == REG || code1 == SUBREG)
2119 indx = op0; /* index + base */
2125 base = op0; /* base + displacement */
2130 else if (code0 == MULT)
2132 indx = XEXP (op0, 0);
2133 scale = XEXP (op0, 1);
2135 if (code1 == REG || code1 == SUBREG)
2136 base = op1; /* index*scale + base */
2139 disp = op1; /* index*scale + disp */
2142 else if (code0 == PLUS && GET_CODE (XEXP (op0, 0)) == MULT)
2144 indx = XEXP (XEXP (op0, 0), 0); /* index*scale + base + disp */
2145 scale = XEXP (XEXP (op0, 0), 1);
2146 base = XEXP (op0, 1);
2150 else if (code0 == PLUS)
2152 indx = XEXP (op0, 0); /* index + base + disp */
2153 base = XEXP (op0, 1);
2159 ADDR_INVALID ("PLUS subcode is not valid.\n", op0);
2164 else if (GET_CODE (addr) == MULT)
2166 indx = XEXP (addr, 0); /* index*scale */
2167 scale = XEXP (addr, 1);
2171 disp = addr; /* displacement */
2173 /* Allow arg pointer and stack pointer as index if there is not scaling */
/* %esp cannot be an x86 index register, so swap base/index in that
   case (swap body elided).  */
2174 if (base && indx && !scale
2175 && (indx == arg_pointer_rtx || indx == stack_pointer_rtx))
/* --- Validation ---  */
2182 /* Validate base register */
2183 /* Don't allow SUBREG's here, it can lead to spill failures when the base
2184 is one word out of a two word structure, which is represented internally
2188 if (GET_CODE (base) != REG)
2190 ADDR_INVALID ("Base is not a register.\n", base);
2194 if ((strict && !REG_OK_FOR_BASE_STRICT_P (base))
2195 || (!strict && !REG_OK_FOR_BASE_NONSTRICT_P (base)))
2197 ADDR_INVALID ("Base is not valid.\n", base);
2202 /* Validate index register */
2203 /* Don't allow SUBREG's here, it can lead to spill failures when the index
2204 is one word out of a two word structure, which is represented internally
2208 if (GET_CODE (indx) != REG)
2210 ADDR_INVALID ("Index is not a register.\n", indx);
2214 if ((strict && !REG_OK_FOR_INDEX_STRICT_P (indx))
2215 || (!strict && !REG_OK_FOR_INDEX_NONSTRICT_P (indx)))
2217 ADDR_INVALID ("Index is not valid.\n", indx);
/* A scale with no index register cannot arise from canonical RTL.  */
2222 abort (); /* scale w/o index invalid */
2224 /* Validate scale factor */
2227 HOST_WIDE_INT value;
2229 if (GET_CODE (scale) != CONST_INT)
2231 ADDR_INVALID ("Scale is not valid.\n", scale);
/* Only 1, 2, 4, 8 are encodable in an x86 SIB byte.  */
2235 value = INTVAL (scale);
2236 if (value != 1 && value != 2 && value != 4 && value != 8)
2238 ADDR_INVALID ("Scale is not a good multiplier.\n", scale);
2243 /* Validate displacement
2244 Constant pool addresses must be handled special. They are
2245 considered legitimate addresses, but only if not used with regs.
2246 When printed, the output routines know to print the reference with the
2247 PIC reg, even though the PIC reg doesn't appear in the RTL. */
2250 if (GET_CODE (disp) == SYMBOL_REF
2251 && CONSTANT_POOL_ADDRESS_P (disp)
2256 else if (!CONSTANT_ADDRESS_P (disp))
2258 ADDR_INVALID ("Displacement is not valid.\n", disp);
2262 else if (GET_CODE (disp) == CONST_DOUBLE)
2264 ADDR_INVALID ("Displacement is a const_double.\n", disp);
/* Under PIC, symbolic displacements must go through the PIC register
   (as base, or as unscaled index).  */
2268 else if (flag_pic && SYMBOLIC_CONST (disp)
2269 && base != pic_offset_table_rtx
2270 && (indx != pic_offset_table_rtx || scale != NULL_RTX))
2272 ADDR_INVALID ("Displacement is an invalid pic reference.\n", disp);
2276 else if (HALF_PIC_P () && HALF_PIC_ADDRESS_P (disp)
2277 && (base != NULL_RTX || indx != NULL_RTX))
2279 ADDR_INVALID ("Displacement is an invalid half-pic reference.\n", disp);
2284 if (TARGET_DEBUG_ADDR)
2285 fprintf (stderr, "Address is valid.\n");
2287 /* Everything looks valid, return true */
2292 /* Return a legitimate reference for ORIG (an address) using the
2293 register REG. If REG is 0, a new pseudo is generated.
2295 There are three types of references that must be handled:
2297 1. Global data references must load the address from the GOT, via
2298 the PIC reg. An insn is emitted to do this load, and the reg is
2301 2. Static data references must compute the address as an offset
2302 from the GOT, whose base is in the PIC reg. An insn is emitted to
2303 compute the address into a reg, and the reg is returned. Static
2304 data objects have SYMBOL_REF_FLAG set to differentiate them from
2305 global data objects.
2307 3. Constant pool addresses must be handled special. They are
2308 considered legitimate addresses, but only if not used with regs.
2309 When printed, the output routines know to print the reference with the
2310 PIC reg, even though the PIC reg doesn't appear in the RTL.
2312 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC
2313 reg also appears in the address (except for constant pool references,
2316 "switch" statements also require special handling when generating
2317 PIC code. See comments by the `casesi' insn in i386.md for details. */
/* Convert address ORIG into a PIC-legitimate form, loading through the
   GOT where needed (see the three-case comment block above).  REG, if
   nonzero, is the register to load into; otherwise a fresh pseudo is
   created.
   NOTE(review): listing is elided; some early returns are missing.  */
2320 legitimize_pic_address (orig, reg)
2327 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
/* Constant-pool symbols are already handled by the output routines.  */
2329 if (GET_CODE (addr) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (addr))
2334 reg = gen_reg_rtx (Pmode);
/* Local (SYMBOL_REF_FLAG) symbols and labels: pic_reg + offset
   directly; global symbols: load the address from the GOT slot.  */
2336 if ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FLAG (addr))
2337 || GET_CODE (addr) == LABEL_REF)
2338 new = gen_rtx (PLUS, Pmode, pic_offset_table_rtx, orig);
2340 new = gen_rtx (MEM, Pmode,
2341 gen_rtx (PLUS, Pmode,
2342 pic_offset_table_rtx, orig));
2344 emit_move_insn (reg, new);
2346 current_function_uses_pic_offset_table = 1;
/* Composite constants: legitimize each half of a (const (plus ...)).  */
2349 else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
2353 if (GET_CODE (addr) == CONST)
2355 addr = XEXP (addr, 0);
2356 if (GET_CODE (addr) != PLUS)
/* Already based on the PIC register — nothing to do (elided).  */
2360 if (XEXP (addr, 0) == pic_offset_table_rtx)
2364 reg = gen_reg_rtx (Pmode);
/* Recurse on both addends; reuse REG for the second only if the first
   didn't consume it.  */
2366 base = legitimize_pic_address (XEXP (addr, 0), reg);
2367 addr = legitimize_pic_address (XEXP (addr, 1),
2368 base == reg ? NULL_RTX : reg);
2370 if (GET_CODE (addr) == CONST_INT)
2371 return plus_constant (base, INTVAL (addr));
/* Reassociate so the constant ends up outermost.  */
2373 if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
2375 base = gen_rtx (PLUS, Pmode, base, XEXP (addr, 0));
2376 addr = XEXP (addr, 1);
2378 return gen_rtx (PLUS, Pmode, base, addr);
2384 /* Emit insns to move operands[1] into operands[0]. */
/* Rewrite operands for a PIC move of operands[1] into operands[0].
   During reload no new pseudos may be created, so the destination
   itself serves as the temporary.  */
2387 emit_pic_move (operands, mode)
2389 enum machine_mode mode;
2391 rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);
/* Can't store a symbolic constant straight to memory; force it into a
   register first.  */
2393 if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
2394 operands[1] = (rtx) force_reg (SImode, operands[1]);
2396 operands[1] = legitimize_pic_address (operands[1], temp);
2400 /* Try machine-dependent ways of modifying an illegitimate address
2401 to be legitimate. If we find one, return the new, valid address.
2402 This macro is used in only one place: `memory_address' in explow.c.
2404 OLDX is the address as it was before break_out_memory_refs was called.
2405 In some cases it is useful to look at this to decide what needs to be done.
2407 MODE and WIN are passed so that this macro can use
2408 GO_IF_LEGITIMATE_ADDRESS.
2410 It is always safe for this macro to do nothing. It exists to recognize
2411 opportunities to optimize the output.
2413 For the 80386, we handle X+REG by loading X into a register R and
2414 using R+REG. R will go in a general reg and indexing will be used.
2415 However, if REG is a broken-out memory address or multiplication,
2416 nothing needs to be done because REG can certainly go in a general reg.
2418 When -fpic is used, special handling is needed for symbolic references.
2419 See comments by legitimize_pic_address in i386.c for details. */
/* LEGITIMIZE_ADDRESS worker: canonicalize X toward the x86
   base+index*scale+disp form (see the long comment above).  Returns
   early whenever a transformation yields a legitimate address.
   NOTE(review): listing is elided; several returns, `changed = 1`
   markers, and brace lines are missing.  */
2422 legitimize_address (x, oldx, mode)
2425 enum machine_mode mode;
2430 if (TARGET_DEBUG_ADDR)
2432 fprintf (stderr, "\n==========\nLEGITIMIZE_ADDRESS, mode = %s\n", GET_MODE_NAME (mode));
/* PIC symbolic addresses have their own legitimizer.  */
2436 if (flag_pic && SYMBOLIC_CONST (x))
2437 return legitimize_pic_address (x, 0);
2439 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
/* (unsigned) cast makes negative/failed exact_log2 fall out of < 4.  */
2440 if (GET_CODE (x) == ASHIFT
2441 && GET_CODE (XEXP (x, 1)) == CONST_INT
2442 && (log = (unsigned)exact_log2 (INTVAL (XEXP (x, 1)))) < 4)
2445 x = gen_rtx (MULT, Pmode,
2446 force_reg (Pmode, XEXP (x, 0)),
2447 GEN_INT (1 << log));
2450 if (GET_CODE (x) == PLUS)
2452 /* Canonicalize shifts by 0, 1, 2, 3 into multiply */
2453 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2454 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2455 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) < 4)
2458 XEXP (x, 0) = gen_rtx (MULT, Pmode,
2459 force_reg (Pmode, XEXP (XEXP (x, 0), 0)),
2460 GEN_INT (1 << log));
2463 if (GET_CODE (XEXP (x, 1)) == ASHIFT
2464 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2465 && (log = (unsigned)exact_log2 (INTVAL (XEXP (XEXP (x, 1), 1)))) < 4)
2468 XEXP (x, 1) = gen_rtx (MULT, Pmode,
2469 force_reg (Pmode, XEXP (XEXP (x, 1), 0)),
2470 GEN_INT (1 << log));
2473 /* Put multiply first if it isn't already */
/* Swap the two addends (swap completion elided).  */
2474 if (GET_CODE (XEXP (x, 1)) == MULT)
2476 rtx tmp = XEXP (x, 0);
2477 XEXP (x, 0) = XEXP (x, 1);
2482 /* Canonicalize (plus (mult (reg) (const)) (plus (reg) (const)))
2483 into (plus (plus (mult (reg) (const)) (reg)) (const)). This can be
2484 created by virtual register instantiation, register elimination, and
2485 similar optimizations. */
2486 if (GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == PLUS)
2489 x = gen_rtx (PLUS, Pmode,
2490 gen_rtx (PLUS, Pmode, XEXP (x, 0), XEXP (XEXP (x, 1), 0)),
2491 XEXP (XEXP (x, 1), 1));
2494 /* Canonicalize (plus (plus (mult (reg) (const)) (plus (reg) (const))) const)
2495 into (plus (plus (mult (reg) (const)) (reg)) (const)). */
2496 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == PLUS
2497 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
2498 && GET_CODE (XEXP (XEXP (x, 0), 1)) == PLUS
2499 && CONSTANT_P (XEXP (x, 1)))
2501 rtx constant, other;
/* Exactly one of the two constants must be a CONST_INT to fold.  */
2503 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
2505 constant = XEXP (x, 1);
2506 other = XEXP (XEXP (XEXP (x, 0), 1), 1);
2508 else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
2510 constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
2511 other = XEXP (x, 1);
2519 x = gen_rtx (PLUS, Pmode,
2520 gen_rtx (PLUS, Pmode, XEXP (XEXP (x, 0), 0),
2521 XEXP (XEXP (x, 0), 1, 0)),
2522 plus_constant (other, INTVAL (constant)));
2526 if (changed && legitimate_address_p (mode, x, FALSE))
/* Force each MULT half into an operand register.  */
2529 if (GET_CODE (XEXP (x, 0)) == MULT)
2532 XEXP (x, 0) = force_operand (XEXP (x, 0), 0);
2535 if (GET_CODE (XEXP (x, 1)) == MULT)
2538 XEXP (x, 1) = force_operand (XEXP (x, 1), 0);
2542 && GET_CODE (XEXP (x, 1)) == REG
2543 && GET_CODE (XEXP (x, 0)) == REG
2546 if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
2549 x = legitimize_pic_address (x, 0);
2552 if (changed && legitimate_address_p (mode, x, FALSE))
/* Last resort: compute one addend into a temp register.  */
2555 if (GET_CODE (XEXP (x, 0)) == REG)
2557 register rtx temp = gen_reg_rtx (Pmode);
2558 register rtx val = force_operand (XEXP (x, 1), temp);
2560 emit_move_insn (temp, val);
2566 else if (GET_CODE (XEXP (x, 1)) == REG)
2568 register rtx temp = gen_reg_rtx (Pmode);
2569 register rtx val = force_operand (XEXP (x, 0), temp);
2571 emit_move_insn (temp, val);
2582 /* Print an integer constant expression in assembler syntax. Addition
2583 and subtraction are the only arithmetic that may appear in these
2584 expressions. FILE is the stdio stream to write to, X is the rtx, and
2585 CODE is the operand print code from the output string. */
/* Print constant expression X to FILE in assembler syntax for PIC code,
   appending the proper relocation suffix (@GOT, @GOTOFF, @PLT).  CODE
   is the operand print code ('P' selects @PLT).  Only + and - may
   appear in these expressions (see the comment above).
   NOTE(review): listing is elided; case labels and breaks are missing.  */
2588 output_pic_addr_const (file, x, code)
2595 switch (GET_CODE (x))
/* Symbol or label name itself.  */
2606 if (GET_CODE (x) == SYMBOL_REF)
2607 assemble_name (file, XSTR (x, 0));
2610 ASM_GENERATE_INTERNAL_LABEL (buf, "L",
2611 CODE_LABEL_NUMBER (XEXP (x, 0)));
2612 assemble_name (asm_out_file, buf);
/* Choose the relocation: constant-pool entries and labels are
   GOT-relative offsets; 'P' requests a PLT call; non-local symbols
   (SYMBOL_REF_FLAG clear) go through the GOT.  */
2615 if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
2616 fprintf (file, "@GOTOFF(%%ebx)");
2617 else if (code == 'P')
2618 fprintf (file, "@PLT");
2619 else if (GET_CODE (x) == LABEL_REF)
2620 fprintf (file, "@GOTOFF");
2621 else if (! SYMBOL_REF_FLAG (x))
2622 fprintf (file, "@GOT");
2624 fprintf (file, "@GOTOFF");
/* CODE_LABEL: print its internal label name.  */
2629 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
2630 assemble_name (asm_out_file, buf);
2634 fprintf (file, "%d", INTVAL (x));
2638 /* This used to output parentheses around the expression,
2639 but that does not work on the 386 (either ATT or BSD assembler). */
2640 output_pic_addr_const (file, XEXP (x, 0), code);
/* CONST_DOUBLE: only integral (VOIDmode) values can be printed.  */
2644 if (GET_MODE (x) == VOIDmode)
2646 /* We can use %d if the number is <32 bits and positive. */
2647 if (CONST_DOUBLE_HIGH (x) || CONST_DOUBLE_LOW (x) < 0)
2648 fprintf (file, "0x%x%08x",
2649 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
2651 fprintf (file, "%d", CONST_DOUBLE_LOW (x));
2654 /* We can't handle floating point constants;
2655 PRINT_OPERAND must handle them. */
2656 output_operand_lossage ("floating constant misused");
/* PLUS: print both halves with an explicit '+' for non-negative
   constants.  */
2660 /* Some assemblers need integer constants to appear last (eg masm). */
2661 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
2663 output_pic_addr_const (file, XEXP (x, 1), code);
2664 if (INTVAL (XEXP (x, 0)) >= 0)
2665 fprintf (file, "+");
2666 output_pic_addr_const (file, XEXP (x, 0), code);
2670 output_pic_addr_const (file, XEXP (x, 0), code);
2671 if (INTVAL (XEXP (x, 1)) >= 0)
2672 fprintf (file, "+");
2673 output_pic_addr_const (file, XEXP (x, 1), code);
/* MINUS: first - second.  */
2678 output_pic_addr_const (file, XEXP (x, 0), code);
2679 fprintf (file, "-");
2680 output_pic_addr_const (file, XEXP (x, 1), code);
2684 output_operand_lossage ("invalid expression as operand");
2689 /* Append the correct conditional move suffix which corresponds to CODE */
/* Writes the two-letter condition suffix ("ge", "g", "le", "l" for signed
   comparisons; "ae", "a", "be", "b" for unsigned) to FILE. The elided
   cases above consult CC_Z_IN_NOT_C in cc_prev_status, i.e. the choice
   depends on how the previous insn left the flags. */
2692 put_condition_code (code, file)
2699 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2705 if (cc_prev_status.flags & CC_Z_IN_NOT_C)
2711 fputs ("ge", file); return;
2713 fputs ("g", file); return;
2715 fputs ("le", file); return;
2717 fputs ("l", file); return;
2719 fputs ("ae", file); return;
2721 fputs ("a", file); return;
2723 fputs ("be", file); return;
2725 fputs ("b", file); return;
2726 default: output_operand_lossage ("Invalid %%C operand");
2731 f -- float insn (print a CONST_DOUBLE as a float rather than in hex).
2732 D,L,W,B,Q,S -- print the opcode suffix for specified size of operand.
2733 C -- print opcode suffix for set/cmov insn.
2734 N -- like C, but print reversed condition
2735 R -- print the prefix for register names.
2736 z -- print the opcode suffix for the size of the current operand.
2737 * -- print a star (in certain assembler syntax)
2738 w -- print the operand as if it's a "word" (HImode) even if it isn't.
2739 c -- don't print special prefixes before constant operands.
2740 J -- print the appropriate jump operand.
2741 s -- print a shift double count, followed by the assemblers argument
/* Main operand-printing entry point; CODE selects one of the modifiers
   documented above, then the operand X itself is printed. */
2746 print_operand (file, x, code)
/* Explicit size-suffix modifiers: each emits a fixed suffix letter
   via PUT_OP_SIZE regardless of the operand's actual mode. */
2761 PUT_OP_SIZE (code, 'l', file);
2765 PUT_OP_SIZE (code, 'w', file);
2769 PUT_OP_SIZE (code, 'b', file);
2773 PUT_OP_SIZE (code, 'l', file);
2777 PUT_OP_SIZE (code, 's', file);
2781 PUT_OP_SIZE (code, 't', file);
/* 'z': derive the suffix from the operand's mode size instead. */
2785 /* 387 opcodes don't get size suffixes if the operands are
2788 if (STACK_REG_P (x))
2791 /* this is the size of op from size of operand */
2792 switch (GET_MODE_SIZE (GET_MODE (x)))
2795 PUT_OP_SIZE ('B', 'b', file);
2799 PUT_OP_SIZE ('W', 'w', file);
2803 if (GET_MODE (x) == SFmode)
2805 PUT_OP_SIZE ('S', 's', file);
2809 PUT_OP_SIZE ('L', 'l', file);
2813 PUT_OP_SIZE ('T', 't', file);
/* 8-byte integer: "q" suffix only with GAS mnemonics, else "l". */
2817 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
2819 #ifdef GAS_MNEMONICS
2820 PUT_OP_SIZE ('Q', 'q', file);
2823 PUT_OP_SIZE ('Q', 'l', file); /* Fall through */
2827 PUT_OP_SIZE ('Q', 'l', file);
/* 'J': print a whole jump mnemonic chosen from the comparison code. */
2840 switch (GET_CODE (x))
2842 /* These conditions are appropriate for testing the result
2843 of an arithmetic operation, not for a compare operation.
2844 Cases GE, LT assume CC_NO_OVERFLOW true. All cases assume
2845 CC_Z_IN_NOT_C false and not floating point. */
2846 case NE: fputs ("jne", file); return;
2847 case EQ: fputs ("je", file); return;
2848 case GE: fputs ("jns", file); return;
2849 case LT: fputs ("js", file); return;
2850 case GEU: fputs ("jmp", file); return;
2851 case GTU: fputs ("jne", file); return;
2852 case LEU: fputs ("je", file); return;
2853 case LTU: fputs ("#branch never", file); return;
2855 /* no matching branches for GT nor LE */
/* 's': shift-double count -- print it (plus separator) unless the
   assembler's implicit-%cl form omits the count. */
2860 if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
2862 PRINT_OPERAND (file, x, 0);
2863 fputs (AS2C (,) + 1, file);
2867 /* This is used by the conditional move instructions. */
2869 put_condition_code (GET_CODE (x), file);
2871 /* like above, but reverse condition */
2873 put_condition_code (reverse_condition (GET_CODE (x)), file);
2880 sprintf (str, "invalid operand code `%c'", code);
2881 output_operand_lossage (str);
/* No modifier (or after one): print the operand itself. */
2885 if (GET_CODE (x) == REG)
2887 PRINT_REG (x, code, file);
2889 else if (GET_CODE (x) == MEM)
2891 PRINT_PTR (x, file);
2892 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2895 output_pic_addr_const (file, XEXP (x, 0), code);
2897 output_addr_const (file, XEXP (x, 0));
2900 output_address (XEXP (x, 0));
/* SFmode constant: print the 32-bit target image in hex.
   NOTE(review): "%x" with a long argument -- harmless where
   int == long (32-bit hosts) but technically mismatched; confirm. */
2902 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
2904 REAL_VALUE_TYPE r; long l;
2905 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2906 REAL_VALUE_TO_TARGET_SINGLE (r, l);
2907 PRINT_IMMED_PREFIX (file);
2908 fprintf (file, "0x%x", l);
2910 /* These float cases don't actually occur as immediate operands. */
2911 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
2913 REAL_VALUE_TYPE r; char dstr[30];
2914 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2915 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2916 fprintf (file, "%s", dstr);
2918 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == XFmode)
2920 REAL_VALUE_TYPE r; char dstr[30];
2921 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2922 REAL_VALUE_TO_DECIMAL (r, "%.22e", dstr);
2923 fprintf (file, "%s", dstr);
/* Other constants: immediate prefix for numbers, offset prefix for
   symbolic addresses ('c' modifier suppresses both -- handled above). */
2929 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
2930 PRINT_IMMED_PREFIX (file);
2931 else if (GET_CODE (x) == CONST || GET_CODE (x) == SYMBOL_REF
2932 || GET_CODE (x) == LABEL_REF)
2933 PRINT_OFFSET_PREFIX (file);
2936 output_pic_addr_const (file, x, code);
2938 output_addr_const (file, x);
2942 /* Print a memory operand whose address is ADDR. */
/* Decomposes ADDR into offset, base register, and (possibly scaled) index
   register, then prints it in displacement(base,index,scale) form. */
2945 print_operand_address (file, addr)
2949 register rtx reg1, reg2, breg, ireg;
2952 switch (GET_CODE (addr))
/* Plain register: print with the register-name prefix. */
2956 fprintf (file, "%se", RP);
2957 fputs (hi_reg_name[REGNO (addr)], file);
/* PLUS: first peel off a constant displacement, if either arm is one. */
2967 if (CONSTANT_ADDRESS_P (XEXP (addr, 0)))
2969 offset = XEXP (addr, 0);
2970 addr = XEXP (addr, 1);
2972 else if (CONSTANT_ADDRESS_P (XEXP (addr, 1)))
2974 offset = XEXP (addr, 1);
2975 addr = XEXP (addr, 0);
/* Then pull out one MULT (scaled index) or REG from a remaining PLUS. */
2977 if (GET_CODE (addr) != PLUS) ;
2978 else if (GET_CODE (XEXP (addr, 0)) == MULT)
2980 reg1 = XEXP (addr, 0);
2981 addr = XEXP (addr, 1);
2983 else if (GET_CODE (XEXP (addr, 1)) == MULT)
2985 reg1 = XEXP (addr, 1);
2986 addr = XEXP (addr, 0);
2988 else if (GET_CODE (XEXP (addr, 0)) == REG)
2990 reg1 = XEXP (addr, 0);
2991 addr = XEXP (addr, 1);
2993 else if (GET_CODE (XEXP (addr, 1)) == REG)
2995 reg1 = XEXP (addr, 1);
2996 addr = XEXP (addr, 0);
2998 if (GET_CODE (addr) == REG || GET_CODE (addr) == MULT)
3000 if (reg1 == 0) reg1 = addr;
3006 if (addr != 0) abort ();
/* Decide which extracted reg is the base and which the index: a MULT
   must be the index; otherwise prefer a base-capable register as base. */
3009 if ((reg1 && GET_CODE (reg1) == MULT)
3010 || (reg2 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg2))))
3015 else if (reg1 != 0 && REGNO_OK_FOR_BASE_P (REGNO (reg1)))
3021 if (ireg != 0 || breg != 0)
3028 output_pic_addr_const (file, addr, 0);
3030 else if (GET_CODE (addr) == LABEL_REF)
3031 output_asm_label (addr);
3034 output_addr_const (file, addr);
/* Unwrap a MULT index into the register and its scale factor. */
3037 if (ireg != 0 && GET_CODE (ireg) == MULT)
3039 scale = INTVAL (XEXP (ireg, 1));
3040 ireg = XEXP (ireg, 0);
3043 /* The stack pointer can only appear as a base register,
3044 never an index register, so exchange the regs if it is wrong. */
3046 if (scale == 1 && ireg && REGNO (ireg) == STACK_POINTER_REGNUM)
3055 /* output breg+ireg*scale */
3056 PRINT_B_I_S (breg, ireg, scale, file);
/* Bare MULT address: scale*index with an explicit zero displacement. */
3063 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
3065 scale = INTVAL (XEXP (addr, 0));
3066 ireg = XEXP (addr, 1);
3070 scale = INTVAL (XEXP (addr, 1));
3071 ireg = XEXP (addr, 0);
3073 output_addr_const (file, const0_rtx);
3074 PRINT_B_I_S ((rtx) 0, ireg, scale, file);
/* Default: constant address. Small signed 16-bit values print as plain
   decimal; everything else goes through the const-printing helpers. */
3079 if (GET_CODE (addr) == CONST_INT
3080 && INTVAL (addr) < 0x8000
3081 && INTVAL (addr) >= -0x8000)
3082 fprintf (file, "%d", INTVAL (addr));
3086 output_pic_addr_const (file, addr, 0);
3088 output_addr_const (file, addr);
3093 /* Set the cc_status for the results of an insn whose pattern is EXP.
3094 On the 80386, we assume that only test and compare insns, as well
3095 as SI, HI, & DI mode ADD, SUB, NEG, AND, IOR, XOR, ASHIFT,
3096 ASHIFTRT, and LSHIFTRT instructions set the condition codes usefully.
3097 Also, we assume that jumps, moves and sCOND don't affect the condition
3098 codes. All else clobbers the condition codes, by assumption.
3100 We assume that ALL integer add, minus, etc. instructions affect the
3101 condition codes. This MUST be consistent with i386.md.
3103 We don't record any float test or compare - the redundant test &
3104 compare check in final.c does not handle stack-like regs correctly. */
3107 notice_update_cc (exp)
3110 if (GET_CODE (exp) == SET)
3112 /* Jumps do not alter the cc's. */
3113 if (SET_DEST (exp) == pc_rtx)
3115 #ifdef IS_STACK_MODE
3116 /* Moving into a memory of stack_mode may have been moved
3117 in between the use and set of cc0 by loop_spl(). So
3118 old value of cc.status must be retained */
3119 if(GET_CODE(SET_DEST(exp))==MEM
3120 && IS_STACK_MODE(GET_MODE(SET_DEST(exp))))
3125 /* Moving register or memory into a register:
3126 it doesn't alter the cc's, but it might invalidate
3127 the RTX's which we remember the cc's came from.
3128 (Note that moving a constant 0 or 1 MAY set the cc's). */
3129 if (REG_P (SET_DEST (exp))
3130 && (REG_P (SET_SRC (exp)) || GET_CODE (SET_SRC (exp)) == MEM
3131 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3133 if (cc_status.value1
3134 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value1))
3135 cc_status.value1 = 0;
3136 if (cc_status.value2
3137 && reg_overlap_mentioned_p (SET_DEST (exp), cc_status.value2))
3138 cc_status.value2 = 0;
3141 /* Moving register into memory doesn't alter the cc's.
3142 It may invalidate the RTX's which we remember the cc's came from. */
/* NOTE(review): `a && b == MEM || reg_mentioned_p (...)` parses as
   (a && b == MEM) || reg_mentioned_p (...), so reg_mentioned_p can be
   reached with cc_status.value1 == 0. The parallel branch above checks
   value1 first -- confirm whether the parentheses are misplaced here. */
3143 if (GET_CODE (SET_DEST (exp)) == MEM
3144 && (REG_P (SET_SRC (exp))
3145 || GET_RTX_CLASS (GET_CODE (SET_SRC (exp))) == '<'))
3147 if (cc_status.value1 && GET_CODE (cc_status.value1) == MEM
3148 || reg_mentioned_p (SET_DEST (exp), cc_status.value1))
3149 cc_status.value1 = 0;
3150 if (cc_status.value2 && GET_CODE (cc_status.value2) == MEM
3151 || reg_mentioned_p (SET_DEST (exp), cc_status.value2))
3152 cc_status.value2 = 0;
3155 /* Function calls clobber the cc's. */
3156 else if (GET_CODE (SET_SRC (exp)) == CALL)
3161 /* Tests and compares set the cc's in predictable ways. */
3162 else if (SET_DEST (exp) == cc0_rtx)
3165 cc_status.value1 = SET_SRC (exp);
3168 /* Certain instructions affect the condition codes. */
3169 else if (GET_MODE (SET_SRC (exp)) == SImode
3170 || GET_MODE (SET_SRC (exp)) == HImode
3171 || GET_MODE (SET_SRC (exp)) == QImode)
3172 switch (GET_CODE (SET_SRC (exp)))
3174 case ASHIFTRT: case LSHIFTRT:
3176 /* Shifts on the 386 don't set the condition codes if the
3177 shift count is zero. */
3178 if (GET_CODE (XEXP (SET_SRC (exp), 1)) != CONST_INT)
3183 /* We assume that the CONST_INT is non-zero (this rtx would
3184 have been deleted if it were zero). */
3186 case PLUS: case MINUS: case NEG:
3187 case AND: case IOR: case XOR:
3188 cc_status.flags = CC_NO_OVERFLOW;
3189 cc_status.value1 = SET_SRC (exp);
3190 cc_status.value2 = SET_DEST (exp);
/* PARALLEL whose first element is a SET: treat like the SET cases. */
3201 else if (GET_CODE (exp) == PARALLEL
3202 && GET_CODE (XVECEXP (exp, 0, 0)) == SET)
3204 if (SET_DEST (XVECEXP (exp, 0, 0)) == pc_rtx)
3206 if (SET_DEST (XVECEXP (exp, 0, 0)) == cc0_rtx)
3209 if (stack_regs_mentioned_p (SET_SRC (XVECEXP (exp, 0, 0))))
3210 cc_status.flags |= CC_IN_80387;
3212 cc_status.value1 = SET_SRC (XVECEXP (exp, 0, 0));
3223 /* Split one or more DImode RTL references into pairs of SImode
3224 references. The RTL can be REG, offsettable MEM, integer constant, or
3225 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
3226 split and "num" is its length. lo_half and hi_half are output arrays
3227 that parallel "operands". */
3230 split_di (operands, num, lo_half, hi_half)
3233 rtx lo_half[], hi_half[];
3237 if (GET_CODE (operands[num]) == REG)
/* A DImode hard-register pair: low word is REGNO, high word REGNO+1. */
3239 lo_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]));
3240 hi_half[num] = gen_rtx (REG, SImode, REGNO (operands[num]) + 1);
3242 else if (CONSTANT_P (operands[num]))
/* Constants are split by the target-independent helper. */
3244 split_double (operands[num], &lo_half[num], &hi_half[num]);
3246 else if (offsettable_memref_p (operands[num]))
/* Memory: low half is the operand itself, high half is at +4 bytes. */
3248 lo_half[num] = operands[num];
3249 hi_half[num] = adj_offsettable_operand (operands[num], 4);
3256 /* Return 1 if this is a valid binary operation on a 387.
3257 OP is the expression matched, and MODE is its mode. */
3260 binary_387_op (op, mode)
3262 enum machine_mode mode;
/* Reject a mode mismatch, then accept only the float-mode arithmetic
   codes listed in the (elided) switch cases. */
3264 if (mode != VOIDmode && mode != GET_MODE (op))
3267 switch (GET_CODE (op))
3273 return GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT;
3281 /* Return 1 if this is a valid shift or rotate operation on a 386.
3282 OP is the expression matched, and MODE is its mode. */
3287 enum machine_mode mode;
3289 rtx operand = XEXP (op, 0);
/* Mode checks: OP must match MODE, its shifted operand must share that
   mode, and the mode must be integral. */
3291 if (mode != VOIDmode && mode != GET_MODE (op))
3294 if (GET_MODE (operand) != GET_MODE (op)
3295 || GET_MODE_CLASS (GET_MODE (op)) != MODE_INT)
/* Accept exactly the five 386 shift/rotate rtx codes. */
3298 return (GET_CODE (op) == ASHIFT
3299 || GET_CODE (op) == ASHIFTRT
3300 || GET_CODE (op) == LSHIFTRT
3301 || GET_CODE (op) == ROTATE
3302 || GET_CODE (op) == ROTATERT);
3305 /* Return 1 if OP is COMPARE rtx with mode VOIDmode.
3306 MODE is not used. */
3309 VOIDmode_compare_op (op, mode)
3311 enum machine_mode mode;
3313 return GET_CODE (op) == COMPARE && GET_MODE (op) == VOIDmode;
3316 /* Output code to perform a 387 binary operation in INSN, one of PLUS,
3317 MINUS, MULT or DIV. OPERANDS are the insn operands, where operands[3]
3318 is the expression of the binary operation. The output may either be
3319 emitted here, or returned to the caller, like all output_* functions.
3321 There is no guarantee that the operands are the same mode, as they
3322 might be within FLOAT or FLOAT_EXTEND expressions. */
3325 output_387_binary_op (insn, operands)
3331 static char buf[100];
/* First pick the base mnemonic; an integer-mode operand selects the
   fi* (integer-operand) form of the instruction. */
3333 switch (GET_CODE (operands[3]))
3336 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3337 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3344 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3345 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3352 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3353 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3360 if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT
3361 || GET_MODE_CLASS (GET_MODE (operands[2])) == MODE_INT)
3371 strcpy (buf, base_op);
3373 switch (GET_CODE (operands[3]))
/* Commutative ops (PLUS/MULT, per the elided case labels): if the
   destination aliases operands[2], swap so operands[2] is the source. */
3377 if (REG_P (operands[2]) && REGNO (operands[0]) == REGNO (operands[2]))
3380 operands[2] = operands[1];
3384 if (GET_CODE (operands[2]) == MEM)
3385 return strcat (buf, AS1 (%z2,%2));
3387 if (NON_STACK_REG_P (operands[1]))
3389 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3392 else if (NON_STACK_REG_P (operands[2]))
3394 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
/* Pop the stack (p-suffix form) when operands[2] dies in this insn. */
3398 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3399 return strcat (buf, AS2 (p,%2,%0));
3401 if (STACK_TOP_P (operands[0]))
3402 return strcat (buf, AS2C (%y2,%0));
3404 return strcat (buf, AS2C (%2,%0));
/* Non-commutative ops (MINUS/DIV): may need the reversed (r-suffix)
   form depending on which operand is in memory or a non-stack reg. */
3408 if (GET_CODE (operands[1]) == MEM)
3409 return strcat (buf, AS1 (r%z1,%1));
3411 if (GET_CODE (operands[2]) == MEM)
3412 return strcat (buf, AS1 (%z2,%2));
3414 if (NON_STACK_REG_P (operands[1]))
3416 output_op_from_reg (operands[1], strcat (buf, AS1 (r%z0,%1)));
3419 else if (NON_STACK_REG_P (operands[2]))
3421 output_op_from_reg (operands[2], strcat (buf, AS1 (%z0,%1)));
3425 if (! STACK_REG_P (operands[1]) || ! STACK_REG_P (operands[2]))
/* Popping forms when a source stack register dies here. */
3428 if (find_regno_note (insn, REG_DEAD, REGNO (operands[2])))
3429 return strcat (buf, AS2 (rp,%2,%0));
3431 if (find_regno_note (insn, REG_DEAD, REGNO (operands[1])))
3432 return strcat (buf, AS2 (p,%1,%0));
3434 if (STACK_TOP_P (operands[0]))
3436 if (STACK_TOP_P (operands[1]))
3437 return strcat (buf, AS2C (%y2,%0));
3439 return strcat (buf, AS2 (r,%y1,%0));
3441 else if (STACK_TOP_P (operands[1]))
3442 return strcat (buf, AS2C (%1,%0));
3444 return strcat (buf, AS2 (r,%2,%0));
3451 /* Output code for INSN to convert a float to a signed int. OPERANDS
3452 are the insn operands. The output may be SFmode or DFmode and the
3453 input operand may be SImode or DImode. As a special case, make sure
3454 that the 387 stack top dies if the output mode is DImode, because the
3455 hardware requires this. */
3458 output_fix_trunc (insn, operands)
3462 int stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3465 if (! STACK_TOP_P (operands[1]) ||
3466 (GET_MODE (operands[0]) == DImode && ! stack_top_dies))
/* Save the FP control word, then build and load a copy with the
   rounding-control bits forced (12 = bit offset of the RC field in the
   temporary), so the fist/fistp below truncates toward zero. */
3469 xops[0] = GEN_INT (12);
3470 xops[1] = operands[4];
3472 output_asm_insn (AS1 (fnstc%W2,%2), operands);
3473 output_asm_insn (AS2 (mov%L2,%2,%4), operands);
3474 output_asm_insn (AS2 (mov%B1,%0,%h1), xops);
3475 output_asm_insn (AS2 (mov%L4,%4,%3), operands);
3476 output_asm_insn (AS1 (fldc%W3,%3), operands);
3478 if (NON_STACK_REG_P (operands[0]))
3479 output_to_reg (operands[0], stack_top_dies);
3480 else if (GET_CODE (operands[0]) == MEM)
/* fistp pops the 387 stack; use it when the stack top dies here. */
3483 output_asm_insn (AS1 (fistp%z0,%0), operands);
3485 output_asm_insn (AS1 (fist%z0,%0), operands);
/* Restore the caller's control word from the saved copy. */
3490 return AS1 (fldc%W2,%2);
3493 /* Output code for INSN to compare OPERANDS. The two operands might
3494 not have the same mode: one might be within a FLOAT or FLOAT_EXTEND
3495 expression. If the compare is in mode CCFPEQmode, use an opcode that
3496 will not fault if a qNaN is present. */
3499 output_float_compare (insn, operands)
3504 rtx body = XVECEXP (PATTERN (insn), 0, 0);
3505 int unordered_compare = GET_MODE (SET_SRC (body)) == CCFPEQmode;
/* The 387 can only compare against the stack top; if operands[0] is not
   there, swap and record CC_REVERSED so users of cc0 compensate. */
3508 if (! STACK_TOP_P (operands[0]))
3511 operands[0] = operands[1];
3513 cc_status.flags |= CC_REVERSED;
3516 if (! STACK_TOP_P (operands[0]))
3519 stack_top_dies = find_regno_note (insn, REG_DEAD, FIRST_STACK_REG) != 0;
3521 if (STACK_REG_P (operands[1])
3523 && find_regno_note (insn, REG_DEAD, REGNO (operands[1]))
3524 && REGNO (operands[1]) != FIRST_STACK_REG)
3526 /* If both the top of the 387 stack dies, and the other operand
3527 is also a stack register that dies, then this must be a
3528 `fcompp' float compare */
3530 if (unordered_compare)
3531 output_asm_insn ("fucompp", operands);
3533 output_asm_insn ("fcompp", operands);
3537 static char buf[100];
3539 /* Decide if this is the integer or float compare opcode, or the
3540 unordered float compare. */
3542 if (unordered_compare)
3543 strcpy (buf, "fucom");
3544 else if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_FLOAT)
3545 strcpy (buf, "fcom");
3547 strcpy (buf, "ficom");
3549 /* Modify the opcode if the 387 stack is to be popped. */
3554 if (NON_STACK_REG_P (operands[1]))
3555 output_op_from_reg (operands[1], strcat (buf, AS1 (%z0,%1)));
3557 output_asm_insn (strcat (buf, AS1 (%z1,%y1)), operands);
3560 /* Now retrieve the condition code. */
3562 return output_fp_cc0_set (insn);
3565 /* Output opcodes to transfer the results of FP compare or test INSN
3566 from the FPU to the CPU flags. If TARGET_IEEE_FP, ensure that if the
3567 result of the compare or test is unordered, no comparison operator
3568 succeeds except NE. Return an output template, if any. */
3571 output_fp_cc0_set (insn)
3575 rtx unordered_label;
/* fnstsw stores the 387 status word into %ax (HImode reg 0). */
3579 xops[0] = gen_rtx (REG, HImode, 0);
3580 output_asm_insn (AS1 (fnsts%W0,%0), xops);
3582 if (! TARGET_IEEE_FP)
3584 if (!(cc_status.flags & CC_REVERSED))
/* Peek at the insn that will use cc0 to learn which comparison it
   performs; only conditional jumps and plain SETs are recognized. */
3586 next = next_cc0_user (insn);
3588 if (GET_CODE (next) == JUMP_INSN
3589 && GET_CODE (PATTERN (next)) == SET
3590 && SET_DEST (PATTERN (next)) == pc_rtx
3591 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3593 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3595 else if (GET_CODE (PATTERN (next)) == SET)
3597 code = GET_CODE (SET_SRC (PATTERN (next)));
3603 if (code == GT || code == LT || code == EQ || code == NE
3604 || code == LE || code == GE)
3605 { /* We will test eax directly */
3606 cc_status.flags |= CC_TEST_AX;
/* TARGET_IEEE_FP path: classify the upcoming comparison and mask the
   status-word flag bits in %ah so unordered results fail every
   comparison except NE. */
3613 next = next_cc0_user (insn);
3614 if (next == NULL_RTX)
3617 if (GET_CODE (next) == JUMP_INSN
3618 && GET_CODE (PATTERN (next)) == SET
3619 && SET_DEST (PATTERN (next)) == pc_rtx
3620 && GET_CODE (SET_SRC (PATTERN (next))) == IF_THEN_ELSE)
3622 code = GET_CODE (XEXP (SET_SRC (PATTERN (next)), 0));
3624 else if (GET_CODE (PATTERN (next)) == SET)
3626 code = GET_CODE (SET_SRC (PATTERN (next)));
3631 xops[0] = gen_rtx (REG, QImode, 0);
/* Masks 0x45/0x44/0x05 select condition-code bits of the status word
   high byte -- presumably C0/C2/C3; verify against the 387 status-word
   layout. The and/cmp/xor/dec sequences below differ per comparison. */
3636 xops[1] = GEN_INT (0x45);
3637 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3642 xops[1] = GEN_INT (0x45);
3643 xops[2] = GEN_INT (0x01);
3644 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3645 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3650 xops[1] = GEN_INT (0x05);
3651 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3656 xops[1] = GEN_INT (0x45);
3657 xops[2] = GEN_INT (0x40);
3658 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3659 output_asm_insn (AS1 (dec%B0,%h0), xops);
3660 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3665 xops[1] = GEN_INT (0x45);
3666 xops[2] = GEN_INT (0x40);
3667 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3668 output_asm_insn (AS2 (cmp%B0,%2,%h0), xops);
3673 xops[1] = GEN_INT (0x44);
3674 xops[2] = GEN_INT (0x40);
3675 output_asm_insn (AS2 (and%B0,%1,%h0), xops);
3676 output_asm_insn (AS2 (xor%B0,%2,%h0), xops);
/* Per-function cache of stack slots handed out by assign_386_stack_local:
   one slot array per machine mode, MAX_386_STACK_LOCALS slots each. */
3690 #define MAX_386_STACK_LOCALS 2
3692 static rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3694 /* Define the structure for the machine field in struct function. */
3695 struct machine_function
3697 rtx i386_stack_locals[(int) MAX_MACHINE_MODE][MAX_386_STACK_LOCALS];
3700 /* Functions to save and restore i386_stack_locals.
3701 These will be called, via pointer variables,
3702 from push_function_context and pop_function_context. */
3705 save_386_machine_status (p)
/* Snapshot the global i386_stack_locals array into freshly allocated
   per-function storage hung off p->machine. */
3708 p->machine = (struct machine_function *) xmalloc (sizeof i386_stack_locals);
3709 bcopy ((char *) i386_stack_locals, (char *) p->machine->i386_stack_locals,
3710 sizeof i386_stack_locals);
/* Inverse of save_386_machine_status: copy the saved per-function slot
   array back into the global i386_stack_locals. */
3714 restore_386_machine_status (p)
3717 bcopy ((char *) p->machine->i386_stack_locals, (char *) i386_stack_locals,
3718 sizeof i386_stack_locals);
3722 /* Clear stack slot assignments remembered from previous functions.
3723 This is called from INIT_EXPANDERS once before RTL is emitted for each
3727 clear_386_stack_locals ()
3729 enum machine_mode mode;
/* Null out every (mode, slot) entry so stale slots from the previous
   function are never reused. */
3732 for (mode = VOIDmode; (int) mode < (int) MAX_MACHINE_MODE;
3733 mode = (enum machine_mode) ((int) mode + 1))
3734 for (n = 0; n < MAX_386_STACK_LOCALS; n++)
3735 i386_stack_locals[(int) mode][n] = NULL_RTX;
3737 /* Arrange to save and restore i386_stack_locals around nested functions. */
3738 save_machine_status = save_386_machine_status;
3739 restore_machine_status = restore_386_machine_status;
3742 /* Return a MEM corresponding to a stack slot with mode MODE.
3743 Allocate a new slot if necessary.
3745 The RTL for a function can have several slots available: N is
3746 which slot to use. */
3749 assign_386_stack_local (mode, n)
3750 enum machine_mode mode;
/* Out-of-range slot index is a compiler bug (elided line aborts). */
3753 if (n < 0 || n >= MAX_386_STACK_LOCALS)
/* Lazily allocate the slot the first time it is requested, then cache
   it in i386_stack_locals for reuse within this function. */
3756 if (i386_stack_locals[(int) mode][n] == NULL_RTX)
3757 i386_stack_locals[(int) mode][n]
3758 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
3760 return i386_stack_locals[(int) mode][n];
/* Predicate: true iff OP is a MULT rtx (function header elided above;
   MODE is unused). */
3766 enum machine_mode mode;
3768 return (GET_CODE (op) == MULT);
/* Predicate: true iff OP is a DIV rtx (function header elided above;
   MODE is unused). */
3773 enum machine_mode mode;
3775 return (GET_CODE (op) == DIV);
3780 /* Create a new copy of an rtx.
3781 Recursively copies the operands of the rtx,
3782 except for those few rtx codes that are sharable.
3783 Doesn't share CONST */
3791 register RTX_CODE code;
3792 register char *format_ptr;
3794 code = GET_CODE (orig);
3807 /* SCRATCH must be shared because they represent distinct values. */
3812 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
3813 a LABEL_REF, it isn't sharable. */
3814 if (GET_CODE (XEXP (orig, 0)) == PLUS
3815 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
3816 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
3820 /* A MEM with a constant address is not sharable. The problem is that
3821 the constant address may need to be reloaded. If the mem is shared,
3822 then reloading one copy of this mem will cause all copies to appear
3823 to have been reloaded. */
/* Allocate the copy and transfer the bookkeeping flag bits. */
3826 copy = rtx_alloc (code);
3827 PUT_MODE (copy, GET_MODE (orig));
3828 copy->in_struct = orig->in_struct;
3829 copy->volatil = orig->volatil;
3830 copy->unchanging = orig->unchanging;
3831 copy->integrated = orig->integrated;
3833 copy->is_spill_rtx = orig->is_spill_rtx;
/* Walk the rtx format string, copying each operand by kind:
   'e' = sub-rtx (recursively copied), 'E' = rtx vector, 'w'/'i'/'s' =
   scalar fields copied verbatim. */
3835 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
3837 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
3839 switch (*format_ptr++)
/* Default to sharing, then overwrite with a deep copy if non-null. */
3842 XEXP (copy, i) = XEXP (orig, i);
3843 if (XEXP (orig, i) != NULL)
3844 XEXP (copy, i) = copy_rtx (XEXP (orig, i));
3849 XEXP (copy, i) = XEXP (orig, i);
3854 XVEC (copy, i) = XVEC (orig, i);
3855 if (XVEC (orig, i) != NULL)
3857 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
3858 for (j = 0; j < XVECLEN (copy, i); j++)
3859 XVECEXP (copy, i, j) = copy_rtx (XVECEXP (orig, i, j));
3864 XWINT (copy, i) = XWINT (orig, i);
3868 XINT (copy, i) = XINT (orig, i);
3873 XSTR (copy, i) = XSTR (orig, i);
3884 /* try to rewrite a memory address to make it valid */
3886 rewrite_address (mem_rtx)
3889 rtx index_rtx, base_rtx, offset_rtx, scale_rtx, ret_rtx;
3891 int offset_adjust = 0;
3892 int was_only_offset = 0;
3893 rtx mem_addr = XEXP (mem_rtx, 0);
3894 char *storage = (char *) oballoc (0);
3896 int is_spill_rtx = 0;
3898 in_struct = MEM_IN_STRUCT_P (mem_rtx);
3899 is_spill_rtx = RTX_IS_SPILL_P (mem_rtx);
/* First quick transform: reassociate (a + (reg + b)) into
   ((a + reg) + b) and accept it if that makes the address valid. */
3901 if (GET_CODE (mem_addr) == PLUS &&
3902 GET_CODE (XEXP (mem_addr, 1)) == PLUS &&
3903 GET_CODE (XEXP (XEXP (mem_addr, 1), 0)) == REG)
3904 { /* this part is utilized by the combiner */
3906 gen_rtx (PLUS, GET_MODE (mem_addr),
3907 gen_rtx (PLUS, GET_MODE (XEXP (mem_addr, 1)),
3909 XEXP (XEXP (mem_addr, 1), 0)),
3910 XEXP (XEXP (mem_addr, 1), 1));
3911 if (memory_address_p (GET_MODE (mem_rtx), ret_rtx))
3913 XEXP (mem_rtx, 0) = ret_rtx;
3914 RTX_IS_SPILL_P (ret_rtx) = is_spill_rtx;
3920 /* this part is utilized by loop.c */
3921 /* If the address contains PLUS (reg,const) and this pattern is invalid
3922 in this case - try to rewrite the address to make it valid intel1
3924 storage = (char *) oballoc (0);
3925 index_rtx = base_rtx = offset_rtx = NULL;
3926 /* find the base index and offset elements of the memory address */
3927 if (GET_CODE (mem_addr) == PLUS)
3929 if (GET_CODE (XEXP (mem_addr, 0)) == REG)
3931 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
3933 base_rtx = XEXP (mem_addr, 1);
3934 index_rtx = XEXP (mem_addr, 0);
3938 base_rtx = XEXP (mem_addr, 0);
3939 offset_rtx = XEXP (mem_addr, 1);
3942 else if (GET_CODE (XEXP (mem_addr, 0)) == MULT)
3944 index_rtx = XEXP (mem_addr, 0);
3945 if (GET_CODE (XEXP (mem_addr, 1)) == REG)
3947 base_rtx = XEXP (mem_addr, 1);
3951 offset_rtx = XEXP (mem_addr, 1);
3954 else if (GET_CODE (XEXP (mem_addr, 0)) == PLUS)
/* Nested PLUS: the special shape ((reg*const + const) + reg) + sym
   carries its inner constant into offset_adjust; otherwise split the
   inner PLUS into index and base directly. */
3957 if (GET_CODE (XEXP (XEXP (mem_addr, 0), 0)) == PLUS &&
3958 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0)) == MULT &&
3959 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 0)) == REG &&
3960 GET_CODE (XEXP (XEXP (XEXP (XEXP (mem_addr, 0), 0), 0), 1)) == CONST_INT &&
3961 GET_CODE (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1)) == CONST_INT &&
3962 GET_CODE (XEXP (XEXP (mem_addr, 0), 1)) == REG &&
3963 GET_CODE (XEXP (mem_addr, 1)) == SYMBOL_REF
3965 index_rtx = XEXP (XEXP (XEXP (mem_addr, 0), 0), 0);
3966 offset_rtx = XEXP (mem_addr, 1);
3967 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
3968 offset_adjust = INTVAL (XEXP (XEXP (XEXP (mem_addr, 0), 0), 1));
3972 offset_rtx = XEXP (mem_addr, 1);
3973 index_rtx = XEXP (XEXP (mem_addr, 0), 0);
3974 base_rtx = XEXP (XEXP (mem_addr, 0), 1);
3977 else if (GET_CODE (XEXP (mem_addr, 0)) == CONST_INT
/* Leading CONST_INT: the address is only a constant displacement. */
3979 was_only_offset = 1;
3982 offset_rtx = XEXP (mem_addr, 1);
3983 offset_adjust = INTVAL (XEXP (mem_addr, 0));
3984 if (offset_adjust == 0)
3986 XEXP (mem_rtx, 0) = offset_rtx;
3987 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
3997 else if (GET_CODE (mem_addr) == MULT)
3999 index_rtx = mem_addr;
/* Split a MULT index into register and constant scale; a non-constant
   scale cannot be rewritten (elided branch bails out). */
4006 if (index_rtx && GET_CODE (index_rtx) == MULT)
4008 if (GET_CODE (XEXP (index_rtx, 1)) != CONST_INT)
4013 scale_rtx = XEXP (index_rtx, 1);
4014 scale = INTVAL (scale_rtx);
4015 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
4017 /* now find which of the elements are invalid and try to fix them */
/* Case: constant index, no base -- fold index*scale into the offset.
   The three sub-branches merge the adjustment into a CONST(sym+int),
   a bare SYMBOL_REF, or a CONST_INT offset respectively. */
4018 if (index_rtx && GET_CODE (index_rtx) == CONST_INT && base_rtx == NULL)
4020 offset_adjust = INTVAL (index_rtx) * scale;
4021 if (offset_rtx && GET_CODE (offset_rtx) == CONST &&
4022 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4024 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4025 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4027 offset_rtx = copy_all_rtx (offset_rtx);
4028 XEXP (XEXP (offset_rtx, 0), 1) =
4029 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4030 if (!CONSTANT_P (offset_rtx))
4037 else if (offset_rtx && GET_CODE (offset_rtx) == SYMBOL_REF)
4040 gen_rtx (CONST, GET_MODE (offset_rtx),
4041 gen_rtx (PLUS, GET_MODE (offset_rtx),
4043 gen_rtx (CONST_INT, 0, offset_adjust)));
4044 if (!CONSTANT_P (offset_rtx))
4050 else if (offset_rtx && GET_CODE (offset_rtx) == CONST_INT)
4052 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4054 else if (!offset_rtx)
4056 offset_rtx = gen_rtx (CONST_INT, 0, 0);
4058 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4059 XEXP (mem_rtx, 0) = offset_rtx;
/* Fold reg+const forms of base/index into offset_adjust, leaving a
   bare register. */
4062 if (base_rtx && GET_CODE (base_rtx) == PLUS &&
4063 GET_CODE (XEXP (base_rtx, 0)) == REG &&
4064 GET_CODE (XEXP (base_rtx, 1)) == CONST_INT)
4066 offset_adjust += INTVAL (XEXP (base_rtx, 1));
4067 base_rtx = copy_all_rtx (XEXP (base_rtx, 0));
4069 else if (base_rtx && GET_CODE (base_rtx) == CONST_INT)
4071 offset_adjust += INTVAL (base_rtx);
4074 if (index_rtx && GET_CODE (index_rtx) == PLUS &&
4075 GET_CODE (XEXP (index_rtx, 0)) == REG &&
4076 GET_CODE (XEXP (index_rtx, 1)) == CONST_INT)
4078 offset_adjust += INTVAL (XEXP (index_rtx, 1)) * scale;
4079 index_rtx = copy_all_rtx (XEXP (index_rtx, 0));
/* Give up (elided) if the remaining index/base are still illegitimate;
   stack pointer is allowed as index only when unscaled and baseless. */
4083 if (!LEGITIMATE_INDEX_P (index_rtx)
4084 && !(index_rtx == stack_pointer_rtx && scale == 1 && base_rtx == NULL))
4092 if (!LEGITIMATE_INDEX_P (base_rtx) && GET_CODE (base_rtx) != REG)
/* Apply the accumulated offset_adjust to whatever offset form we have
   (same three shapes as above: CONST(sym+int), SYMBOL_REF, CONST_INT). */
4098 if (offset_adjust != 0)
4102 if (GET_CODE (offset_rtx) == CONST &&
4103 GET_CODE (XEXP (offset_rtx, 0)) == PLUS)
4105 if (GET_CODE (XEXP (XEXP (offset_rtx, 0), 0)) == SYMBOL_REF &&
4106 GET_CODE (XEXP (XEXP (offset_rtx, 0), 1)) == CONST_INT)
4108 offset_rtx = copy_all_rtx (offset_rtx);
4109 XEXP (XEXP (offset_rtx, 0), 1) =
4110 gen_rtx (CONST_INT, 0, INTVAL (XEXP (XEXP (offset_rtx, 0), 1)) + offset_adjust);
4111 if (!CONSTANT_P (offset_rtx))
4118 else if (GET_CODE (offset_rtx) == SYMBOL_REF)
4121 gen_rtx (CONST, GET_MODE (offset_rtx),
4122 gen_rtx (PLUS, GET_MODE (offset_rtx),
4124 gen_rtx (CONST_INT, 0, offset_adjust)));
4125 if (!CONSTANT_P (offset_rtx))
4131 else if (GET_CODE (offset_rtx) == CONST_INT)
4133 offset_rtx = gen_rtx (CONST_INT, 0, INTVAL (offset_rtx) + offset_adjust);
4143 offset_rtx = gen_rtx (CONST_INT, 0, offset_adjust);
/* Reassemble the address from the pieces present; a literal zero
   offset is dropped. Shapes: base+index*scale[+off], index+base[+off],
   index*scale[+off], index[+off], base[+off], or offset alone. */
4151 if (GET_CODE (offset_rtx) == CONST_INT &&
4152 INTVAL (offset_rtx) == 0)
4154 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx),
4155 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4161 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4162 gen_rtx (PLUS, GET_MODE (base_rtx),
4163 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4171 if (GET_CODE (offset_rtx) == CONST_INT &&
4172 INTVAL (offset_rtx) == 0)
4174 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, base_rtx);
4178 ret_rtx = gen_rtx (PLUS, GET_MODE (offset_rtx),
4179 gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx,
4189 if (GET_CODE (offset_rtx) == CONST_INT &&
4190 INTVAL (offset_rtx) == 0)
4192 ret_rtx = gen_rtx (MULT, GET_MODE (index_rtx), index_rtx, scale_rtx);
4197 gen_rtx (PLUS, GET_MODE (offset_rtx),
4198 gen_rtx (MULT, GET_MODE (index_rtx), index_rtx,
4205 if (GET_CODE (offset_rtx) == CONST_INT &&
4206 INTVAL (offset_rtx) == 0)
4208 ret_rtx = index_rtx;
4212 ret_rtx = gen_rtx (PLUS, GET_MODE (index_rtx), index_rtx, offset_rtx);
4221 if (GET_CODE (offset_rtx) == CONST_INT &&
4222 INTVAL (offset_rtx) == 0)
4228 ret_rtx = gen_rtx (PLUS, GET_MODE (base_rtx), base_rtx, offset_rtx);
4231 else if (was_only_offset)
4233 ret_rtx = offset_rtx;
/* Install the rewritten address, preserving the spill marking. */
4241 XEXP (mem_rtx, 0) = ret_rtx;
4242 RTX_IS_SPILL_P (XEXP (mem_rtx, 0)) = is_spill_rtx;
4254 /* return 1 if the first insn to set cc before insn also sets the register
4255 reg_rtx - otherwise return 0 */
4257 last_to_set_cc (reg_rtx, insn)
/* Scan backwards from INSN (parameter declarations, braces and return
   statements are elided in this extract).  */
4260 rtx prev_insn = PREV_INSN (insn);
/* Notes carry no code; keep scanning past them.  */
4264 if (GET_CODE (prev_insn) == NOTE)
4267 else if (GET_CODE (prev_insn) == INSN)
/* Anything that is not a single SET ends the backward scan.  */
4269 if (GET_CODE (PATTERN (prev_insn)) != SET)
/* The first SET found whose source sets cc decides the answer:
   succeed only when its destination is REG_RTX.  */
4272 if (rtx_equal_p (SET_DEST (PATTERN (prev_insn)), reg_rtx))
4274 if (sets_condition_code (SET_SRC (PATTERN (prev_insn))))
/* A SET of some other register whose source may clobber cc fails too.  */
4280 else if (!doesnt_set_condition_code (SET_SRC (PATTERN (prev_insn))))
4287 prev_insn = PREV_INSN (prev_insn);
/* Return nonzero if the operation PAT is known never to alter the
   condition code (the switch body listing those codes is elided in
   this extract).  */
4295 doesnt_set_condition_code (pat)
4298 switch (GET_CODE (pat))
/* Return nonzero if the operation PAT always sets the condition code as
   a side effect (the switch body listing those codes is elided in this
   extract).  */
4312 sets_condition_code (pat)
4315 switch (GET_CODE (pat))
/* Operand predicate: nonzero when OP is a CONST_INT between 0 and 32
   inclusive; MODE is ignored by the visible test.  Presumably bounds a
   small constant length for the string patterns in i386.md — TODO
   confirm against the .md file.  */
4339 str_immediate_operand (op, mode)
4341 enum machine_mode mode;
4343 if (GET_CODE (op) == CONST_INT && INTVAL (op) <= 32 && INTVAL (op) >= 0)
/* NOTE(review): the function header is elided in this extract; this
   looks like the body of is_fp_insn — confirm.  Nonzero when INSN is a
   single SET whose destination has a floating-point mode.  */
4355 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4356 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4357 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4358 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode))
4367 Return 1 if the mode of the SET_DEST of insn is floating point
4368 and it is not an fld or a move from memory to memory.
4369 Otherwise return 0 */
/* (Function header elided in this extract.)  */
4374 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4375 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4376 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4377 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
/* Destination must be one of the 387 stack registers ...  */
4378 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
4379 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_FLOAT_REG
/* NOTE(review): SET_SRC is applied to INSN here, but everywhere else in
   this file it is applied to PATTERN (insn).  This looks like it should
   read SET_SRC (PATTERN (insn)) != MEM (the "not an fld" test from the
   comment above) — confirm and fix where the full source is visible.  */
4380 && GET_CODE (SET_SRC (insn)) != MEM)
4389 Return 1 if the mode of the SET_DEST floating point and is memory
4390 and the source is a register.
/* (Function header elided in this extract.)  Matches a register-to-
   memory store in a floating-point mode.  */
4396 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SET
4397 && (GET_MODE (SET_DEST (PATTERN (insn))) == DFmode
4398 || GET_MODE (SET_DEST (PATTERN (insn))) == SFmode
4399 || GET_MODE (SET_DEST (PATTERN (insn))) == XFmode)
4400 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM
4401 && GET_CODE (SET_SRC (PATTERN (insn))) == REG)
4411 Return 1 if dep_insn sets a register which insn uses as a base
4412 or index to reference memory.
4413 otherwise return 0 */
4416 agi_dependent (insn, dep_insn)
/* DEP_INSN sets a register: an AGI (address-generation interlock) stall
   occurs if INSN addresses memory through that register.  */
4419 if (GET_CODE (dep_insn) == INSN
4420 && GET_CODE (PATTERN (dep_insn)) == SET
4421 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == REG)
4423 return (reg_mentioned_in_mem (SET_DEST (PATTERN (dep_insn)), insn));
/* A push modifies %esp implicitly, so also check whether INSN addresses
   memory relative to the stack pointer.  */
4426 if (GET_CODE (dep_insn) == INSN && GET_CODE (PATTERN (dep_insn)) == SET
4427 && GET_CODE (SET_DEST (PATTERN (dep_insn))) == MEM
4428 && push_operand (SET_DEST (PATTERN (dep_insn)),
4429 GET_MODE (SET_DEST (PATTERN (dep_insn)))))
4431 return (reg_mentioned_in_mem (stack_pointer_rtx, insn));
4439 Return 1 if reg is used in rtl as a base or index for a memory ref
4440 otherwise return 0. */
4443 reg_mentioned_in_mem (reg, rtl)
4448 register enum rtx_code code;
/* (Leaf-code early-return switch elided in this extract.)  */
4453 code = GET_CODE (rtl);
/* Any MEM whose address mentions REG is a hit.  */
4471 if (code == MEM && reg_mentioned_p (reg, rtl))
/* Otherwise recurse over the operands ('e') and rtx vectors ('E') of
   this expression, using the standard rtx format string.  */
4474 fmt = GET_RTX_FORMAT (code);
4475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4480 for (j = XVECLEN (rtl, i) - 1; j >= 0; j--)
4482 if (reg_mentioned_in_mem (reg, XVECEXP (rtl, i, j)))
4487 else if (fmt[i] == 'e' && reg_mentioned_in_mem (reg, XEXP (rtl, i)))
4494 /* Output the appropriate insns for doing strlen if not just doing repnz; scasb
4496 operands[0] = result, initialized with the startaddress
4497 operands[1] = alignment of the address.
4498 operands[2] = scratch register, initialized with the startaddress when
4499 not aligned, otherwise undefined
4501 This is just the body. It needs the initialisations mentioned above and
4502 some address computing at the end. These things are done in i386.md. */
4505 output_strlen_unroll (operands)
/* Operand/label table consumed by output_asm_insn below (local
   declarations are elided in this extract).  */
4510 xops[0] = operands[0]; /* Result */
4511 /* operands[1]; * Alignment */
4512 xops[1] = operands[2]; /* Scratch */
4513 xops[2] = GEN_INT (0);
4514 xops[3] = GEN_INT (2);
4515 xops[4] = GEN_INT (3);
4516 xops[5] = GEN_INT (4);
4517 /* xops[6] = gen_label_rtx (); * label when aligned to 3-byte */
4518 /* xops[7] = gen_label_rtx (); * label when aligned to 2-byte */
4519 xops[8] = gen_label_rtx (); /* label of main loop */
4520 if(TARGET_USE_Q_REG && QI_REG_P (xops[1]))
4521 xops[9] = gen_label_rtx (); /* pentium optimisation */
4522 xops[10] = gen_label_rtx (); /* end label 2 */
4523 xops[11] = gen_label_rtx (); /* end label 1 */
4524 xops[12] = gen_label_rtx (); /* end label */
4525 /* xops[13] * Temporary used */
/* Single-byte masks used to test each byte of a 32-bit word.  */
4526 xops[14] = GEN_INT (0xff);
4527 xops[15] = GEN_INT (0xff00);
4528 xops[16] = GEN_INT (0xff0000);
4529 xops[17] = GEN_INT (0xff000000);
4531 /* Loop to check 1..3 bytes for null to get an aligned pointer */
4533 /* is there a known alignment and is it less than 4 */
4534 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) < 4)
4536 /* is there a known alignment and is it not 2 */
4537 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4539 xops[6] = gen_label_rtx (); /* label when aligned to 3-byte */
4540 xops[7] = gen_label_rtx (); /* label when aligned to 2-byte */
4542 /* leave just the 3 lower bits */
4543 /* if this is a q-register, then the high part is used later */
4544 /* therefore use andl rather than andb */
4545 output_asm_insn (AS2 (and%L1,%4,%1), xops);
4546 /* is aligned to 4-byte address when zero */
4547 output_asm_insn (AS1 (je,%l8), xops);
4548 /* side-effect even Parity when %eax == 3 */
4549 output_asm_insn (AS1 (jp,%6), xops);
4551 /* is it aligned to 2 bytes ? */
4552 if (QI_REG_P (xops[1]))
4553 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
/* Non-q-register path emits the same compare; only the surrounding
   byte tests differ below.  */
4555 output_asm_insn (AS2 (cmp%L1,%3,%1), xops);
4556 output_asm_insn (AS1 (je,%7), xops);
4560 /* since the alignment is 2, we have to check 2 or 0 bytes */
4562 /* check if is aligned to 4 - byte */
4563 output_asm_insn (AS2 (and%L1,%3,%1), xops);
4564 /* is aligned to 4-byte address when zero */
4565 output_asm_insn (AS1 (je,%l8), xops);
/* xops[13] is a QImode MEM at the current pointer: the byte to test.  */
4568 xops[13] = gen_rtx (MEM, QImode, xops[0]);
4569 /* now, compare the bytes */
4570 /* compare with the high part of a q-reg gives shorter code */
4571 if (QI_REG_P (xops[1]))
4573 /* compare the first n unaligned byte on a byte per byte basis */
4574 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4575 /* when zero we reached the end */
4576 output_asm_insn (AS1 (je,%l12), xops);
4577 /* increment the address */
4578 output_asm_insn (AS1 (inc%L0,%0), xops);
4580 /* not needed with an alignment of 2 */
4581 if (GET_CODE (operands[1]) != CONST_INT || INTVAL (operands[1]) != 2)
4583 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4584 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
4585 output_asm_insn (AS1 (je,%l12), xops);
4586 output_asm_insn (AS1 (inc%L0,%0), xops);
4588 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4590 output_asm_insn (AS2 (cmp%B1,%h1,%13), xops);
/* Non-q-register variant: compare each byte against immediate zero.  */
4594 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4595 output_asm_insn (AS1 (je,%l12), xops);
4596 output_asm_insn (AS1 (inc%L0,%0), xops);
4598 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[7]));
4599 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4600 output_asm_insn (AS1 (je,%l12), xops);
4601 output_asm_insn (AS1 (inc%L0,%0), xops);
4603 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[6]));
4604 output_asm_insn (AS2 (cmp%B13,%2,%13), xops);
4606 output_asm_insn (AS1 (je,%l12), xops);
4607 output_asm_insn (AS1 (inc%L0,%0), xops);
4610 /* Generate loop to check 4 bytes at a time */
4611 /* IMHO it is not a good idea to align this loop. It gives only */
4612 /* huge programs, but does not help to speed up */
4613 /* ASM_OUTPUT_LOOP_ALIGN (asm_out_file); */
4614 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[8]));
/* Load the next aligned 32-bit word into the scratch register.  */
4616 xops[13] = gen_rtx (MEM, SImode, xops[0]);
4617 output_asm_insn (AS2 (mov%L1,%13,%1), xops);
4619 if (QI_REG_P (xops[1]))
4621 /* On i586 it is faster to combine the hi- and lo- part as
4622 a kind of lookahead. If anding both yields zero, then one
4623 of both *could* be zero, otherwise none of both is zero;
4624 this saves one instruction, on i486 this is slower
4625 tested with P-90, i486DX2-66, AMD486DX2-66 */
4628 output_asm_insn (AS2 (test%B1,%h1,%b1), xops);
4629 output_asm_insn (AS1 (jne,%l9), xops);
4632 /* check first byte */
4633 output_asm_insn (AS2 (test%B1,%b1,%b1), xops);
4634 output_asm_insn (AS1 (je,%l12), xops);
4636 /* check second byte */
4637 output_asm_insn (AS2 (test%B1,%h1,%h1), xops);
4638 output_asm_insn (AS1 (je,%l11), xops);
/* Slow path taken when the combined hi/lo test above could not prove
   both low bytes nonzero.  */
4641 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[9]));
4645 /* check first byte */
4646 output_asm_insn (AS2 (test%L1,%14,%1), xops);
4647 output_asm_insn (AS1 (je,%l12), xops);
4649 /* check second byte */
4650 output_asm_insn (AS2 (test%L1,%15,%1), xops);
4651 output_asm_insn (AS1 (je,%l11), xops);
4654 /* check third byte */
4655 output_asm_insn (AS2 (test%L1,%16,%1), xops);
4656 output_asm_insn (AS1 (je,%l10), xops);
4658 /* check fourth byte and increment address */
4659 output_asm_insn (AS2 (add%L0,%5,%0), xops);
4660 output_asm_insn (AS2 (test%L1,%17,%1), xops);
4661 output_asm_insn (AS1 (jne,%l8), xops);
4663 /* now generate fixups when the compare stops within a 4-byte word */
4664 output_asm_insn (AS2 (sub%L0,%4,%0), xops);
/* Fall-through labels: each earlier-byte hit falls through one more
   increment so the result points at the terminating null.  */
4666 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[10]));
4667 output_asm_insn (AS1 (inc%L0,%0), xops);
4669 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[11]));
4670 output_asm_insn (AS1 (inc%L0,%0), xops);
4672 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "L", CODE_LABEL_NUMBER (xops[12]));